// Compiled output served via UNPKG (4.26 kB, JavaScript) — page chrome
// from the original scrape converted to a comment so the file parses.
"use strict";

// Placeholder binding; the real class is assigned after its declaration below.
exports.ContentDefinedChunker = undefined;

// Mark this CommonJS module as transpiled ESM so Babel interop helpers
// (`interopRequireDefault` etc.) treat it correctly on import.
Object.defineProperty(exports, "__esModule", {
  value: true
});
7
var _bluebirdLst;

// Lazily loads "bluebird-lst" on first use and caches the module object in
// the module-level `_bluebirdLst` binding (call sites check the cache first).
function _load_bluebirdLst() {
  _bluebirdLst = require("bluebird-lst");
  return _bluebirdLst;
}
13
var _fsExtraP;

// Lazily loads "fs-extra-p" (promisified fs-extra) on first use and caches
// the module object in the module-level `_fsExtraP` binding.
function _load_fsExtraP() {
  _fsExtraP = require("fs-extra-p");
  return _fsExtraP;
}
19
var _rabinBindings;

// Lazily loads the native "rabin-bindings" addon on first use and caches the
// module object in the module-level `_rabinBindings` binding.
function _load_rabinBindings() {
  _rabinBindings = require("rabin-bindings");
  return _rabinBindings;
}
25
// Splits a byte range of a file into content-defined chunks using Rabin
// fingerprinting and computes a BLAKE2s checksum (base64) for each chunk.
class ContentDefinedChunker {
  /**
   * Computes content-defined chunk sizes and checksums for the byte range
   * [start, end) of the already-open file descriptor `fd`.
   *
   * @param {number} fd open file descriptor to read from
   * @param {number} start inclusive start offset of the range
   * @param {number} end exclusive end offset of the range
   * @param {string} name label used only in internal-error messages
   * @returns {Promise<{checksums: Array<string>, sizes: Array<number>}>}
   * @throws {Error} if the chunk sizes do not sum to the range size
   */
  computeChunks(fd, start, end, name) {
    return (0, (_bluebirdLst || _load_bluebirdLst()).coroutine)(function* () {
      const fileSize = end - start;
      // Read in windows of at most 4 MiB to bound memory usage.
      const buffer = Buffer.allocUnsafe(Math.min(4 * 1024 * 1024, fileSize));
      const rabin = (0, (_rabinBindings || _load_rabinBindings()).Rabin)();
      const avgBits = 12;
      const min = 8 * 1024;
      // see note in the nsis.ts about archive dict size
      const max = 32 * 1024;
      rabin.configure(avgBits, min, max);
      const checksums = [];
      const allSizes = [];
      // Bytes after the last Rabin boundary of the previous window; they were
      // already fed to rabin (its state persists across windows), so the first
      // reported size of the next window includes them.
      let tailBufferData = null;
      let readOffset = start;
      while (true) {
        const actualBufferSize = Math.min(end - readOffset, buffer.length);
        yield (0, (_fsExtraP || _load_fsExtraP()).read)(fd, buffer, 0, actualBufferSize, readOffset);
        const dataBuffer = buffer.length === actualBufferSize ? buffer : buffer.slice(0, actualBufferSize);
        const sizes = [];
        rabin.fingerprint([dataBuffer], sizes);
        let chunkStart = 0;
        for (const size of sizes) {
          allSizes.push(size);
          let chunkEnd = chunkStart + size;
          const hash = new Blake2s(CHECKSUM_OUTPUT_LENGTH);
          if (tailBufferData !== null) {
            hash.update(tailBufferData);
            // if there is the tail data (already processed by rabin data), first size includes it
            chunkEnd -= tailBufferData.length;
            tailBufferData = null;
          }
          hash.update(dataBuffer, chunkStart, size);
          checksums.push(digest(hash));
          chunkStart = chunkEnd;
        }
        const tailSize = actualBufferSize - chunkStart;
        if (tailSize !== 0) {
          if (tailBufferData !== null) {
            throw new Error(`Internal error (${name}): tailBufferData must be null`);
          }
          tailBufferData = dataBuffer.slice(chunkStart, chunkStart + tailSize);
        }
        readOffset += actualBufferSize;
        if (readOffset >= end) {
          // End of range — flush the final partial chunk, if any.
          if (tailBufferData !== null) {
            allSizes.push(tailSize);
            checksums.push(computeChecksum(tailBufferData));
          }
          break;
        } else if (tailBufferData !== null) {
          // copy data — the tail is a view into `buffer`, which the next
          // read overwrites, so materialize it before looping.
          tailBufferData = Buffer.from(tailBufferData);
        }
      }
      // Seed value 0 is required: for an empty range allSizes is empty, and
      // Array#reduce on an empty array without an initial value throws a
      // TypeError instead of yielding the expected total of 0.
      const totalSize = allSizes.reduce(function (accumulator, currentValue) {
        return accumulator + currentValue;
      }, 0);
      if (totalSize !== fileSize) {
        throw new Error(`Internal error (${name}): size mismatch: expected: ${fileSize}, got: ${totalSize}`);
      }
      return { checksums, sizes: allSizes };
    })();
  }
}
exports.ContentDefinedChunker = ContentDefinedChunker;

// base64 - should be divisible by 3 to avoid paddings
// (18 raw bytes encode to exactly 24 base64 characters, no '=' padding)
const CHECKSUM_OUTPUT_LENGTH = 18;

const Blake2s = require("../blake2s.js");
// Hashes one complete chunk with BLAKE2s and returns its base64 digest.
function computeChecksum(chunk) {
  const blake = new Blake2s(CHECKSUM_OUTPUT_LENGTH);
  blake.update(chunk);
  // node-base91 doesn't make a lot of sense - 29KB vs 30KB Because for base64 string value in the yml never escaped, but node-base91 often escaped (single quotes) and it adds extra 2 symbols.
  return digest(blake);
}
// Finalizes the given hash object and encodes the raw digest bytes as a
// base64 string (the hash's digest() is expected to return a Buffer).
function digest(hash) {
  const raw = hash.digest();
  return raw.toString("base64");
}
104//# sourceMappingURL=ContentDefinedChunker.js.map
\No newline at end of file