"use strict";

// Babel CommonJS interop: mark this module as a transpiled ES module so that
// consumers importing it see `ContentDefinedChunker` as a named export.
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ContentDefinedChunker = undefined;

// Babel lazy-require pattern: each dependency is required on first use and
// cached in a module-level variable (`_x || _load_x()` at the call site).

// bluebird-lst — provides `coroutine`, used to run the generator in
// `computeChunks` as a promise-returning function.
var _bluebirdLst;

function _load_bluebirdLst() {
return _bluebirdLst = require("bluebird-lst");
}

// fs-extra-p — promisified fs; `read` is used to fill the chunking buffer.
var _fsExtraP;

function _load_fsExtraP() {
return _fsExtraP = require("fs-extra-p");
}

// rabin-bindings — native Rabin fingerprinting used to find
// content-defined chunk boundaries.
var _rabinBindings;

function _load_rabinBindings() {
return _rabinBindings = require("rabin-bindings");
}
|
// Splits a byte range of a file into content-defined chunks (Rabin
// fingerprinting) and computes a Blake2s checksum for each chunk.
class ContentDefinedChunker {
  /**
   * Computes chunk sizes and checksums for the byte range [start, end) of the
   * file open at descriptor `fd`.
   *
   * @param fd    open file descriptor to read from
   * @param start inclusive byte offset where the region begins
   * @param end   exclusive byte offset where the region ends
   * @param name  label used only in internal-error messages
   * @returns Promise resolving to `{checksums, sizes}` — parallel arrays where
   *          `checksums[i]` is the base64 Blake2s digest of the chunk of
   *          `sizes[i]` bytes; the sizes sum to `end - start`.
   */
  computeChunks(fd, start, end, name) {
    // bluebird-lst coroutine turns the generator into a promise-returning
    // function; each `yield` awaits the yielded promise.
    return (0, (_bluebirdLst || _load_bluebirdLst()).coroutine)(function* () {
      const fileSize = end - start;
      // Read window: at most 4 MiB, or the whole region if smaller.
      const buffer = Buffer.allocUnsafe(Math.min(4 * 1024 * 1024, fileSize));
      const rabin = (0, (_rabinBindings || _load_rabinBindings()).Rabin)();
      // Average chunk ~2^12 bytes; chunk size clamped to [8 KiB, 32 KiB].
      const avgBits = 12;
      const min = 8 * 1024;

      const max = 32 * 1024;
      rabin.configure(avgBits, min, max);
      const checksums = [];
      const allSizes = [];
      // Bytes after the last Rabin boundary of the previous window; they belong
      // to the first chunk of the next window.
      let tailBufferData = null;
      let readOffset = start;
      while (true) {
        const actualBufferSize = Math.min(end - readOffset, buffer.length);
        yield (0, (_fsExtraP || _load_fsExtraP()).read)(fd, buffer, 0, actualBufferSize, readOffset);
        // Avoid hashing stale bytes when the final read fills only part of the buffer.
        const dataBuffer = buffer.length === actualBufferSize ? buffer : buffer.slice(0, actualBufferSize);
        const sizes = [];
        rabin.fingerprint([dataBuffer], sizes);
        let chunkStart = 0;
        for (const size of sizes) {
          allSizes.push(size);
          let chunkEnd = chunkStart + size;
          const hash = new Blake2s(CHECKSUM_OUTPUT_LENGTH);
          if (tailBufferData !== null) {
            hash.update(tailBufferData);

            // NOTE(review): `chunkEnd` is shortened because the first reported
            // size presumably includes the carried tail bytes (Rabin state
            // appears to persist across fingerprint calls) — yet `hash.update`
            // below is still passed the unadjusted `size`. This is presumably
            // consistent with ../blake2s.js's update(buffer, offset, len/end)
            // signature; verify against that module before touching this.
            chunkEnd -= tailBufferData.length;
            tailBufferData = null;
          }
          hash.update(dataBuffer, chunkStart, size);
          checksums.push(digest(hash));
          chunkStart = chunkEnd;
        }
        // Bytes after the last boundary have no chunk yet — carry them forward.
        const tailSize = actualBufferSize - chunkStart;
        if (tailSize !== 0) {
          if (tailBufferData !== null) {
            throw new Error(`Internal error (${name}): tailBufferData must be null`);
          }
          tailBufferData = dataBuffer.slice(chunkStart, chunkStart + tailSize);
        }
        readOffset += actualBufferSize;
        if (readOffset >= end) {
          // End of region: the remaining tail becomes the final chunk.
          if (tailBufferData !== null) {
            allSizes.push(tailSize);
            checksums.push(computeChecksum(tailBufferData));
          }
          break;
        } else if (tailBufferData !== null) {
          // The tail slice aliases `buffer`, which the next read overwrites —
          // copy it before looping.
          tailBufferData = Buffer.from(tailBufferData);
        }
      }
      // Sanity check: the chunk sizes must account for every byte of the region.
      // NOTE(review): `reduce` has no initial value, so an empty `allSizes`
      // (start === end) throws TypeError — confirm callers never pass an empty range.
      const totalSize = allSizes.reduce(function (accumulator, currentValue) {
        return accumulator + currentValue;
      });
      if (totalSize !== fileSize) {
        throw new Error(`Internal error (${name}): size mismatch: expected: ${fileSize}, got: ${totalSize}`);
      }
      return { checksums, sizes: allSizes };
    })();
  }
}
|
exports.ContentDefinedChunker = ContentDefinedChunker;

// Digest output length passed to the Blake2s constructor
// (18 bytes -> 24 base64 characters per checksum).
const CHECKSUM_OUTPUT_LENGTH = 18;
// Project-local Blake2s hash implementation.
const Blake2s = require("../blake2s.js");
|
// Computes the base64 Blake2s checksum of a single in-memory chunk.
function computeChecksum(chunk) {
  const blake = new Blake2s(CHECKSUM_OUTPUT_LENGTH);
  blake.update(chunk);

  return digest(blake);
}
|
// Finalizes a hash object and encodes the raw digest bytes as base64.
function digest(hash) {
  const rawBytes = hash.digest();
  return rawBytes.toString("base64");
}
|
104 |
|
\ | No newline at end of file |