// Compiled (Babel) build artifact of vmdk-read.js, retrieved via UNPKG (22 kB raw JavaScript).
1'use strict';
2
3Object.defineProperty(exports, "__esModule", {
4 value: true
5});
6exports.readRawContent = exports.VMDKDirectParser = undefined;
7
8var _promise = require('babel-runtime/core-js/promise');
9
10var _promise2 = _interopRequireDefault(_promise);
11
12var _classCallCheck2 = require('babel-runtime/helpers/classCallCheck');
13
14var _classCallCheck3 = _interopRequireDefault(_classCallCheck2);
15
16var _createClass2 = require('babel-runtime/helpers/createClass');
17
18var _createClass3 = _interopRequireDefault(_createClass2);
19
20var _regenerator = require('babel-runtime/regenerator');
21
22var _regenerator2 = _interopRequireDefault(_regenerator);
23
24var _asyncToGenerator2 = require('babel-runtime/helpers/asyncToGenerator');
25
26var _asyncToGenerator3 = _interopRequireDefault(_asyncToGenerator2);
27
28var _getIterator2 = require('babel-runtime/core-js/get-iterator');
29
30var _getIterator3 = _interopRequireDefault(_getIterator2);
31
32var readGrain = function () {
33 var _ref = (0, _asyncToGenerator3.default)(_regenerator2.default.mark(function _callee(offsetSectors, buffer, compressed) {
34 var offset, size, grainBuffer, grainContent, lba;
35 return _regenerator2.default.wrap(function _callee$(_context) {
36 while (1) {
37 switch (_context.prev = _context.next) {
38 case 0:
39 offset = offsetSectors * sectorSize;
40 size = buffer.readUInt32LE(offset + 8);
41 grainBuffer = buffer.slice(offset + 12, offset + 12 + size);
42
43 if (!compressed) {
44 _context.next = 9;
45 break;
46 }
47
48 _context.next = 6;
49 return _zlib2.default.inflateSync(grainBuffer);
50
51 case 6:
52 _context.t0 = _context.sent;
53 _context.next = 10;
54 break;
55
56 case 9:
57 _context.t0 = grainBuffer;
58
59 case 10:
60 grainContent = _context.t0;
61 lba = parseU64b(buffer, offset, 'l2Lba');
62 return _context.abrupt('return', {
63 offsetSectors: offsetSectors,
64 offset: offset,
65 lba: lba,
66 lbaBytes: lba * sectorSize,
67 size: size,
68 buffer: grainBuffer,
69 grain: grainContent,
70 grainSize: grainContent.byteLength
71 });
72
73 case 13:
74 case 'end':
75 return _context.stop();
76 }
77 }
78 }, _callee, this);
79 }));
80
81 return function readGrain(_x, _x2, _x3) {
82 return _ref.apply(this, arguments);
83 };
84}();
85
86var readRawContent = exports.readRawContent = function () {
87 var _ref5 = (0, _asyncToGenerator3.default)(_regenerator2.default.mark(function _callee5(readStream) {
88 var virtualBuffer, headerBuffer, header, descriptorLength, descriptorBuffer, descriptor, remainingBuffer, buffer, rawOutputBuffer, l1Size, l2Size, l1, i, l1Entry, l2, j, l2Entry, grain, vmdkType;
89 return _regenerator2.default.wrap(function _callee5$(_context5) {
90 while (1) {
91 switch (_context5.prev = _context5.next) {
92 case 0:
93 virtualBuffer = new _virtualBuffer.VirtualBuffer(readStream);
94 _context5.next = 3;
95 return virtualBuffer.readChunk(512, 'header');
96
97 case 3:
98 headerBuffer = _context5.sent;
99 header = parseHeader(headerBuffer);
100
101 // I think the multiplications are OK, because the descriptor is always at the beginning of the file
102
103 descriptorLength = header.descriptorSizeSectors * sectorSize;
104 _context5.next = 8;
105 return virtualBuffer.readChunk(descriptorLength, 'descriptor');
106
107 case 8:
108 descriptorBuffer = _context5.sent;
109 descriptor = parseDescriptor(descriptorBuffer);
110
111 // TODO: we concat them back for now so that the indices match, we'll have to introduce a bias later
112
113 _context5.next = 12;
114 return virtualBuffer.readChunk(-1, 'remainder');
115
116 case 12:
117 remainingBuffer = _context5.sent;
118 buffer = Buffer.concat([headerBuffer, descriptorBuffer, remainingBuffer]);
119
120 if (header.grainDirectoryOffsetSectors === -1) {
121 header = parseHeader(buffer.slice(-1024, -1024 + sectorSize));
122 }
123 rawOutputBuffer = new Buffer(header.capacitySectors * sectorSize);
124
125 rawOutputBuffer.fill(0);
126 l1Size = Math.floor((header.capacitySectors + header.l1EntrySectors - 1) / header.l1EntrySectors);
127 l2Size = header.numGTEsPerGT;
128 l1 = [];
129 i = 0;
130
131 case 21:
132 if (!(i < l1Size)) {
133 _context5.next = 41;
134 break;
135 }
136
137 l1Entry = buffer.readUInt32LE(header.grainDirectoryOffsetSectors * sectorSize + 4 * i);
138
139 if (!(l1Entry !== 0)) {
140 _context5.next = 38;
141 break;
142 }
143
144 l1.push(l1Entry);
145 l2 = [];
146 j = 0;
147
148 case 27:
149 if (!(j < l2Size)) {
150 _context5.next = 38;
151 break;
152 }
153
154 l2Entry = buffer.readUInt32LE(l1Entry * sectorSize + 4 * j);
155
156 if (!(l2Entry !== 0 && l2Entry !== 1)) {
157 _context5.next = 35;
158 break;
159 }
160
161 _context5.next = 32;
162 return readGrain(l2Entry, buffer, header['flags']['compressedGrains']);
163
164 case 32:
165 grain = _context5.sent;
166
167 grain.grain.copy(rawOutputBuffer, grain.lba * sectorSize);
168 l2[j] = grain;
169
170 case 35:
171 j++;
172 _context5.next = 27;
173 break;
174
175 case 38:
176 i++;
177 _context5.next = 21;
178 break;
179
180 case 41:
181 vmdkType = descriptor['descriptor']['createType'];
182
183 if (!(!vmdkType || vmdkType.toLowerCase() !== 'streamOptimized'.toLowerCase())) {
184 _context5.next = 44;
185 break;
186 }
187
188 throw new Error('unsupported VMDK type "' + vmdkType + '", only streamOptimized is supported');
189
190 case 44:
191 return _context5.abrupt('return', { descriptor: descriptor.descriptor, extents: descriptor.extents, rawFile: rawOutputBuffer });
192
193 case 45:
194 case 'end':
195 return _context5.stop();
196 }
197 }
198 }, _callee5, this);
199 }));
200
201 return function readRawContent(_x4) {
202 return _ref5.apply(this, arguments);
203 };
204}();
205
206var _zlib = require('zlib');
207
208var _zlib2 = _interopRequireDefault(_zlib);
209
210var _virtualBuffer = require('./virtual-buffer');
211
212function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
213
214var sectorSize = 512;
215var compressionDeflate = 'COMPRESSION_DEFLATE';
216var compressionNone = 'COMPRESSION_NONE';
217var compressionMap = [compressionNone, compressionDeflate];
218
219function parseS64b(buffer, offset, valueName) {
220 var low = buffer.readInt32LE(offset);
221 var high = buffer.readInt32LE(offset + 4);
222 // here there might be a surprise because we are reading 64 integers into double floats (53 bits mantissa)
223 var value = low | high << 32;
224 if ((value & Math.pow(2, 32) - 1) !== low) {
225 throw new Error('Unsupported VMDK, ' + valueName + ' is too big');
226 }
227 return value;
228}
229
230function parseU64b(buffer, offset, valueName) {
231 var low = buffer.readUInt32LE(offset);
232 var high = buffer.readUInt32LE(offset + 4);
233 // here there might be a surprise because we are reading 64 integers into double floats (53 bits mantissa)
234 var value = low | high << 32;
235 if ((value & Math.pow(2, 32) - 1) !== low) {
236 throw new Error('Unsupported VMDK, ' + valueName + ' is too big');
237 }
238 return value;
239}
240
241function parseDescriptor(descriptorSlice) {
242 var descriptorText = descriptorSlice.toString('ascii').replace(/\x00+$/, '');
243 var descriptorDict = {};
244 var extentList = [];
245 var lines = descriptorText.split(/\r?\n/).filter(function (line) {
246 return line.trim().length > 0 && line[0] !== '#';
247 });
248 var _iteratorNormalCompletion = true;
249 var _didIteratorError = false;
250 var _iteratorError = undefined;
251
252 try {
253 for (var _iterator = (0, _getIterator3.default)(lines), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
254 var line = _step.value;
255
256 var defLine = line.split('=');
257 // the wonky quote test is to avoid having an equal sign in the name of an extent
258 if (defLine.length === 2 && defLine[0].indexOf('"') === -1) {
259 descriptorDict[defLine[0]] = defLine[1].replace(/['"]+/g, '');
260 } else {
261 var items = line.split(' ');
262 extentList.push({
263 access: items[0],
264 sizeSectors: items[1],
265 type: items[2],
266 name: items[3],
267 offset: items.length > 4 ? items[4] : 0
268 });
269 }
270 }
271 } catch (err) {
272 _didIteratorError = true;
273 _iteratorError = err;
274 } finally {
275 try {
276 if (!_iteratorNormalCompletion && _iterator.return) {
277 _iterator.return();
278 }
279 } finally {
280 if (_didIteratorError) {
281 throw _iteratorError;
282 }
283 }
284 }
285
286 return { descriptor: descriptorDict, extents: extentList };
287}
288
289function parseFlags(flagBuffer) {
290 var number = flagBuffer.readUInt32LE(0);
291 return {
292 newLineTest: !!(number & 1 << 0),
293 useSecondaryGrain: !!(number & 1 << 1),
294 useZeroedGrainTable: !!(number & 1 << 2),
295 compressedGrains: !!(number & 1 << 16),
296 hasMarkers: !!(number & 1 << 17)
297 };
298}
299
300function parseHeader(buffer) {
301 var magicString = buffer.slice(0, 4).toString('ascii');
302 if (magicString !== 'KDMV') {
303 throw new Error('not a VMDK file');
304 }
305 var version = buffer.readUInt32LE(4);
306 if (version !== 1 && version !== 3) {
307 throw new Error('unsupported VMDK version ' + version + ', only version 1 and 3 are supported');
308 }
309 var flags = parseFlags(buffer.slice(8, 12));
310 var capacitySectors = parseU64b(buffer, 12, 'capacitySectors');
311 var grainSizeSectors = parseU64b(buffer, 20, 'grainSizeSectors');
312 var descriptorOffsetSectors = parseU64b(buffer, 28, 'descriptorOffsetSectors');
313 var descriptorSizeSectors = parseU64b(buffer, 36, 'descriptorSizeSectors');
314 var numGTEsPerGT = buffer.readUInt32LE(44);
315 var rGrainDirectoryOffsetSectors = parseS64b(buffer, 48, 'rGrainDirectoryOffsetSectors');
316 var grainDirectoryOffsetSectors = parseS64b(buffer, 56, 'grainDirectoryOffsetSectors');
317 var overheadSectors = parseS64b(buffer, 64, 'overheadSectors');
318 var compressionMethod = compressionMap[buffer.readUInt16LE(77)];
319 var l1EntrySectors = numGTEsPerGT * grainSizeSectors;
320 return {
321 flags: flags,
322 compressionMethod: compressionMethod,
323 grainSizeSectors: grainSizeSectors,
324 overheadSectors: overheadSectors,
325 capacitySectors: capacitySectors,
326 descriptorOffsetSectors: descriptorOffsetSectors,
327 descriptorSizeSectors: descriptorSizeSectors,
328 grainDirectoryOffsetSectors: grainDirectoryOffsetSectors,
329 rGrainDirectoryOffsetSectors: rGrainDirectoryOffsetSectors,
330 l1EntrySectors: l1EntrySectors,
331 numGTEsPerGT: numGTEsPerGT
332 };
333}
334
335
336function tryToParseMarker(buffer) {
337 var value = buffer.readUInt32LE(0);
338 var size = buffer.readUInt32LE(8);
339 var type = buffer.readUInt32LE(12);
340 return { value: value, size: size, type: type };
341}
342
343function alignSectors(number) {
344 return Math.ceil(number / sectorSize) * sectorSize;
345}
346
347var VMDKDirectParser = exports.VMDKDirectParser = function () {
348 function VMDKDirectParser(readStream) {
349 (0, _classCallCheck3.default)(this, VMDKDirectParser);
350
351 this.virtualBuffer = new _virtualBuffer.VirtualBuffer(readStream);
352 this.header = null;
353 }
354
355 // I found a VMDK file whose L1 and L2 table did not have a marker, but they were at the top
356 // I detect this case and eat those tables first then let the normal loop go over the grains.
357
358
359 (0, _createClass3.default)(VMDKDirectParser, [{
360 key: '_readL1',
361 value: function () {
362 var _ref2 = (0, _asyncToGenerator3.default)(_regenerator2.default.mark(function _callee2() {
363 var position, l1entries, sectorAlignedL1Bytes, l1Buffer, l2Start, l2IsContiguous, i, l1Entry, previousL1Entry, l1L2FreeSpace, l2entries, l2ByteSize, l2Buffer, grainsAreInAscendingOrder, previousL2Entry, firstGrain, _i, l2Entry, freeSpace;
364
365 return _regenerator2.default.wrap(function _callee2$(_context2) {
366 while (1) {
367 switch (_context2.prev = _context2.next) {
368 case 0:
369 position = this.virtualBuffer.position;
370 l1entries = Math.floor((this.header.capacitySectors + this.header.l1EntrySectors - 1) / this.header.l1EntrySectors);
371 sectorAlignedL1Bytes = alignSectors(l1entries * 4);
372 _context2.next = 5;
373 return this.virtualBuffer.readChunk(sectorAlignedL1Bytes, 'L1 table ' + position);
374
375 case 5:
376 l1Buffer = _context2.sent;
377 l2Start = 0;
378 l2IsContiguous = true;
379
380 for (i = 0; i < l1entries; i++) {
381 l1Entry = l1Buffer.readUInt32LE(i * 4);
382
383 if (i > 0) {
384 previousL1Entry = l1Buffer.readUInt32LE((i - 1) * 4);
385
386 l2IsContiguous = l2IsContiguous && l1Entry - previousL1Entry === 4;
387 } else {
388 l2IsContiguous = l1Entry * sectorSize === this.virtualBuffer.position || l1Entry * sectorSize === this.virtualBuffer.position + 512;
389 l2Start = l1Entry * sectorSize;
390 }
391 }
392
393 if (l2IsContiguous) {
394 _context2.next = 11;
395 break;
396 }
397
398 return _context2.abrupt('return', null);
399
400 case 11:
401 l1L2FreeSpace = l2Start - this.virtualBuffer.position;
402
403 if (!(l1L2FreeSpace > 0)) {
404 _context2.next = 15;
405 break;
406 }
407
408 _context2.next = 15;
409 return this.virtualBuffer.readChunk(l1L2FreeSpace, 'freeSpace between L1 and L2');
410
411 case 15:
412 l2entries = Math.ceil(this.header.capacitySectors / this.header.grainSizeSectors);
413 l2ByteSize = alignSectors(l1entries * this.header.numGTEsPerGT * 4);
414 _context2.next = 19;
415 return this.virtualBuffer.readChunk(l2ByteSize, 'L2 table ' + position);
416
417 case 19:
418 l2Buffer = _context2.sent;
419 grainsAreInAscendingOrder = true;
420 previousL2Entry = 0;
421 firstGrain = null;
422
423 for (_i = 0; _i < l2entries; _i++) {
424 l2Entry = l2Buffer.readUInt32LE(_i * 4);
425
426 if (_i > 0 && previousL2Entry !== 0 && l2Entry !== 0) {
427 grainsAreInAscendingOrder = grainsAreInAscendingOrder && previousL2Entry < l2Entry;
428 }
429 previousL2Entry = l2Entry;
430 if (firstGrain === null) {
431 firstGrain = l2Entry;
432 }
433 }
434
435 if (grainsAreInAscendingOrder) {
436 _context2.next = 26;
437 break;
438 }
439
440 throw new Error('Unsupported file format');
441
442 case 26:
443 freeSpace = firstGrain * sectorSize - this.virtualBuffer.position;
444
445 if (!(freeSpace > 0)) {
446 _context2.next = 30;
447 break;
448 }
449
450 _context2.next = 30;
451 return this.virtualBuffer.readChunk(freeSpace, 'freeSpace after L2');
452
453 case 30:
454 case 'end':
455 return _context2.stop();
456 }
457 }
458 }, _callee2, this);
459 }));
460
461 function _readL1() {
462 return _ref2.apply(this, arguments);
463 }
464
465 return _readL1;
466 }()
467 }, {
468 key: 'readHeader',
469 value: function () {
470 var _ref3 = (0, _asyncToGenerator3.default)(_regenerator2.default.mark(function _callee3() {
471 var headerBuffer, magicString, version, descriptorLength, descriptorBuffer, l1PositionBytes, endOfDescriptor;
472 return _regenerator2.default.wrap(function _callee3$(_context3) {
473 while (1) {
474 switch (_context3.prev = _context3.next) {
475 case 0:
476 _context3.next = 2;
477 return this.virtualBuffer.readChunk(512, 'readHeader');
478
479 case 2:
480 headerBuffer = _context3.sent;
481 magicString = headerBuffer.slice(0, 4).toString('ascii');
482
483 if (!(magicString !== 'KDMV')) {
484 _context3.next = 6;
485 break;
486 }
487
488 throw new Error('not a VMDK file');
489
490 case 6:
491 version = headerBuffer.readUInt32LE(4);
492
493 if (!(version !== 1 && version !== 3)) {
494 _context3.next = 9;
495 break;
496 }
497
498 throw new Error('unsupported VMDK version ' + version + ', only version 1 and 3 are supported');
499
500 case 9:
501 this.header = parseHeader(headerBuffer);
502 // I think the multiplications are OK, because the descriptor is always at the beginning of the file
503 descriptorLength = this.header.descriptorSizeSectors * sectorSize;
504 _context3.next = 13;
505 return this.virtualBuffer.readChunk(descriptorLength, 'descriptor');
506
507 case 13:
508 descriptorBuffer = _context3.sent;
509
510 this.descriptor = parseDescriptor(descriptorBuffer);
511 l1PositionBytes = null;
512
513 if (this.header.grainDirectoryOffsetSectors !== -1 && this.header.grainDirectoryOffsetSectors !== 0) {
514 l1PositionBytes = this.header.grainDirectoryOffsetSectors * sectorSize;
515 }
516 endOfDescriptor = this.virtualBuffer.position;
517
518 if (!(l1PositionBytes !== null && (l1PositionBytes === endOfDescriptor || l1PositionBytes === endOfDescriptor + sectorSize))) {
519 _context3.next = 24;
520 break;
521 }
522
523 if (!(l1PositionBytes === endOfDescriptor + sectorSize)) {
524 _context3.next = 22;
525 break;
526 }
527
528 _context3.next = 22;
529 return this.virtualBuffer.readChunk(sectorSize, 'skipping L1 marker');
530
531 case 22:
532 _context3.next = 24;
533 return this._readL1();
534
535 case 24:
536 return _context3.abrupt('return', this.header);
537
538 case 25:
539 case 'end':
540 return _context3.stop();
541 }
542 }
543 }, _callee3, this);
544 }));
545
546 function readHeader() {
547 return _ref3.apply(this, arguments);
548 }
549
550 return readHeader;
551 }()
552 }, {
553 key: 'next',
554 value: function () {
555 var _ref4 = (0, _asyncToGenerator3.default)(_regenerator2.default.mark(function _callee4() {
556 var position, sector, marker, grainDiskSize, alignedGrainDiskSize, remainOfBufferSize, remainderOfGrainBuffer, grainBuffer;
557 return _regenerator2.default.wrap(function _callee4$(_context4) {
558 while (1) {
559 switch (_context4.prev = _context4.next) {
560 case 0:
561 if (this.virtualBuffer.isDepleted) {
562 _context4.next = 25;
563 break;
564 }
565
566 position = this.virtualBuffer.position;
567 _context4.next = 4;
568 return this.virtualBuffer.readChunk(512, 'marker start ' + position);
569
570 case 4:
571 sector = _context4.sent;
572
573 if (!(sector.length === 0)) {
574 _context4.next = 7;
575 break;
576 }
577
578 return _context4.abrupt('break', 25);
579
580 case 7:
581 marker = tryToParseMarker(sector);
582
583 if (!(marker.size === 0)) {
584 _context4.next = 14;
585 break;
586 }
587
588 if (!(marker.value !== 0)) {
589 _context4.next = 12;
590 break;
591 }
592
593 _context4.next = 12;
594 return this.virtualBuffer.readChunk(marker.value * sectorSize, 'other marker value ' + this.virtualBuffer.position);
595
596 case 12:
597 _context4.next = 23;
598 break;
599
600 case 14:
601 if (!(marker.size > 10)) {
602 _context4.next = 23;
603 break;
604 }
605
606 grainDiskSize = marker.size + 12;
607 alignedGrainDiskSize = alignSectors(grainDiskSize);
608 remainOfBufferSize = alignedGrainDiskSize - sectorSize;
609 _context4.next = 20;
610 return this.virtualBuffer.readChunk(remainOfBufferSize, 'grain remainder ' + this.virtualBuffer.position);
611
612 case 20:
613 remainderOfGrainBuffer = _context4.sent;
614 grainBuffer = Buffer.concat([sector, remainderOfGrainBuffer]);
615 return _context4.abrupt('return', readGrain(0, grainBuffer, this.header.compressionMethod === compressionDeflate && this.header.flags.compressedGrains));
616
617 case 23:
618 _context4.next = 0;
619 break;
620
621 case 25:
622 return _context4.abrupt('return', new _promise2.default(function (resolve) {
623 return resolve(null);
624 }));
625
626 case 26:
627 case 'end':
628 return _context4.stop();
629 }
630 }
631 }, _callee4, this);
632 }));
633
634 function next() {
635 return _ref4.apply(this, arguments);
636 }
637
638 return next;
639 }()
640 }]);
641 return VMDKDirectParser;
642}();
643//# sourceMappingURL=vmdk-read.js.map
\No newline at end of file