1 | "use strict";
|
2 |
|
3 | Object.defineProperty(exports, "__esModule", {
|
4 | value: true
|
5 | });
|
6 | exports.ensureArray = exports.ParsableFile = void 0;
|
7 | exports.parseOVAFile = parseOVAFile;
|
8 | exports.parseOVF = parseOVF;
|
9 | var _assert = _interopRequireDefault(require("assert"));
|
10 | var _find = _interopRequireDefault(require("lodash/find"));
|
11 | var _forEach = _interopRequireDefault(require("lodash/forEach"));
|
12 | var _pako = _interopRequireDefault(require("pako"));
|
13 | var _sum = _interopRequireDefault(require("lodash/sum"));
|
14 | var _xml2js = _interopRequireWildcard(require("xml2js"));
|
15 | var _ = require(".");
|
16 | var _util = require("./util");
|
17 | function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); }
|
18 | function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }
|
19 | function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
// Byte multipliers for single-letter memory-unit prefixes (binary units, i.e.
// powers of 1024), keyed by the lowercased first letter of the unit name.
const MEMORY_UNIT_TO_FACTOR = {
  k: 1024,
  m: 1024 ** 2,
  g: 1024 ** 3,
  t: 1024 ** 4
};
|
// Handlers keyed by the OVF/CIM ResourceType of a VirtualHardwareSection item.
// Each handler folds one parsed <Item> into the accumulated `data` object.
// 3 = processor, 4 = memory, 10 = ethernet adapter, 17 = disk drive.
const RESOURCE_TYPE_TO_HANDLER = {
  // Processor: VirtualQuantity carries the vCPU count.
  3: (data, { VirtualQuantity: nCpus }) => {
    data.nCpus = +nCpus;
  },
  // Memory: VirtualQuantity expressed in AllocationUnits (e.g. "byte * 2^20").
  4: (data, { AllocationUnits: unit, VirtualQuantity: quantity }) => {
    data.memory = quantity * allocationUnitsToFactor(unit);
  },
  // Ethernet adapter: record the connected network name when allocation is on.
  // NOTE(review): `enabled` is the raw XML value, so the string 'false' would
  // be truthy here — confirm the parser never yields it as a string.
  10: ({ networks }, { AutomaticAllocation: enabled, Connection: name }) => {
    if (!enabled) {
      return;
    }
    networks.push(name);
  },
  // Disk drive: attach labels and a controller position to the disk declared
  // in the DiskSection (looked up through the HostResource reference).
  17: (
    { disks },
    {
      Address: address,
      AddressOnParent: position,
      Description: description = 'No description',
      ElementName: name,
      Caption: caption = name,
      HostResource: resource
    }
  ) => {
    const diskId = resource.match(/^(?:ovf:)?\/disk\/(.+)$/);
    const disk = diskId && disks[diskId[1]];
    // Some OVFs omit AddressOnParent; try to recover a `target` field from an
    // Address of the form "{key=value,key=value,...}".
    if (position === undefined && address !== undefined) {
      const compact = address.replace(/\s+/g, '');
      if (compact[0] === '{' && compact[compact.length - 1] === '}') {
        const fields = Object.fromEntries(
          compact
            .slice(1, -1)
            .split(',')
            .map(pair => pair.split('='))
        );
        if ('target' in fields) {
          position = +fields.target;
        }
      }
    }
    // Fall back to position 0 when nothing usable was found.
    if (position === undefined) {
      position = 0;
    }
    if (!disk) {
      console.error(`No disk found: '${diskId}'.`);
      return;
    }
    disk.descriptionLabel = description;
    disk.nameLabel = caption;
    disk.position = +position;
  }
};
|
/**
 * Parses one 512-byte POSIX/GNU tar header.
 *
 * @param header buffer-like slice of at least the first 136 header bytes
 * @param stringDeserializer `(bytes, encoding) => string` — injected so the
 *        caller chooses the Buffer/TextDecoder backend
 * @returns `{ fileName, fileSize }`, or `null` for an all-zero
 *          (end-of-archive) header
 */
function parseTarHeader(header, stringDeserializer) {
  // Name field: bytes 0-99, NUL-padded ASCII.
  const fileName = stringDeserializer(header.slice(0, 100), 'ascii').split('\0')[0];
  if (fileName.length === 0) {
    return null;
  }
  // Size field: bytes 124-135.
  const sizeBuffer = header.slice(124, 124 + 12);
  const sizeBytes = new Uint8Array(sizeBuffer);
  let fileSize;
  if (sizeBytes[0] === 128) {
    // GNU base-256 extension: 0x80 flag byte followed by a big-endian integer.
    fileSize = 0;
    for (let i = 1; i < sizeBytes.length; i++) {
      fileSize = fileSize * 256 + sizeBytes[i];
    }
  } else {
    // Classic format: NUL/space-terminated octal ASCII string.
    fileSize = parseInt(stringDeserializer(sizeBuffer.slice(0, 11), 'ascii'), 8);
  }
  return { fileName, fileSize };
}
|
// Abstract handle over a file (or a fragment of one) inside an OVA archive.
// Concrete implementations are supplied by the environment (e.g. browser File
// or Node fd wrappers) and must override both methods.
class ParsableFile {
  // Returns a new ParsableFile covering bytes [start, end) of this one.
  slice(start, end) {}
  // Resolves with the file content as a buffer-like object.
  async read() {}
}
exports.ParsableFile = ParsableFile;
|
// Normalizes an xml2js value: undefined → [], scalar → [scalar], arrays are
// returned unchanged (with explicitArray: false, single elements parse as
// scalars rather than one-element arrays).
const ensureArray = value => {
  if (Array.isArray(value)) {
    return value;
  }
  return value === undefined ? [] : [value];
};
|
114 | exports.ensureArray = ensureArray;
|
// Converts an OVF AllocationUnits string to a byte multiplier.
// Supports the "byte * 2^N" power notation as well as unit names
// ("KB", "MegaBytes", …) resolved by their lowercased first letter.
const allocationUnitsToFactor = unit => {
  const power = unit.match(/\^([0-9]+)$/);
  if (power !== null) {
    return Math.pow(2, power[1]);
  }
  return MEMORY_UNIT_TO_FACTOR[unit.charAt(0).toLowerCase()];
};
|
// Post-processes the disk map parsed from the OVF, in place:
//  * drops entries with no usable position (logging an error),
//  * resolves duplicate positions by moving later clashing disks to fresh
//    positions starting past the number of declared disks.
const cleanDisks = disks => {
  const usedPositions = new Set();
  let nextPosition = Object.keys(disks).length;
  for (const diskId of Object.keys(disks)) {
    const { position } = disks[diskId];
    if (position == null) {
      console.error(`No position specified for '${diskId}'.`);
      delete disks[diskId];
      continue;
    }
    if (!usedPositions.has(position)) {
      usedPositions.add(position);
      continue;
    }
    console.warn(`There is at least two disks with position ${position}, we're changing the second one to ${nextPosition}`);
    disks[diskId].position = nextPosition;
    usedPositions.add(nextPosition);
    nextPosition++;
  }
};
|
/**
 * Parses an OVF descriptor (XML) into plain VM metadata:
 * { nameLabel, descriptionLabel, nCpus, memory, networks, disks }.
 *
 * @param fileFragment ParsableFile-like object holding the OVF bytes
 * @param stringDeserializer `(bytes, encoding) => string`
 * @returns Promise resolving with the metadata, rejecting on XML errors
 */
async function parseOVF(fileFragment, stringDeserializer) {
  const xmlString = stringDeserializer(await fileFragment.read(), 'utf-8');
  return new Promise((resolve, reject) =>
    _xml2js.default.parseString(
      xmlString,
      {
        mergeAttrs: true,
        explicitArray: false,
        tagNameProcessors: [_xml2js.processors.stripPrefix],
        attrNameProcessors: [_xml2js.processors.stripPrefix]
      },
      (err, res) => {
        if (err) {
          reject(err);
          return;
        }
        const {
          Envelope: {
            DiskSection: { Disk: disks },
            References: { File: files },
            VirtualSystem: system
          }
        } = res;
        const data = { disks: {}, networks: [] };
        const hardware = system.VirtualHardwareSection;
        data.nameLabel = hardware.System.VirtualSystemIdentifier;
        data.descriptionLabel =
          (system.AnnotationSection && system.AnnotationSection.Annotation) ||
          (system.OperatingSystemSection && system.OperatingSystemSection.Description);
        // Collect the declared virtual disks, resolving each one's backing file.
        for (const disk of ensureArray(disks)) {
          const file = ensureArray(files).find(candidate => candidate.id === disk.fileRef);
          const unit = disk.capacityAllocationUnits;
          data.disks[disk.diskId] = {
            capacity: disk.capacity * ((unit && allocationUnitsToFactor(unit)) || 1),
            path: file && file.href,
            compression: file && file.compression
          };
        }
        // Dispatch every hardware item to its ResourceType-specific handler,
        // silently ignoring resource types we do not handle.
        const handleItem = item => {
          const handler = RESOURCE_TYPE_TO_HANDLER[item.ResourceType];
          if (handler !== undefined) {
            handler(data, item);
          }
        };
        ensureArray(hardware.Item).forEach(handleItem);
        ensureArray(hardware.StorageItem).forEach(handleItem);
        ensureArray(hardware.EthernetPortItem).forEach(handleItem);
        cleanDisks(data.disks);
        resolve(data);
      }
    )
  );
}
|
190 | const GZIP_CHUNK_SIZE = 4 * 1024 * 1024;
|
// Reads the byte range [start, end) — both NEGATIVE, i.e. relative to the end
// of the decompressed stream — out of a gzipped archive member by inflating it
// from the beginning while keeping only a sliding window of recent output.
// `header.fileSize` is the compressed size from the tar header.
// NOTE(review): assumes the retained chunk window always covers `end - start`
// bytes of tail — confirm callers only request small trailing ranges.
async function parseGzipFromEnd(start, end, fileSlice, header) {
  // Length of the requested range (start/end negative with end > start).
  const l = end - start;
  const chunks = [];
  let savedSize = 0;
  let currentDeflatedPos = 0;
  const inflate = new _pako.default.Inflate();
  // Inflate the whole member chunk by chunk, dropping leading output as soon
  // as the remaining chunks still cover the requested tail length.
  while (currentDeflatedPos < header.fileSize) {
    const slice = fileSlice.slice(currentDeflatedPos, currentDeflatedPos + GZIP_CHUNK_SIZE);
    const compressed = await slice.read();
    inflate.push(compressed, _pako.default.Z_SYNC_FLUSH);
    // Copy the inflater's current output so the next push can't clobber it.
    const chunk = inflate.result.slice();
    chunks.push({
      pos: currentDeflatedPos,
      buffer: chunk
    });
    savedSize += chunk.length;
    // Evict the oldest chunk when the rest still hold at least `l` bytes.
    if (savedSize - chunks[0].buffer.length >= l) {
      savedSize -= chunks[0].buffer.length;
      chunks.shift();
    }
    currentDeflatedPos += GZIP_CHUNK_SIZE;
  }
  // Stitch the retained chunks together…
  let resultBuffer = new Uint8Array((0, _sum.default)(chunks.map(c => c.buffer.length)));
  let index = 0;
  chunks.forEach(c => {
    resultBuffer.set(c.buffer, index);
    index += c.buffer.length;
  });
  // …then cut the requested range (Uint8Array#slice accepts negative indices,
  // interpreted relative to the end of the array).
  resultBuffer = resultBuffer.slice(start, end);
  return resultBuffer.buffer;
}
|
// Walks an OVA (tar) archive: parses the embedded OVF descriptor and, unless
// `skipVmdk` is set, kicks off VMDK grain-table reads for each disk image
// (plain .vmdk entries and gzip-compressed .vmdk.gz entries).
// `parsableFile` covers the whole archive; `stringDeserializer(bytes, enc)`
// converts raw bytes to strings. Returns the merged OVF metadata with a
// `tables` map of pending grain-table promises keyed by entry file name.
async function parseOVAFile(parsableFile, stringDeserializer, skipVmdk = false) {
  let offset = 0;
  const HEADER_SIZE = 512;
  let data = {
    tables: {}
  };
  // Iterate tar entries; a header with an empty name marks end-of-archive.
  while (true) {
    const header = parseTarHeader(await parsableFile.slice(offset, offset + HEADER_SIZE).read(), stringDeserializer);
    offset += HEADER_SIZE;
    if (header === null) {
      break;
    }
    const fileSlice = parsableFile.slice(offset, offset + header.fileSize);
    fileSlice.fileName = header.fileName;
    // Skip pax extended headers and hidden entries.
    if (!(header.fileName.startsWith('PaxHeader/') || header.fileName.startsWith('.'))) {
      if (header.fileName.toLowerCase().endsWith('.ovf')) {
        // Merge the parsed OVF metadata into the result.
        const res = await parseOVF(fileSlice, stringDeserializer);
        data = {
          ...data,
          ...res
        };
      }
      if (!skipVmdk && header.fileName.toLowerCase().endsWith('.vmdk')) {
        // Uncompressed disk: a plain ranged reader over the entry suffices.
        const readFile = async (start, end) => fileSlice.slice(start, end).read();
        readFile.fileName = header.fileName;
        data.tables[header.fileName] = (0, _util.suppressUnhandledRejection)((0, _.readVmdkGrainTable)(readFile));
      }
    }
    // NOTE(review): unlike the .vmdk branch above, this .vmdk.gz branch sits
    // OUTSIDE the PaxHeader/hidden-file guard — confirm that is intentional.
    if (!skipVmdk && header.fileName.toLowerCase().endsWith('.vmdk.gz')) {
      // Inflater reused across reads so sequential forward reads are cheap.
      let forwardsInflater = new _pako.default.Inflate();
      // Ranged reader over the DECOMPRESSED stream: non-negative ranges are
      // inflated forwards, negative ranges are resolved from the stream end.
      const readFile = async (start, end) => {
        // Inflates forward until `end`, collecting only output overlapping
        // [start, end); restarts the inflater when asked for earlier data.
        async function parseGzipFromStart(start, end, fileSlice) {
          const chunks = [];
          // Decompressed offset at which the inflater's current result begins.
          const resultStart = () => forwardsInflater.strm.total_out - forwardsInflater.result.length;
          // Requested data lies before what was already consumed: start over.
          if (forwardsInflater.result != null && start < resultStart()) {
            forwardsInflater = new _pako.default.Inflate();
          }
          let isLast = false;
          while (true) {
            if (forwardsInflater.strm.total_out > start) {
              let chunk = forwardsInflater.result;
              // Trim output preceding `start`.
              if (resultStart() < start) {
                chunk = chunk.slice(start - resultStart());
              }
              // Trim output past `end` and stop after this chunk.
              if (forwardsInflater.strm.total_out > end) {
                chunk = chunk.slice(0, -(forwardsInflater.strm.total_out - end));
                isLast = true;
              }
              chunks.push(chunk);
            }
            if (isLast) {
              break;
            }
            // Feed the next compressed chunk, resuming at total_in.
            const slice = fileSlice.slice(forwardsInflater.strm.total_in, forwardsInflater.strm.total_in + GZIP_CHUNK_SIZE);
            forwardsInflater.push(await slice.read(), _pako.default.Z_SYNC_FLUSH);
          }
          // Concatenate the collected output into one contiguous buffer.
          const resultBuffer = new Uint8Array((0, _sum.default)(chunks.map(c => c.length)));
          let index = 0;
          chunks.forEach(c => {
            resultBuffer.set(c, index);
            index += c.length;
          });
          _assert.default.strictEqual(resultBuffer.buffer.byteLength, end - start);
          return resultBuffer.buffer;
        }
        if (start === end) {
          return new Uint8Array(0);
        }
        if (start >= 0 && end >= 0) {
          return parseGzipFromStart(start, end, fileSlice);
        } else if (start < 0 && end < 0) {
          return parseGzipFromEnd(start, end, fileSlice, header);
        }
        // NOTE(review): mixed-sign ranges fall through and resolve to
        // undefined — confirm callers never request such ranges.
      };
      readFile.fileName = header.fileName;
      data.tables[header.fileName] = (0, _util.suppressUnhandledRejection)((0, _.readVmdkGrainTable)(readFile));
    }
    // Entry data is padded to the next 512-byte boundary.
    offset += Math.ceil(header.fileSize / 512) * 512;
  }
  return data;
}
|
303 |
|
\ | No newline at end of file |