// asar `lib/disk.js` — retrieved via the UNPKG raw view (3.92 kB, JavaScript).
'use strict'
const fs = require('fs')
const path = require('path')
const mkdirp = require('mkdirp')
const pickle = require('chromium-pickle-js')

const Filesystem = require('./filesystem')
const filesystemCache = {}

const copyFileToSync = function (dest, src, filename) {
  // Copy `filename` (a path relative to `src`) into the same relative
  // location under `dest`, creating intermediate directories and
  // preserving the source file's mode.
  const srcFile = path.join(src, filename)
  const targetFile = path.join(dest, filename)

  const content = fs.readFileSync(srcFile)
  const stats = fs.statSync(srcFile)
  // fs.mkdirSync with `recursive: true` (Node >= 10.12) replaces the
  // third-party mkdirp.sync — like mkdirp, it is a no-op when the
  // directory already exists.
  fs.mkdirSync(path.dirname(targetFile), {recursive: true})
  return fs.writeFileSync(targetFile, content, {mode: stats.mode})
}
19
const writeFileListToStream = function (dest, filesystem, out, list, metadata, callback) {
  // Append each file in `list` to the archive stream `out`, in order.
  // Files flagged `unpack` are not written into the archive; they are
  // copied beside it into "<dest>.unpacked" instead. That path is
  // handled with a loop rather than synchronous self-recursion, so a
  // long run of consecutive unpacked files cannot grow the call stack.
  let index = 0
  while (index < list.length && list[index].unpack) {
    const filename = path.relative(filesystem.src, list[index].filename)
    try {
      copyFileToSync(`${dest}.unpacked`, filesystem.src, filename)
    } catch (error) {
      return callback(error)
    }
    index += 1
  }

  if (index === list.length) {
    // Every entry has been handled; close the archive stream.
    out.end()
    return callback(null)
  }

  // Stream the (possibly transformed) file contents into the archive,
  // then continue with the remainder of the list once this file ends.
  const file = list[index]
  const tr = metadata[file.filename].transformed
  const stream = fs.createReadStream((tr ? tr.path : file.filename))
  stream.pipe(out, {end: false})
  stream.on('error', callback)
  return stream.on('end', function () {
    return writeFileListToStream(dest, filesystem, out, list.slice(index + 1), metadata, callback)
  })
}
46
module.exports.writeFilesystem = function (dest, filesystem, files, metadata, callback) {
  // Write a complete asar archive to `dest`: an 8-byte pickled size
  // prefix, the pickled JSON header, then the concatenated file data.
  // Errors are reported through `callback(error)`.
  let headerBuffer = null
  let sizeBuffer = null
  try {
    const headerPickle = pickle.createEmpty()
    headerPickle.writeString(JSON.stringify(filesystem.header))
    headerBuffer = headerPickle.toBuffer()

    const sizePickle = pickle.createEmpty()
    sizePickle.writeUInt32(headerBuffer.length)
    sizeBuffer = sizePickle.toBuffer()
  } catch (error) {
    return callback(error)
  }

  const out = fs.createWriteStream(dest)
  out.on('error', callback)
  out.write(sizeBuffer)
  // Once the header has been flushed, stream every file's contents in.
  return out.write(headerBuffer, function () {
    return writeFileListToStream(dest, filesystem, out, files, metadata, callback)
  })
}
69
module.exports.readArchiveHeaderSync = function (archive) {
  // Read and parse the JSON header of the asar archive at `archive`.
  // Returns {header, headerSize}; throws if the archive is truncated.
  const fd = fs.openSync(archive, 'r')
  let size
  let headerBuf
  try {
    // The first 8 bytes are a pickle holding the header size (UInt32).
    // Buffer.alloc replaces the deprecated, uninitialized `new Buffer(n)`.
    const sizeBuf = Buffer.alloc(8)
    if (fs.readSync(fd, sizeBuf, 0, 8, null) !== 8) {
      throw new Error('Unable to read header size')
    }

    const sizePickle = pickle.createFromBuffer(sizeBuf)
    size = sizePickle.createIterator().readUInt32()
    headerBuf = Buffer.alloc(size)
    if (fs.readSync(fd, headerBuf, 0, size, null) !== size) {
      throw new Error('Unable to read header')
    }
  } finally {
    // Always release the descriptor, even when a read fails.
    fs.closeSync(fd)
  }

  const headerPickle = pickle.createFromBuffer(headerBuf)
  const header = headerPickle.createIterator().readString()
  return {header: JSON.parse(header), headerSize: size}
}
94
module.exports.readFilesystemSync = function (archive) {
  // Build a Filesystem view of `archive`, serving repeated requests
  // for the same path from the module-level cache.
  let filesystem = filesystemCache[archive]
  if (!filesystem) {
    const {header, headerSize} = this.readArchiveHeaderSync(archive)
    filesystem = new Filesystem(archive)
    filesystem.header = header
    filesystem.headerSize = headerSize
    filesystemCache[archive] = filesystem
  }
  return filesystem
}
105
module.exports.readFileSync = function (filesystem, filename, info) {
  // Return the contents of `filename` as a Buffer. `info` carries the
  // entry's {size, offset, unpacked} from the archive header.
  // Node throws when reading 0 bytes into a 0-size buffer, so we
  // short-circuit empty files here.
  if (info.size <= 0) { return Buffer.alloc(0) }
  if (info.unpacked) {
    // The file lives beside the archive in "<src>.unpacked"; read it directly.
    return fs.readFileSync(path.join(`${filesystem.src}.unpacked`, filename))
  }
  // Buffer.alloc replaces the deprecated, uninitialized `new Buffer(n)`.
  const buffer = Buffer.alloc(info.size)
  const fd = fs.openSync(filesystem.src, 'r')
  try {
    // File data starts after the 8-byte size pickle and the JSON header.
    const offset = 8 + filesystem.headerSize + parseInt(info.offset, 10)
    fs.readSync(fd, buffer, 0, info.size, offset)
  } finally {
    fs.closeSync(fd)
  }
  return buffer
}