1 | "use strict";
|
2 | Object.defineProperty(exports, "__esModule", { value: true });
|
3 | exports.AsarPackager = void 0;
|
4 | const builder_util_1 = require("builder-util");
|
5 | const fs_1 = require("builder-util/out/fs");
|
6 | const fs_2 = require("fs");
|
7 | const promises_1 = require("fs/promises");
|
8 | const path = require("path");
|
9 | const appFileCopier_1 = require("../util/appFileCopier");
|
10 | const asar_1 = require("./asar");
|
11 | const integrity_1 = require("./integrity");
|
12 | const unpackDetector_1 = require("./unpackDetector");
|
13 |
|
14 | const pickle = require("chromium-pickle-js");
|
15 |
|
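// Packs application files into an asar archive (app.asar) and copies files
// that must remain unpacked into the adjacent app.asar.unpacked directory.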
class AsarPackager {
    constructor(src, destination, options, unpackPattern) {
        this.src = src;
        this.destination = destination;
        this.options = options;
        this.unpackPattern = unpackPattern;
        this.fs = new asar_1.AsarFilesystem(this.src);
        this.outFile = path.join(destination, "app.asar");
        this.unpackedDest = `${this.outFile}.unpacked`;
    }
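    // Orders the file sets, registers their files in the in-memory asar
    // filesystem, and writes the resulting app.asar archive.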
    async pack(fileSets, packager) {
        if (this.options.ordering != null) {
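            // Only the first (app) file set is reordered; order() returns the
            // sorted list of file names.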
            fileSets[0].files = await order(fileSets[0].files, this.options.ordering, fileSets[0].src);
        }
        await (0, promises_1.mkdir)(path.dirname(this.outFile), { recursive: true });
        const unpackedFileIndexMap = new Map();
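        // Dependency file sets (fileSets[1..n]) are packed first, the
        // application's own file set (fileSets[0]) last.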
        const orderedFileSets = [
            ...fileSets.slice(1),
            fileSets[0],
        ].map(orderFileSet);
        for (const fileSet of orderedFileSets) {
            unpackedFileIndexMap.set(fileSet, await this.createPackageFromFiles(fileSet, packager.info));
        }
        await this.writeAsarFile(orderedFileSets, unpackedFileIndexMap);
    }
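    // Adds every file of the file set to the in-memory asar filesystem, copies
    // files that must stay outside the archive into app.asar.unpacked, and
    // returns the indexes of the files that were unpacked (including symlinks).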
    async createPackageFromFiles(fileSet, packager) {
        const metadata = fileSet.metadata;
        const unpackedDirs = new Set();
        const rootForAppFilesWithoutAsar = path.join(this.destination, "app");
        if (this.options.smartUnpack !== false) {
            await (0, unpackDetector_1.detectUnpackedDirs)(fileSet, unpackedDirs, this.unpackedDest, rootForAppFilesWithoutAsar);
        }
        const dirToCreateForUnpackedFiles = new Set(unpackedDirs);
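        // Flags a directory node as unpacked when it lies inside an already
        // unpacked directory and creates the corresponding directory under
        // app.asar.unpacked.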
        const correctDirNodeUnpackedFlag = async (filePathInArchive, dirNode) => {
            for (const dir of unpackedDirs) {
                if (filePathInArchive.length > dir.length + 2 && filePathInArchive[dir.length] === path.sep && filePathInArchive.startsWith(dir)) {
                    dirNode.unpacked = true;
                    unpackedDirs.add(filePathInArchive);
                    await (0, promises_1.mkdir)(path.join(this.unpackedDest, filePathInArchive), { recursive: true });
                    break;
                }
            }
        };
        const transformedFiles = fileSet.transformedFiles;
        const taskManager = new builder_util_1.AsyncTaskManager(packager.cancellationToken);
        const fileCopier = new fs_1.FileCopier();
        let currentDirNode = null;
        let currentDirPath = null;
        const unpackedFileIndexSet = new Set();
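        // Add every file to the asar filesystem; files matching the unpack
        // pattern (or living in an unpacked directory) are copied to
        // app.asar.unpacked instead of being embedded in the archive body.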
        for (let i = 0, n = fileSet.files.length; i < n; i++) {
            const file = fileSet.files[i];
            const stat = metadata.get(file);
            if (stat == null) {
                continue;
            }
            const pathInArchive = path.relative(rootForAppFilesWithoutAsar, (0, appFileCopier_1.getDestinationPath)(file, fileSet));
            if (stat.isSymbolicLink()) {
                const s = stat;
                this.fs.getOrCreateNode(pathInArchive).link = s.relativeLink;
                s.pathInArchive = pathInArchive;
                unpackedFileIndexSet.add(i);
                continue;
            }
            let fileParent = path.dirname(pathInArchive);
            if (fileParent === ".") {
                fileParent = "";
            }
            if (currentDirPath !== fileParent) {
                if (fileParent.startsWith("..")) {
                    throw new Error(`Internal error: path must not start with "..": ${fileParent}`);
                }
                currentDirPath = fileParent;
                currentDirNode = this.fs.getOrCreateNode(fileParent);
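                // Skip the root directory; for others, propagate the unpacked
                // flag from any enclosing unpacked directory.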
                if (fileParent !== "" && !currentDirNode.unpacked) {
                    if (unpackedDirs.has(fileParent)) {
                        currentDirNode.unpacked = true;
                    }
                    else {
                        await correctDirNodeUnpackedFlag(fileParent, currentDirNode);
                    }
                }
            }
            const dirNode = currentDirNode;
            const newData = transformedFiles == null ? undefined : transformedFiles.get(i);
            const isUnpacked = dirNode.unpacked || (this.unpackPattern != null && this.unpackPattern(file, stat));
            const integrity = newData === undefined ? await (0, integrity_1.hashFile)(file) : (0, integrity_1.hashFileContents)(newData);
            this.fs.addFileNode(file, dirNode, newData == undefined ? stat.size : Buffer.byteLength(newData), isUnpacked, stat, integrity);
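            // Unpacked files are still registered in the asar header (with size
            // and integrity), but their content is copied next to the archive
            // instead of being written into it.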
            if (isUnpacked) {
                if (!dirNode.unpacked && !dirToCreateForUnpackedFiles.has(fileParent)) {
                    dirToCreateForUnpackedFiles.add(fileParent);
                    await (0, promises_1.mkdir)(path.join(this.unpackedDest, fileParent), { recursive: true });
                }
                const unpackedFile = path.join(this.unpackedDest, pathInArchive);
                taskManager.addTask(copyFileOrData(fileCopier, newData, file, unpackedFile, stat));
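                // Throttle copying: wait for the queued tasks once more than
                // MAX_FILE_REQUESTS copies are pending.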
                if (taskManager.tasks.length > fs_1.MAX_FILE_REQUESTS) {
                    await taskManager.awaitTasks();
                }
                unpackedFileIndexSet.add(i);
            }
        }
        if (taskManager.tasks.length > 0) {
            await taskManager.awaitTasks();
        }
        return unpackedFileIndexSet;
    }
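    // Serializes the header with chromium-pickle-js and streams the content of
    // every packed file into app.asar; unpacked files and symlinks are skipped
    // (symlinks are recreated under app.asar.unpacked instead).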
    writeAsarFile(fileSets, unpackedFileIndexMap) {
        return new Promise((resolve, reject) => {
            const headerPickle = pickle.createEmpty();
            headerPickle.writeString(JSON.stringify(this.fs.header));
            const headerBuf = headerPickle.toBuffer();
            const sizePickle = pickle.createEmpty();
            sizePickle.writeUInt32(headerBuf.length);
            const sizeBuf = sizePickle.toBuffer();
            const writeStream = (0, fs_2.createWriteStream)(this.outFile);
            writeStream.on("error", reject);
            writeStream.on("close", resolve);
            writeStream.write(sizeBuf);
            let fileSetIndex = 0;
            let files = fileSets[0].files;
            let metadata = fileSets[0].metadata;
            let transformedFiles = fileSets[0].transformedFiles;
            let unpackedFileIndexSet = unpackedFileIndexMap.get(fileSets[0]);
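            // Writes file contents sequentially starting at `index`, skipping
            // unpacked entries, recreating symlinks, and moving on to the next
            // file set once the current one is exhausted.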
            const w = (index) => {
                while (true) {
                    if (index >= files.length) {
                        if (++fileSetIndex >= fileSets.length) {
                            writeStream.end();
                            return;
                        }
                        else {
                            files = fileSets[fileSetIndex].files;
                            metadata = fileSets[fileSetIndex].metadata;
                            transformedFiles = fileSets[fileSetIndex].transformedFiles;
                            unpackedFileIndexSet = unpackedFileIndexMap.get(fileSets[fileSetIndex]);
                            index = 0;
                        }
                    }
                    if (!unpackedFileIndexSet.has(index)) {
                        break;
                    }
                    else {
                        const stat = metadata.get(files[index]);
                        if (stat != null && stat.isSymbolicLink()) {
                            (0, fs_2.symlink)(stat.linkRelativeToFile, path.join(this.unpackedDest, stat.pathInArchive), () => w(index + 1));
                            return;
                        }
                    }
                    index++;
                }
                const data = transformedFiles == null ? null : transformedFiles.get(index);
                const file = files[index];
                if (data !== null && data !== undefined) {
                    writeStream.write(data, () => w(index + 1));
                    return;
                }
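                // Small files (< 2 MiB) are buffered with readFile; larger files
                // are piped through a read stream to keep memory usage bounded.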
                const stat = metadata.get(file);
                if (stat != null && stat.size < 2 * 1024 * 1024) {
                    (0, promises_1.readFile)(file)
                        .then(it => {
                            writeStream.write(it, () => w(index + 1));
                        })
                        .catch((e) => reject(`Cannot read file ${file}: ${e.stack || e}`));
                }
                else {
                    const readStream = (0, fs_2.createReadStream)(file);
                    readStream.on("error", reject);
                    readStream.once("end", () => w(index + 1));
                    readStream.on("open", () => {
                        readStream.pipe(writeStream, {
                            end: false,
                        });
                    });
                }
            };
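            // The size pickle was written above; write the header itself, then
            // start streaming file contents at index 0.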
            writeStream.write(headerBuf, () => w(0));
        });
    }
}
exports.AsarPackager = AsarPackager;
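// Reads the ordering file and returns the given file names reordered so that
// files listed there come first; logs how much of the archive the ordering covers.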
async function order(filenames, orderingFile, src) {
    const orderingFiles = (await (0, promises_1.readFile)(orderingFile, "utf8")).split("\n").map(line => {
        if (line.indexOf(":") !== -1) {
            line = line.split(":").pop();
        }
        line = line.trim();
        if (line[0] === "/") {
            line = line.slice(1);
        }
        return line;
    });
    const ordering = [];
    for (const file of orderingFiles) {
        const pathComponents = file.split(path.sep);
        for (const pathComponent of pathComponents) {
            ordering.push(path.join(src, pathComponent));
        }
    }
    const sortedFiles = [];
    let missing = 0;
    const total = filenames.length;
    for (const file of ordering) {
        if (!sortedFiles.includes(file) && filenames.includes(file)) {
            sortedFiles.push(file);
        }
    }
    for (const file of filenames) {
        if (!sortedFiles.includes(file)) {
            sortedFiles.push(file);
            missing += 1;
        }
    }
    builder_util_1.log.info({ coverage: ((total - missing) / total) * 100 }, "ordering files in ASAR archive");
    return sortedFiles;
}
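// Copies the source file to the destination, or writes the already transformed
// content directly when it is available.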
function copyFileOrData(fileCopier, data, source, destination, stats) {
    if (data == null) {
        return fileCopier.copy(source, destination, stats);
    }
    else {
        return (0, promises_1.writeFile)(destination, data);
    }
}
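// Sorts a file set by name with native addons (*.node files) placed last and
// remaps transformedFiles indexes to the new order.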
function orderFileSet(fileSet) {
    const sortedFileEntries = Array.from(fileSet.files.entries());
    sortedFileEntries.sort(([, a], [, b]) => {
        if (a === b) {
            return 0;
        }
        const isAAddon = a.endsWith(".node");
        const isBAddon = b.endsWith(".node");
        if (isAAddon && !isBAddon) {
            return 1;
        }
        if (isBAddon && !isAAddon) {
            return -1;
        }
        return a < b ? -1 : 1;
    });
    let transformedFiles;
    if (fileSet.transformedFiles) {
        transformedFiles = new Map();
        const indexMap = new Map();
        for (const [newIndex, [oldIndex]] of sortedFileEntries.entries()) {
            indexMap.set(oldIndex, newIndex);
        }
        for (const [oldIndex, value] of fileSet.transformedFiles) {
            const newIndex = indexMap.get(oldIndex);
            if (newIndex === undefined) {
                const file = fileSet.files[oldIndex];
                throw new Error(`Internal error: ${file} was lost while ordering asar`);
            }
            transformedFiles.set(newIndex, value);
        }
    }
    const { src, destination, metadata } = fileSet;
    return {
        src,
        destination,
        metadata,
        files: sortedFileEntries.map(([, file]) => file),
        transformedFiles,
    };
}