UNPKG

93.5 kBJavaScriptView Raw
1"use strict";
2Object.defineProperty(exports, "__esModule", { value: true });
3exports.Project = void 0;
4const tslib_1 = require("tslib");
5const fslib_1 = require("@yarnpkg/fslib");
6const parsers_1 = require("@yarnpkg/parsers");
7const clipanion_1 = require("clipanion");
8const crypto_1 = require("crypto");
9const diff_1 = require("diff");
10// @ts-expect-error
11const logic_solver_1 = tslib_1.__importDefault(require("logic-solver"));
12const p_limit_1 = tslib_1.__importDefault(require("p-limit"));
13const semver_1 = tslib_1.__importDefault(require("semver"));
14const util_1 = require("util");
15const v8_1 = tslib_1.__importDefault(require("v8"));
16const zlib_1 = tslib_1.__importDefault(require("zlib"));
17const Configuration_1 = require("./Configuration");
18const Installer_1 = require("./Installer");
19const LegacyMigrationResolver_1 = require("./LegacyMigrationResolver");
20const LockfileResolver_1 = require("./LockfileResolver");
21const Manifest_1 = require("./Manifest");
22const MessageName_1 = require("./MessageName");
23const MultiResolver_1 = require("./MultiResolver");
24const Report_1 = require("./Report");
25const RunInstallPleaseResolver_1 = require("./RunInstallPleaseResolver");
26const ThrowReport_1 = require("./ThrowReport");
27const Workspace_1 = require("./Workspace");
28const folderUtils_1 = require("./folderUtils");
29const hashUtils = tslib_1.__importStar(require("./hashUtils"));
30const miscUtils = tslib_1.__importStar(require("./miscUtils"));
31const scriptUtils = tslib_1.__importStar(require("./scriptUtils"));
32const semverUtils = tslib_1.__importStar(require("./semverUtils"));
33const structUtils = tslib_1.__importStar(require("./structUtils"));
34const types_1 = require("./types");
// Lockfile schema version. When bumped, existing lockfile entries get resolved
// again on the next install (the pinned versions themselves are kept, so no
// worry). Bump it whenever the fields stored in the Package type change; no
// more, no less.
const LOCKFILE_VERSION = 4;
// Install-state schema version. Same idea, but must be bumped whenever the
// members of the Project class change (we don't recommend users check this
// file in, so bumping it even between patch or minor releases is fine).
const INSTALL_STATE_VERSION = 1;
// Splits comma-separated lockfile keys ("foo@^1.0.0, foo@~1.2.0") into entries.
const MULTIPLE_KEYS_REGEXP = / *, */g;
// Strips a single trailing slash before path comparisons.
const TRAILING_SLASH_REGEXP = /\/$/;
// Maximum number of concurrent package fetches.
const FETCHER_CONCURRENCY = 32;
// Promise-returning wrappers around the callback-style zlib API.
const gzip = util_1.promisify(zlib_1.default.gzip);
const gunzip = util_1.promisify(zlib_1.default.gunzip);
48class Project {
49 constructor(projectCwd, { configuration }) {
50 /**
51 * Is meant to be populated by the consumer. Should the descriptor referenced
52 * by the key be requested, the descriptor referenced in the value will be
53 * resolved instead. The resolved data will then be used as final resolution
54 * for the initial descriptor.
55 *
56 * Note that the lockfile will contain the second descriptor but not the
57 * first one (meaning that if you remove the alias during a subsequent
58 * install, it'll be lost and the real package will be resolved / installed).
59 */
60 this.resolutionAliases = new Map();
61 this.workspaces = [];
62 this.workspacesByCwd = new Map();
63 this.workspacesByIdent = new Map();
64 this.storedResolutions = new Map();
65 this.storedDescriptors = new Map();
66 this.storedPackages = new Map();
67 this.storedChecksums = new Map();
68 this.accessibleLocators = new Set();
69 this.originalPackages = new Map();
70 this.optionalBuilds = new Set();
71 this.lockFileChecksum = null;
72 this.configuration = configuration;
73 this.cwd = projectCwd;
74 }
75 static async find(configuration, startingCwd) {
76 var _a, _b, _c;
77 if (!configuration.projectCwd)
78 throw new clipanion_1.UsageError(`No project found in ${startingCwd}`);
79 let packageCwd = configuration.projectCwd;
80 let nextCwd = startingCwd;
81 let currentCwd = null;
82 while (currentCwd !== configuration.projectCwd) {
83 currentCwd = nextCwd;
84 if (fslib_1.xfs.existsSync(fslib_1.ppath.join(currentCwd, fslib_1.Filename.manifest))) {
85 packageCwd = currentCwd;
86 break;
87 }
88 nextCwd = fslib_1.ppath.dirname(currentCwd);
89 }
90 const project = new Project(configuration.projectCwd, { configuration });
91 (_a = Configuration_1.Configuration.telemetry) === null || _a === void 0 ? void 0 : _a.reportProject(project.cwd);
92 await project.setupResolutions();
93 await project.setupWorkspaces();
94 (_b = Configuration_1.Configuration.telemetry) === null || _b === void 0 ? void 0 : _b.reportWorkspaceCount(project.workspaces.length);
95 (_c = Configuration_1.Configuration.telemetry) === null || _c === void 0 ? void 0 : _c.reportDependencyCount(project.workspaces.reduce((sum, workspace) => sum + workspace.manifest.dependencies.size + workspace.manifest.devDependencies.size, 0));
96 // If we're in a workspace, no need to go any further to find which package we're in
97 const workspace = project.tryWorkspaceByCwd(packageCwd);
98 if (workspace)
99 return { project, workspace, locator: workspace.anchoredLocator };
100 // Otherwise, we need to ask the project (which will in turn ask the linkers for help)
101 // Note: the trailing slash is caused by a quirk in the PnP implementation that requires folders to end with a trailing slash to disambiguate them from regular files
102 const locator = await project.findLocatorForLocation(`${packageCwd}/`, { strict: true });
103 if (locator)
104 return { project, locator, workspace: null };
105 throw new clipanion_1.UsageError(`The nearest package directory (${configuration.format(packageCwd, Configuration_1.FormatType.PATH)}) doesn't seem to be part of the project declared in ${configuration.format(project.cwd, Configuration_1.FormatType.PATH)}.\n\n- If the project directory is right, it might be that you forgot to list ${configuration.format(fslib_1.ppath.relative(project.cwd, packageCwd), Configuration_1.FormatType.PATH)} as a workspace.\n- If it isn't, it's likely because you have a yarn.lock or package.json file there, confusing the project root detection.`);
106 }
107 static generateBuildStateFile(buildState, locatorStore) {
108 let bstateFile = `# Warning: This file is automatically generated. Removing it is fine, but will\n# cause all your builds to become invalidated.\n`;
109 const bstateData = [...buildState].map(([locatorHash, hash]) => {
110 const locator = locatorStore.get(locatorHash);
111 if (typeof locator === `undefined`)
112 throw new Error(`Assertion failed: The locator should have been registered`);
113 return [structUtils.stringifyLocator(locator), locator.locatorHash, hash];
114 });
115 for (const [locatorString, locatorHash, buildHash] of miscUtils.sortMap(bstateData, [d => d[0], d => d[1]])) {
116 bstateFile += `\n`;
117 bstateFile += `# ${locatorString}\n`;
118 bstateFile += `${JSON.stringify(locatorHash)}:\n`;
119 bstateFile += ` ${buildHash}\n`;
120 }
121 return bstateFile;
122 }
123 async setupResolutions() {
124 this.storedResolutions = new Map();
125 this.storedDescriptors = new Map();
126 this.storedPackages = new Map();
127 this.lockFileChecksum = null;
128 const lockfilePath = fslib_1.ppath.join(this.cwd, this.configuration.get(`lockfileFilename`));
129 const defaultLanguageName = this.configuration.get(`defaultLanguageName`);
130 if (fslib_1.xfs.existsSync(lockfilePath)) {
131 const content = await fslib_1.xfs.readFilePromise(lockfilePath, `utf8`);
132 // We store the salted checksum of the lockfile in order to invalidate the install state when needed
133 this.lockFileChecksum = hashUtils.makeHash(`${INSTALL_STATE_VERSION}`, content);
134 const parsed = parsers_1.parseSyml(content);
135 // Protects against v1 lockfiles
136 if (parsed.__metadata) {
137 const lockfileVersion = parsed.__metadata.version;
138 const cacheKey = parsed.__metadata.cacheKey;
139 for (const key of Object.keys(parsed)) {
140 if (key === `__metadata`)
141 continue;
142 const data = parsed[key];
143 if (typeof data.resolution === `undefined`)
144 throw new Error(`Assertion failed: Expected the lockfile entry to have a resolution field (${key})`);
145 const locator = structUtils.parseLocator(data.resolution, true);
146 const manifest = new Manifest_1.Manifest();
147 manifest.load(data);
148 const version = manifest.version;
149 const languageName = manifest.languageName || defaultLanguageName;
150 const linkType = data.linkType.toUpperCase();
151 const dependencies = manifest.dependencies;
152 const peerDependencies = manifest.peerDependencies;
153 const dependenciesMeta = manifest.dependenciesMeta;
154 const peerDependenciesMeta = manifest.peerDependenciesMeta;
155 const bin = manifest.bin;
156 if (data.checksum != null) {
157 const checksum = typeof cacheKey !== `undefined` && !data.checksum.includes(`/`)
158 ? `${cacheKey}/${data.checksum}`
159 : data.checksum;
160 this.storedChecksums.set(locator.locatorHash, checksum);
161 }
162 if (lockfileVersion >= LOCKFILE_VERSION) {
163 const pkg = { ...locator, version, languageName, linkType, dependencies, peerDependencies, dependenciesMeta, peerDependenciesMeta, bin };
164 this.originalPackages.set(pkg.locatorHash, pkg);
165 }
166 for (const entry of key.split(MULTIPLE_KEYS_REGEXP)) {
167 const descriptor = structUtils.parseDescriptor(entry);
168 this.storedDescriptors.set(descriptor.descriptorHash, descriptor);
169 if (lockfileVersion >= LOCKFILE_VERSION) {
170 // If the lockfile is up-to-date, we can simply register the
171 // resolution as a done deal.
172 this.storedResolutions.set(descriptor.descriptorHash, locator.locatorHash);
173 }
174 else {
175 // But if it isn't, then we instead setup an alias so that the
176 // descriptor will be re-resolved (so that we get to retrieve the
177 // new fields) while still resolving to the same locators.
178 const resolutionDescriptor = structUtils.convertLocatorToDescriptor(locator);
179 if (resolutionDescriptor.descriptorHash !== descriptor.descriptorHash) {
180 this.storedDescriptors.set(resolutionDescriptor.descriptorHash, resolutionDescriptor);
181 this.resolutionAliases.set(descriptor.descriptorHash, resolutionDescriptor.descriptorHash);
182 }
183 }
184 }
185 }
186 }
187 }
188 }
189 async setupWorkspaces() {
190 this.workspaces = [];
191 this.workspacesByCwd = new Map();
192 this.workspacesByIdent = new Map();
193 let workspaceCwds = [this.cwd];
194 while (workspaceCwds.length > 0) {
195 const passCwds = workspaceCwds;
196 workspaceCwds = [];
197 for (const workspaceCwd of passCwds) {
198 if (this.workspacesByCwd.has(workspaceCwd))
199 continue;
200 const workspace = await this.addWorkspace(workspaceCwd);
201 const workspacePkg = this.storedPackages.get(workspace.anchoredLocator.locatorHash);
202 if (workspacePkg)
203 workspace.dependencies = workspacePkg.dependencies;
204 for (const workspaceCwd of workspace.workspacesCwds) {
205 workspaceCwds.push(workspaceCwd);
206 }
207 }
208 }
209 }
210 async addWorkspace(workspaceCwd) {
211 const workspace = new Workspace_1.Workspace(workspaceCwd, { project: this });
212 await workspace.setup();
213 const dup = this.workspacesByIdent.get(workspace.locator.identHash);
214 if (typeof dup !== `undefined`)
215 throw new Error(`Duplicate workspace name ${structUtils.prettyIdent(this.configuration, workspace.locator)}: ${workspaceCwd} conflicts with ${dup.cwd}`);
216 this.workspaces.push(workspace);
217 this.workspacesByCwd.set(workspaceCwd, workspace);
218 this.workspacesByIdent.set(workspace.locator.identHash, workspace);
219 return workspace;
220 }
221 get topLevelWorkspace() {
222 return this.getWorkspaceByCwd(this.cwd);
223 }
224 tryWorkspaceByCwd(workspaceCwd) {
225 if (!fslib_1.ppath.isAbsolute(workspaceCwd))
226 workspaceCwd = fslib_1.ppath.resolve(this.cwd, workspaceCwd);
227 const workspace = this.workspacesByCwd.get(workspaceCwd);
228 if (!workspace)
229 return null;
230 return workspace;
231 }
232 getWorkspaceByCwd(workspaceCwd) {
233 const workspace = this.tryWorkspaceByCwd(workspaceCwd);
234 if (!workspace)
235 throw new Error(`Workspace not found (${workspaceCwd})`);
236 return workspace;
237 }
238 tryWorkspaceByFilePath(filePath) {
239 let bestWorkspace = null;
240 for (const workspace of this.workspaces) {
241 const rel = fslib_1.ppath.relative(workspace.cwd, filePath);
242 if (rel.startsWith(`../`))
243 continue;
244 if (bestWorkspace && bestWorkspace.cwd.length >= workspace.cwd.length)
245 continue;
246 bestWorkspace = workspace;
247 }
248 if (!bestWorkspace)
249 return null;
250 return bestWorkspace;
251 }
252 getWorkspaceByFilePath(filePath) {
253 const workspace = this.tryWorkspaceByFilePath(filePath);
254 if (!workspace)
255 throw new Error(`Workspace not found (${filePath})`);
256 return workspace;
257 }
258 tryWorkspaceByIdent(ident) {
259 const workspace = this.workspacesByIdent.get(ident.identHash);
260 if (typeof workspace === `undefined`)
261 return null;
262 return workspace;
263 }
264 getWorkspaceByIdent(ident) {
265 const workspace = this.tryWorkspaceByIdent(ident);
266 if (!workspace)
267 throw new Error(`Workspace not found (${structUtils.prettyIdent(this.configuration, ident)})`);
268 return workspace;
269 }
270 tryWorkspaceByDescriptor(descriptor) {
271 const workspace = this.tryWorkspaceByIdent(descriptor);
272 if (workspace === null || !workspace.accepts(descriptor.range))
273 return null;
274 return workspace;
275 }
276 getWorkspaceByDescriptor(descriptor) {
277 const workspace = this.tryWorkspaceByDescriptor(descriptor);
278 if (workspace === null)
279 throw new Error(`Workspace not found (${structUtils.prettyDescriptor(this.configuration, descriptor)})`);
280 return workspace;
281 }
282 tryWorkspaceByLocator(locator) {
283 if (structUtils.isVirtualLocator(locator))
284 locator = structUtils.devirtualizeLocator(locator);
285 const workspace = this.tryWorkspaceByIdent(locator);
286 if (workspace === null || (workspace.locator.locatorHash !== locator.locatorHash && workspace.anchoredLocator.locatorHash !== locator.locatorHash))
287 return null;
288 return workspace;
289 }
290 getWorkspaceByLocator(locator) {
291 const workspace = this.tryWorkspaceByLocator(locator);
292 if (!workspace)
293 throw new Error(`Workspace not found (${structUtils.prettyLocator(this.configuration, locator)})`);
294 return workspace;
295 }
296 /**
297 * Import the dependencies of each resolved workspace into their own
298 * `Workspace` instance.
299 */
300 refreshWorkspaceDependencies() {
301 for (const workspace of this.workspaces) {
302 const pkg = this.storedPackages.get(workspace.anchoredLocator.locatorHash);
303 if (!pkg)
304 throw new Error(`Assertion failed: Expected workspace to have been resolved`);
305 workspace.dependencies = new Map(pkg.dependencies);
306 }
307 }
308 forgetResolution(dataStructure) {
309 const deleteDescriptor = (descriptorHash) => {
310 this.storedResolutions.delete(descriptorHash);
311 this.storedDescriptors.delete(descriptorHash);
312 };
313 const deleteLocator = (locatorHash) => {
314 this.originalPackages.delete(locatorHash);
315 this.storedPackages.delete(locatorHash);
316 this.accessibleLocators.delete(locatorHash);
317 };
318 if (`descriptorHash` in dataStructure) {
319 const locatorHash = this.storedResolutions.get(dataStructure.descriptorHash);
320 deleteDescriptor(dataStructure.descriptorHash);
321 // We delete unused locators
322 const remainingResolutions = new Set(this.storedResolutions.values());
323 if (typeof locatorHash !== `undefined` && !remainingResolutions.has(locatorHash)) {
324 deleteLocator(locatorHash);
325 }
326 }
327 if (`locatorHash` in dataStructure) {
328 deleteLocator(dataStructure.locatorHash);
329 // We delete all of the descriptors that have been resolved to the locator
330 for (const [descriptorHash, locatorHash] of this.storedResolutions) {
331 if (locatorHash === dataStructure.locatorHash) {
332 deleteDescriptor(descriptorHash);
333 }
334 }
335 }
336 }
337 forgetTransientResolutions() {
338 const resolver = this.configuration.makeResolver();
339 for (const pkg of this.originalPackages.values()) {
340 let shouldPersistResolution;
341 try {
342 shouldPersistResolution = resolver.shouldPersistResolution(pkg, { project: this, resolver });
343 }
344 catch (_a) {
345 shouldPersistResolution = false;
346 }
347 if (!shouldPersistResolution) {
348 this.forgetResolution(pkg);
349 }
350 }
351 }
352 forgetVirtualResolutions() {
353 for (const pkg of this.storedPackages.values()) {
354 for (const [dependencyHash, dependency] of pkg.dependencies) {
355 if (structUtils.isVirtualDescriptor(dependency)) {
356 pkg.dependencies.set(dependencyHash, structUtils.devirtualizeDescriptor(dependency));
357 }
358 }
359 }
360 }
361 getDependencyMeta(ident, version) {
362 const dependencyMeta = {};
363 const dependenciesMeta = this.topLevelWorkspace.manifest.dependenciesMeta;
364 const dependencyMetaSet = dependenciesMeta.get(structUtils.stringifyIdent(ident));
365 if (!dependencyMetaSet)
366 return dependencyMeta;
367 const defaultMeta = dependencyMetaSet.get(null);
368 if (defaultMeta)
369 Object.assign(dependencyMeta, defaultMeta);
370 if (version === null || !semver_1.default.valid(version))
371 return dependencyMeta;
372 for (const [range, meta] of dependencyMetaSet)
373 if (range !== null && range === version)
374 Object.assign(dependencyMeta, meta);
375 return dependencyMeta;
376 }
377 async findLocatorForLocation(cwd, { strict = false } = {}) {
378 const report = new ThrowReport_1.ThrowReport();
379 const linkers = this.configuration.getLinkers();
380 const linkerOptions = { project: this, report };
381 for (const linker of linkers) {
382 const locator = await linker.findPackageLocator(cwd, linkerOptions);
383 if (locator) {
384 // If strict mode, the specified cwd must be a package,
385 // not merely contained in a package.
386 if (strict) {
387 const location = await linker.findPackageLocation(locator, linkerOptions);
388 if (location.replace(TRAILING_SLASH_REGEXP, ``) !== cwd.replace(TRAILING_SLASH_REGEXP, ``)) {
389 continue;
390 }
391 }
392 return locator;
393 }
394 }
395 return null;
396 }
397 async validateEverything(opts) {
398 for (const warning of opts.validationWarnings)
399 opts.report.reportWarning(warning.name, warning.text);
400 for (const error of opts.validationErrors) {
401 opts.report.reportError(error.name, error.text);
402 }
403 }
404 async resolveEverything(opts) {
405 if (!this.workspacesByCwd || !this.workspacesByIdent)
406 throw new Error(`Workspaces must have been setup before calling this function`);
407 // Reverts the changes that have been applied to the tree because of any previous virtual resolution pass
408 this.forgetVirtualResolutions();
409 // Ensures that we notice it when dependencies are added / removed from all sources coming from the filesystem
410 if (!opts.lockfileOnly)
411 this.forgetTransientResolutions();
412 // Note that the resolution process is "offline" until everything has been
413 // successfully resolved; all the processing is expected to have zero side
414 // effects until we're ready to set all the variables at once (the one
415 // exception being when a resolver needs to fetch a package, in which case
416 // we might need to populate the cache).
417 //
418 // This makes it possible to use the same Project instance for multiple
419 // purposes at the same time (since `resolveEverything` is async, it might
420 // happen that we want to do something while waiting for it to end; if we
421 // were to mutate the project then it would end up in a partial state that
422 // could lead to hard-to-debug issues).
423 const realResolver = opts.resolver || this.configuration.makeResolver();
424 const legacyMigrationResolver = new LegacyMigrationResolver_1.LegacyMigrationResolver();
425 await legacyMigrationResolver.setup(this, { report: opts.report });
426 const resolver = opts.lockfileOnly
427 ? new MultiResolver_1.MultiResolver([new LockfileResolver_1.LockfileResolver(), new RunInstallPleaseResolver_1.RunInstallPleaseResolver(realResolver)])
428 : new MultiResolver_1.MultiResolver([new LockfileResolver_1.LockfileResolver(), legacyMigrationResolver, realResolver]);
429 const fetcher = this.configuration.makeFetcher();
430 const resolveOptions = opts.lockfileOnly
431 ? { project: this, report: opts.report, resolver }
432 : { project: this, report: opts.report, resolver, fetchOptions: { project: this, cache: opts.cache, checksums: this.storedChecksums, report: opts.report, fetcher } };
433 const allDescriptors = new Map();
434 const allPackages = new Map();
435 const allResolutions = new Map();
436 const originalPackages = new Map();
437 const resolutionDependencies = new Map();
438 const haveBeenAliased = new Set();
439 let nextResolutionPass = new Set();
440 for (const workspace of this.workspaces) {
441 const workspaceDescriptor = workspace.anchoredDescriptor;
442 allDescriptors.set(workspaceDescriptor.descriptorHash, workspaceDescriptor);
443 nextResolutionPass.add(workspaceDescriptor.descriptorHash);
444 }
445 while (nextResolutionPass.size !== 0) {
446 const currentResolutionPass = nextResolutionPass;
447 nextResolutionPass = new Set();
448 // We remove from the "mustBeResolved" list all packages that have
449 // already been resolved previously.
450 for (const descriptorHash of currentResolutionPass)
451 if (allResolutions.has(descriptorHash))
452 currentResolutionPass.delete(descriptorHash);
453 if (currentResolutionPass.size === 0)
454 break;
455 // We check that the resolution dependencies have been resolved for all
456 // descriptors that we're about to resolve. Buffalo buffalo buffalo
457 // buffalo.
458 const deferredResolutions = new Set();
459 const resolvedDependencies = new Map();
460 for (const descriptorHash of currentResolutionPass) {
461 const descriptor = allDescriptors.get(descriptorHash);
462 if (!descriptor)
463 throw new Error(`Assertion failed: The descriptor should have been registered`);
464 let dependencies = resolutionDependencies.get(descriptorHash);
465 if (typeof dependencies === `undefined`) {
466 resolutionDependencies.set(descriptorHash, dependencies = new Set());
467 for (const dependency of resolver.getResolutionDependencies(descriptor, resolveOptions)) {
468 allDescriptors.set(dependency.descriptorHash, dependency);
469 dependencies.add(dependency.descriptorHash);
470 }
471 }
472 const resolved = miscUtils.getMapWithDefault(resolvedDependencies, descriptorHash);
473 for (const dependencyHash of dependencies) {
474 const resolution = allResolutions.get(dependencyHash);
475 if (typeof resolution !== `undefined`) {
476 const dependencyPkg = allPackages.get(resolution);
477 if (typeof dependencyPkg === `undefined`)
478 throw new Error(`Assertion failed: The package should have been registered`);
479 // The dependency is ready. We register it into the map so
480 // that we can pass that to getCandidates right after.
481 resolved.set(dependencyHash, dependencyPkg);
482 }
483 else {
484 // One of the resolution dependencies of this descriptor is
485 // missing; we need to postpone its resolution for now.
486 deferredResolutions.add(descriptorHash);
487 // For this pass however we'll want to schedule the resolution
488 // of the dependency (so that it's probably ready next pass).
489 currentResolutionPass.add(dependencyHash);
490 }
491 }
492 }
493 // Note: we're postponing the resolution only once we already know all
494 // those that are going to be postponed. This way we can detect
495 // potential cyclic dependencies.
496 for (const descriptorHash of deferredResolutions) {
497 currentResolutionPass.delete(descriptorHash);
498 nextResolutionPass.add(descriptorHash);
499 }
500 if (currentResolutionPass.size === 0)
501 throw new Error(`Assertion failed: Descriptors should not have cyclic dependencies`);
502 // Then we request the resolvers for the list of possible references that
503 // match the given ranges. That will give us a set of candidate references
504 // for each descriptor.
505 const passCandidates = new Map(await Promise.all(Array.from(currentResolutionPass).map(async (descriptorHash) => {
506 const descriptor = allDescriptors.get(descriptorHash);
507 if (typeof descriptor === `undefined`)
508 throw new Error(`Assertion failed: The descriptor should have been registered`);
509 const descriptorDependencies = resolvedDependencies.get(descriptor.descriptorHash);
510 if (typeof descriptorDependencies === `undefined`)
511 throw new Error(`Assertion failed: The descriptor dependencies should have been registered`);
512 let candidateLocators;
513 try {
514 candidateLocators = await resolver.getCandidates(descriptor, descriptorDependencies, resolveOptions);
515 }
516 catch (error) {
517 error.message = `${structUtils.prettyDescriptor(this.configuration, descriptor)}: ${error.message}`;
518 throw error;
519 }
520 if (candidateLocators.length === 0)
521 throw new Error(`No candidate found for ${structUtils.prettyDescriptor(this.configuration, descriptor)}`);
522 return [descriptor.descriptorHash, candidateLocators];
523 })));
524 // That's where we'll store our resolutions until everything has been
525 // resolved and can be injected into the various stores.
526 //
527 // The reason we're storing them in a temporary store instead of writing
528 // them directly into the global ones is that otherwise we would end up
529 // with different store orderings between dependency loaded from a
530 // lockfiles and those who don't (when using a lockfile all descriptors
531 // will fall into the next shortcut, but when no lockfile is there only
532 // some of them will; since maps are sorted by insertion, it would affect
533 // the way they would be ordered).
534 const passResolutions = new Map();
535 // We now make a pre-pass to automatically resolve the descriptors that
536 // can only be satisfied by a single reference.
537 for (const [descriptorHash, candidateLocators] of passCandidates) {
538 if (candidateLocators.length !== 1)
539 continue;
540 passResolutions.set(descriptorHash, candidateLocators[0]);
541 passCandidates.delete(descriptorHash);
542 }
543 // We make a second pre-pass to automatically resolve the descriptors
544 // that can be satisfied by a package we're already using (deduplication).
545 for (const [descriptorHash, candidateLocators] of passCandidates) {
546 const selectedLocator = candidateLocators.find(locator => allPackages.has(locator.locatorHash));
547 if (!selectedLocator)
548 continue;
549 passResolutions.set(descriptorHash, selectedLocator);
550 passCandidates.delete(descriptorHash);
551 }
552 // All entries that remain in "passCandidates" are from descriptors that
553 // we haven't been able to resolve in the first place. We'll now configure
554 // our SAT solver so that it can figure it out for us. To do this, we
555 // simply add a constraint for each descriptor that lists all the
556 // descriptors it would accept. We don't have to check whether the
557 // locators obtained have already been selected, because if they were the
558 // would have been resolved in the previous step (we never backtrace to
559 // try to find better solutions, it would be a too expensive process - we
560 // just want to get an acceptable solution, not the very best one).
561 if (passCandidates.size > 0) {
562 const solver = new logic_solver_1.default.Solver();
563 for (const candidateLocators of passCandidates.values())
564 solver.require(logic_solver_1.default.or(...candidateLocators.map(locator => locator.locatorHash)));
565 let remainingSolutions = 100;
566 let solution;
567 let bestSolution = null;
568 let bestScore = Infinity;
569 while (remainingSolutions > 0 && (solution = solver.solve()) !== null) {
570 const trueVars = solution.getTrueVars();
571 solver.forbid(solution.getFormula());
572 if (trueVars.length < bestScore) {
573 bestSolution = trueVars;
574 bestScore = trueVars.length;
575 }
576 remainingSolutions -= 1;
577 }
578 if (!bestSolution)
579 throw new Error(`Assertion failed: No resolution found by the SAT solver`);
580 const solutionSet = new Set(bestSolution);
581 for (const [descriptorHash, candidateLocators] of passCandidates.entries()) {
582 const selectedLocator = candidateLocators.find(locator => solutionSet.has(locator.locatorHash));
583 if (!selectedLocator)
584 throw new Error(`Assertion failed: The descriptor should have been solved during the previous step`);
585 passResolutions.set(descriptorHash, selectedLocator);
586 passCandidates.delete(descriptorHash);
587 }
588 }
589 // We now iterate over the locators we've got and, for each of them that
590 // hasn't been seen before, we fetch its dependency list and schedule
591 // them for the next cycle.
592 const newLocators = Array.from(passResolutions.values()).filter(locator => {
593 return !allPackages.has(locator.locatorHash);
594 });
595 const newPackages = new Map(await Promise.all(newLocators.map(async (locator) => {
596 const original = await miscUtils.prettifyAsyncErrors(async () => {
597 return await resolver.resolve(locator, resolveOptions);
598 }, message => {
599 return `${structUtils.prettyLocator(this.configuration, locator)}: ${message}`;
600 });
601 if (!structUtils.areLocatorsEqual(locator, original))
602 throw new Error(`Assertion failed: The locator cannot be changed by the resolver (went from ${structUtils.prettyLocator(this.configuration, locator)} to ${structUtils.prettyLocator(this.configuration, original)})`);
603 const pkg = this.configuration.normalizePackage(original);
604 for (const [identHash, descriptor] of pkg.dependencies) {
605 const dependency = await this.configuration.reduceHook(hooks => {
606 return hooks.reduceDependency;
607 }, descriptor, this, pkg, descriptor, {
608 resolver,
609 resolveOptions,
610 });
611 if (!structUtils.areIdentsEqual(descriptor, dependency))
612 throw new Error(`Assertion failed: The descriptor ident cannot be changed through aliases`);
613 const bound = resolver.bindDescriptor(dependency, locator, resolveOptions);
614 pkg.dependencies.set(identHash, bound);
615 }
616 return [pkg.locatorHash, { original, pkg }];
617 })));
618 // Now that the resolution is finished, we can finally insert the data
619 // stored inside our pass stores into the resolution ones (we now have
620 // the guarantee that they'll always be inserted into in the same order,
621 // since mustBeResolved is stable regardless of the order in which the
622 // resolvers return)
623 for (const descriptorHash of currentResolutionPass) {
624 const locator = passResolutions.get(descriptorHash);
625 if (!locator)
626 throw new Error(`Assertion failed: The locator should have been registered`);
627 allResolutions.set(descriptorHash, locator.locatorHash);
628 // If undefined it means that the package was already known and thus
629 // didn't need to be resolved again.
630 const resolutionEntry = newPackages.get(locator.locatorHash);
631 if (typeof resolutionEntry === `undefined`)
632 continue;
633 const { original, pkg } = resolutionEntry;
634 originalPackages.set(original.locatorHash, original);
635 allPackages.set(pkg.locatorHash, pkg);
636 for (const descriptor of pkg.dependencies.values()) {
637 allDescriptors.set(descriptor.descriptorHash, descriptor);
638 nextResolutionPass.add(descriptor.descriptorHash);
639 // We must check and make sure that the descriptor didn't get aliased
640 // to something else
641 const aliasHash = this.resolutionAliases.get(descriptor.descriptorHash);
642 if (aliasHash === undefined)
643 continue;
644 // It doesn't cost us much to support the case where a descriptor is
645 // equal to its own alias (which should mean "no alias")
646 if (descriptor.descriptorHash === aliasHash)
647 continue;
648 const alias = this.storedDescriptors.get(aliasHash);
649 if (!alias)
650 throw new Error(`Assertion failed: The alias should have been registered`);
651 // If it's already been "resolved" (in reality it will be the temporary
652 // resolution we've set in the next few lines) we simply must skip it
653 if (allResolutions.has(descriptor.descriptorHash))
654 continue;
655 // Temporarily set an invalid resolution so that it won't be resolved
656 // multiple times if it is found multiple times in the dependency
657 // tree (this is only temporary, we will replace it by the actual
658 // resolution after we've finished resolving everything)
659 allResolutions.set(descriptor.descriptorHash, `temporary`);
660 // We can now replace the descriptor by its alias in the list of
661 // descriptors that must be resolved
662 nextResolutionPass.delete(descriptor.descriptorHash);
663 nextResolutionPass.add(aliasHash);
664 allDescriptors.set(aliasHash, alias);
665 haveBeenAliased.add(descriptor.descriptorHash);
666 }
667 }
668 }
669 // Each package that should have been resolved but was skipped because it
670 // was aliased will now see the resolution for its alias propagated to it
671 while (haveBeenAliased.size > 0) {
672 let hasChanged = false;
673 for (const descriptorHash of haveBeenAliased) {
674 const descriptor = allDescriptors.get(descriptorHash);
675 if (!descriptor)
676 throw new Error(`Assertion failed: The descriptor should have been registered`);
677 const aliasHash = this.resolutionAliases.get(descriptorHash);
678 if (aliasHash === undefined)
679 throw new Error(`Assertion failed: The descriptor should have an alias`);
680 const resolution = allResolutions.get(aliasHash);
681 if (resolution === undefined)
682 throw new Error(`Assertion failed: The resolution should have been registered`);
683 // The following can happen if a package gets aliased to another package
684 // that's itself aliased - in this case we just process all those we can
685 // do, then make new passes until everything is resolved
686 if (resolution === `temporary`)
687 continue;
688 haveBeenAliased.delete(descriptorHash);
689 allResolutions.set(descriptorHash, resolution);
690 hasChanged = true;
691 }
692 if (!hasChanged) {
693 throw new Error(`Alias loop detected`);
694 }
695 }
696 // In this step we now create virtual packages for each package with at
697 // least one peer dependency. We also use it to search for the alias
698 // descriptors that aren't depended upon by anything and can be safely
699 // pruned.
700 const volatileDescriptors = new Set(this.resolutionAliases.values());
701 const optionalBuilds = new Set(allPackages.keys());
702 const accessibleLocators = new Set();
703 applyVirtualResolutionMutations({
704 project: this,
705 report: opts.report,
706 accessibleLocators,
707 volatileDescriptors,
708 optionalBuilds,
709 allDescriptors,
710 allResolutions,
711 allPackages,
712 });
713 // All descriptors still referenced within the volatileDescriptors set are
714 // descriptors that aren't depended upon by anything in the dependency tree.
715 for (const descriptorHash of volatileDescriptors) {
716 allDescriptors.delete(descriptorHash);
717 allResolutions.delete(descriptorHash);
718 }
719 // Everything is done, we can now update our internal resolutions to
720 // reference the new ones
721 this.storedResolutions = allResolutions;
722 this.storedDescriptors = allDescriptors;
723 this.storedPackages = allPackages;
724 this.accessibleLocators = accessibleLocators;
725 this.originalPackages = originalPackages;
726 this.optionalBuilds = optionalBuilds;
727 // Now that the internal resolutions have been updated, we can refresh the
728 // dependencies of each resolved workspace's `Workspace` instance.
729 this.refreshWorkspaceDependencies();
730 }
731 async fetchEverything({ cache, report, fetcher: userFetcher }) {
732 const fetcher = userFetcher || this.configuration.makeFetcher();
733 const fetcherOptions = { checksums: this.storedChecksums, project: this, cache, fetcher, report };
734 const locatorHashes = Array.from(new Set(miscUtils.sortMap(this.storedResolutions.values(), [
735 (locatorHash) => {
736 const pkg = this.storedPackages.get(locatorHash);
737 if (!pkg)
738 throw new Error(`Assertion failed: The locator should have been registered`);
739 return structUtils.stringifyLocator(pkg);
740 },
741 ])));
742 let firstError = false;
743 const progress = Report_1.Report.progressViaCounter(locatorHashes.length);
744 report.reportProgress(progress);
745 const limit = p_limit_1.default(FETCHER_CONCURRENCY);
746 await report.startCacheReport(async () => {
747 await Promise.all(locatorHashes.map(locatorHash => limit(async () => {
748 const pkg = this.storedPackages.get(locatorHash);
749 if (!pkg)
750 throw new Error(`Assertion failed: The locator should have been registered`);
751 if (structUtils.isVirtualLocator(pkg))
752 return;
753 let fetchResult;
754 try {
755 fetchResult = await fetcher.fetch(pkg, fetcherOptions);
756 }
757 catch (error) {
758 error.message = `${structUtils.prettyLocator(this.configuration, pkg)}: ${error.message}`;
759 report.reportExceptionOnce(error);
760 firstError = error;
761 return;
762 }
763 if (fetchResult.checksum)
764 this.storedChecksums.set(pkg.locatorHash, fetchResult.checksum);
765 else
766 this.storedChecksums.delete(pkg.locatorHash);
767 if (fetchResult.releaseFs) {
768 fetchResult.releaseFs();
769 }
770 }).finally(() => {
771 progress.tick();
772 })));
773 });
774 if (firstError) {
775 throw firstError;
776 }
777 }
778 async linkEverything({ cache, report, fetcher: optFetcher }) {
779 const fetcher = optFetcher || this.configuration.makeFetcher();
780 const fetcherOptions = { checksums: this.storedChecksums, project: this, cache, fetcher, report, skipIntegrityCheck: true };
781 const linkers = this.configuration.getLinkers();
782 const linkerOptions = { project: this, report };
783 const installers = new Map(linkers.map(linker => {
784 return [linker, linker.makeInstaller(linkerOptions)];
785 }));
786 const packageLinkers = new Map();
787 const packageLocations = new Map();
788 const packageBuildDirectives = new Map();
789 // Step 1: Installing the packages on the disk
790 for (const locatorHash of this.accessibleLocators) {
791 const pkg = this.storedPackages.get(locatorHash);
792 if (!pkg)
793 throw new Error(`Assertion failed: The locator should have been registered`);
794 const fetchResult = await fetcher.fetch(pkg, fetcherOptions);
795 if (this.tryWorkspaceByLocator(pkg) !== null) {
796 const buildScripts = [];
797 const { scripts } = await Manifest_1.Manifest.find(fetchResult.prefixPath, { baseFs: fetchResult.packageFs });
798 for (const scriptName of [`preinstall`, `install`, `postinstall`])
799 if (scripts.has(scriptName))
800 buildScripts.push([Installer_1.BuildType.SCRIPT, scriptName]);
801 try {
802 for (const installer of installers.values()) {
803 await installer.installPackage(pkg, fetchResult);
804 }
805 }
806 finally {
807 if (fetchResult.releaseFs) {
808 fetchResult.releaseFs();
809 }
810 }
811 const location = fslib_1.ppath.join(fetchResult.packageFs.getRealPath(), fetchResult.prefixPath);
812 packageLocations.set(pkg.locatorHash, location);
813 if (buildScripts.length > 0) {
814 packageBuildDirectives.set(pkg.locatorHash, {
815 directives: buildScripts,
816 buildLocations: [location],
817 });
818 }
819 }
820 else {
821 const linker = linkers.find(linker => linker.supportsPackage(pkg, linkerOptions));
822 if (!linker)
823 throw new Report_1.ReportError(MessageName_1.MessageName.LINKER_NOT_FOUND, `${structUtils.prettyLocator(this.configuration, pkg)} isn't supported by any available linker`);
824 const installer = installers.get(linker);
825 if (!installer)
826 throw new Error(`Assertion failed: The installer should have been registered`);
827 let installStatus;
828 try {
829 installStatus = await installer.installPackage(pkg, fetchResult);
830 }
831 finally {
832 if (fetchResult.releaseFs) {
833 fetchResult.releaseFs();
834 }
835 }
836 packageLinkers.set(pkg.locatorHash, linker);
837 packageLocations.set(pkg.locatorHash, installStatus.packageLocation);
838 if (installStatus.buildDirective && installStatus.packageLocation) {
839 packageBuildDirectives.set(pkg.locatorHash, {
840 directives: installStatus.buildDirective,
841 buildLocations: [installStatus.packageLocation],
842 });
843 }
844 }
845 }
846 // Step 2: Link packages together
847 const externalDependents = new Map();
848 for (const locatorHash of this.accessibleLocators) {
849 const pkg = this.storedPackages.get(locatorHash);
850 if (!pkg)
851 throw new Error(`Assertion failed: The locator should have been registered`);
852 const isWorkspace = this.tryWorkspaceByLocator(pkg) !== null;
853 const linkPackage = async (packageLinker, installer) => {
854 const packageLocation = packageLocations.get(pkg.locatorHash);
855 if (typeof packageLocation === `undefined`)
856 throw new Error(`Assertion failed: The package (${structUtils.prettyLocator(this.configuration, pkg)}) should have been registered`);
857 const internalDependencies = [];
858 for (const descriptor of pkg.dependencies.values()) {
859 const resolution = this.storedResolutions.get(descriptor.descriptorHash);
860 if (typeof resolution === `undefined`)
861 throw new Error(`Assertion failed: The resolution (${structUtils.prettyDescriptor(this.configuration, descriptor)}, from ${structUtils.prettyLocator(this.configuration, pkg)})should have been registered`);
862 const dependency = this.storedPackages.get(resolution);
863 if (typeof dependency === `undefined`)
864 throw new Error(`Assertion failed: The package (${resolution}, resolved from ${structUtils.prettyDescriptor(this.configuration, descriptor)}) should have been registered`);
865 const dependencyLinker = this.tryWorkspaceByLocator(dependency) === null
866 ? packageLinkers.get(resolution)
867 : null;
868 if (typeof dependencyLinker === `undefined`)
869 throw new Error(`Assertion failed: The package (${resolution}, resolved from ${structUtils.prettyDescriptor(this.configuration, descriptor)}) should have been registered`);
870 const isWorkspaceDependency = dependencyLinker === null;
871 if (dependencyLinker === packageLinker || isWorkspace || isWorkspaceDependency) {
872 if (packageLocations.get(dependency.locatorHash) !== null) {
873 internalDependencies.push([descriptor, dependency]);
874 }
875 }
876 else if (packageLocation !== null) {
877 const externalEntry = miscUtils.getArrayWithDefault(externalDependents, resolution);
878 externalEntry.push(packageLocation);
879 }
880 }
881 if (packageLocation !== null) {
882 await installer.attachInternalDependencies(pkg, internalDependencies);
883 }
884 };
885 if (isWorkspace) {
886 for (const [packageLinker, installer] of installers) {
887 await linkPackage(packageLinker, installer);
888 }
889 }
890 else {
891 const packageLinker = packageLinkers.get(pkg.locatorHash);
892 if (!packageLinker)
893 throw new Error(`Assertion failed: The linker should have been found`);
894 const installer = installers.get(packageLinker);
895 if (!installer)
896 throw new Error(`Assertion failed: The installer should have been registered`);
897 await linkPackage(packageLinker, installer);
898 }
899 }
900 for (const [locatorHash, dependentPaths] of externalDependents) {
901 const pkg = this.storedPackages.get(locatorHash);
902 if (!pkg)
903 throw new Error(`Assertion failed: The package should have been registered`);
904 const packageLinker = packageLinkers.get(pkg.locatorHash);
905 if (!packageLinker)
906 throw new Error(`Assertion failed: The linker should have been found`);
907 const installer = installers.get(packageLinker);
908 if (!installer)
909 throw new Error(`Assertion failed: The installer should have been registered`);
910 await installer.attachExternalDependents(pkg, dependentPaths);
911 }
912 // Step 3: Inform our linkers that they should have all the info needed
913 for (const installer of installers.values()) {
914 const installStatuses = await installer.finalizeInstall();
915 if (installStatuses) {
916 for (const installStatus of installStatuses) {
917 if (installStatus.buildDirective) {
918 packageBuildDirectives.set(installStatus.locatorHash, {
919 directives: installStatus.buildDirective,
920 buildLocations: installStatus.buildLocations,
921 });
922 }
923 }
924 }
925 }
926 // Step 4: Build the packages in multiple steps
927 const readyPackages = new Set(this.storedPackages.keys());
928 const buildablePackages = new Set(packageBuildDirectives.keys());
929 for (const locatorHash of buildablePackages)
930 readyPackages.delete(locatorHash);
931 const globalHashGenerator = crypto_1.createHash(`sha512`);
932 globalHashGenerator.update(process.versions.node);
933 this.configuration.triggerHook(hooks => {
934 return hooks.globalHashGeneration;
935 }, this, (data) => {
936 globalHashGenerator.update(`\0`);
937 globalHashGenerator.update(data);
938 });
939 const globalHash = globalHashGenerator.digest(`hex`);
940 const packageHashMap = new Map();
941 // We'll use this function is order to compute a hash for each package
942 // that exposes a build directive. If the hash changes compared to the
943 // previous run, the package is rebuilt. This has the advantage of making
944 // the rebuilds much more predictable than before, and to give us the tools
945 // later to improve this further by explaining *why* a rebuild happened.
946 const getBaseHash = (locator) => {
947 let hash = packageHashMap.get(locator.locatorHash);
948 if (typeof hash !== `undefined`)
949 return hash;
950 const pkg = this.storedPackages.get(locator.locatorHash);
951 if (typeof pkg === `undefined`)
952 throw new Error(`Assertion failed: The package should have been registered`);
953 const builder = crypto_1.createHash(`sha512`);
954 builder.update(locator.locatorHash);
955 // To avoid the case where one dependency depends on itself somehow
956 packageHashMap.set(locator.locatorHash, `<recursive>`);
957 for (const descriptor of pkg.dependencies.values()) {
958 const resolution = this.storedResolutions.get(descriptor.descriptorHash);
959 if (typeof resolution === `undefined`)
960 throw new Error(`Assertion failed: The resolution (${structUtils.prettyDescriptor(this.configuration, descriptor)}) should have been registered`);
961 const dependency = this.storedPackages.get(resolution);
962 if (typeof dependency === `undefined`)
963 throw new Error(`Assertion failed: The package should have been registered`);
964 builder.update(getBaseHash(dependency));
965 }
966 hash = builder.digest(`hex`);
967 packageHashMap.set(locator.locatorHash, hash);
968 return hash;
969 };
970 const getBuildHash = (locator, buildLocations) => {
971 const builder = crypto_1.createHash(`sha512`);
972 builder.update(globalHash);
973 builder.update(getBaseHash(locator));
974 for (const location of buildLocations)
975 builder.update(location);
976 return builder.digest(`hex`);
977 };
978 const bstatePath = this.configuration.get(`bstatePath`);
979 const bstate = fslib_1.xfs.existsSync(bstatePath)
980 ? parsers_1.parseSyml(await fslib_1.xfs.readFilePromise(bstatePath, `utf8`))
981 : {};
982 // We reconstruct the build state from an empty object because we want to
983 // remove the state from packages that got removed
984 const nextBState = new Map();
985 while (buildablePackages.size > 0) {
986 const savedSize = buildablePackages.size;
987 const buildPromises = [];
988 for (const locatorHash of buildablePackages) {
989 const pkg = this.storedPackages.get(locatorHash);
990 if (!pkg)
991 throw new Error(`Assertion failed: The package should have been registered`);
992 let isBuildable = true;
993 for (const dependency of pkg.dependencies.values()) {
994 const resolution = this.storedResolutions.get(dependency.descriptorHash);
995 if (!resolution)
996 throw new Error(`Assertion failed: The resolution (${structUtils.prettyDescriptor(this.configuration, dependency)}) should have been registered`);
997 if (buildablePackages.has(resolution)) {
998 isBuildable = false;
999 break;
1000 }
1001 }
1002 // Wait until all dependencies of the current package have been built
1003 // before trying to build it (since it might need them to build itself)
1004 if (!isBuildable)
1005 continue;
1006 buildablePackages.delete(locatorHash);
1007 const buildInfo = packageBuildDirectives.get(pkg.locatorHash);
1008 if (!buildInfo)
1009 throw new Error(`Assertion failed: The build directive should have been registered`);
1010 const buildHash = getBuildHash(pkg, buildInfo.buildLocations);
1011 // No need to rebuild the package if its hash didn't change
1012 if (Object.prototype.hasOwnProperty.call(bstate, pkg.locatorHash) && bstate[pkg.locatorHash] === buildHash) {
1013 nextBState.set(pkg.locatorHash, buildHash);
1014 continue;
1015 }
1016 if (Object.prototype.hasOwnProperty.call(bstate, pkg.locatorHash))
1017 report.reportInfo(MessageName_1.MessageName.MUST_REBUILD, `${structUtils.prettyLocator(this.configuration, pkg)} must be rebuilt because its dependency tree changed`);
1018 else
1019 report.reportInfo(MessageName_1.MessageName.MUST_BUILD, `${structUtils.prettyLocator(this.configuration, pkg)} must be built because it never did before or the last one failed`);
1020 for (const location of buildInfo.buildLocations) {
1021 if (!fslib_1.ppath.isAbsolute(location))
1022 throw new Error(`Assertion failed: Expected the build location to be absolute (not ${location})`);
1023 buildPromises.push((async () => {
1024 for (const [buildType, scriptName] of buildInfo.directives) {
1025 let header = `# This file contains the result of Yarn building a package (${structUtils.stringifyLocator(pkg)})\n`;
1026 switch (buildType) {
1027 case Installer_1.BuildType.SCRIPT:
1028 {
1029 header += `# Script name: ${scriptName}\n`;
1030 }
1031 break;
1032 case Installer_1.BuildType.SHELLCODE:
1033 {
1034 header += `# Script code: ${scriptName}\n`;
1035 }
1036 break;
1037 }
1038 const stdin = null;
1039 await fslib_1.xfs.mktempPromise(async (logDir) => {
1040 const logFile = fslib_1.ppath.join(logDir, `build.log`);
1041 const { stdout, stderr } = this.configuration.getSubprocessStreams(logFile, {
1042 header,
1043 prefix: structUtils.prettyLocator(this.configuration, pkg),
1044 report,
1045 });
1046 let exitCode;
1047 try {
1048 switch (buildType) {
1049 case Installer_1.BuildType.SCRIPT:
1050 {
1051 exitCode = await scriptUtils.executePackageScript(pkg, scriptName, [], { cwd: location, project: this, stdin, stdout, stderr });
1052 }
1053 break;
1054 case Installer_1.BuildType.SHELLCODE:
1055 {
1056 exitCode = await scriptUtils.executePackageShellcode(pkg, scriptName, [], { cwd: location, project: this, stdin, stdout, stderr });
1057 }
1058 break;
1059 }
1060 }
1061 catch (error) {
1062 stderr.write(error.stack);
1063 exitCode = 1;
1064 }
1065 stdout.end();
1066 stderr.end();
1067 if (exitCode === 0) {
1068 nextBState.set(pkg.locatorHash, buildHash);
1069 return true;
1070 }
1071 fslib_1.xfs.detachTemp(logDir);
1072 const buildMessage = `${structUtils.prettyLocator(this.configuration, pkg)} couldn't be built successfully (exit code ${this.configuration.format(String(exitCode), Configuration_1.FormatType.NUMBER)}, logs can be found here: ${this.configuration.format(logFile, Configuration_1.FormatType.PATH)})`;
1073 report.reportInfo(MessageName_1.MessageName.BUILD_FAILED, buildMessage);
1074 if (this.optionalBuilds.has(pkg.locatorHash)) {
1075 nextBState.set(pkg.locatorHash, buildHash);
1076 return true;
1077 }
1078 report.reportError(MessageName_1.MessageName.BUILD_FAILED, buildMessage);
1079 return false;
1080 });
1081 }
1082 })());
1083 }
1084 }
1085 await Promise.all(buildPromises);
1086 // If we reach this code, it means that we have circular dependencies
1087 // somewhere. Worst, it means that the circular dependencies both have
1088 // build scripts, making them unsatisfiable.
1089 if (savedSize === buildablePackages.size) {
1090 const prettyLocators = Array.from(buildablePackages).map(locatorHash => {
1091 const pkg = this.storedPackages.get(locatorHash);
1092 if (!pkg)
1093 throw new Error(`Assertion failed: The package should have been registered`);
1094 return structUtils.prettyLocator(this.configuration, pkg);
1095 }).join(`, `);
1096 report.reportError(MessageName_1.MessageName.CYCLIC_DEPENDENCIES, `Some packages have circular dependencies that make their build order unsatisfiable - as a result they won't be built (affected packages are: ${prettyLocators})`);
1097 break;
1098 }
1099 }
1100 // We can now generate the bstate file, which will allow us to "remember"
1101 // what's the dependency tree subset that we used to build a specific
1102 // package (and avoid rebuilding it later if it didn't change).
1103 if (nextBState.size > 0) {
1104 const bstatePath = this.configuration.get(`bstatePath`);
1105 const bstateFile = Project.generateBuildStateFile(nextBState, this.storedPackages);
1106 await fslib_1.xfs.mkdirPromise(fslib_1.ppath.dirname(bstatePath), { recursive: true });
1107 await fslib_1.xfs.changeFilePromise(bstatePath, bstateFile, {
1108 automaticNewlines: true,
1109 });
1110 }
1111 else {
1112 await fslib_1.xfs.removePromise(bstatePath);
1113 }
1114 }
/**
 * Runs a full install: validation, resolution, fetch, persist, and link.
 * Honors `opts.immutable` (fails if the lockfile or immutable patterns would
 * change) and `opts.persistProject` (skips writing project files when false).
 */
async install(opts) {
    var _a, _b;
    const nodeLinker = this.configuration.get(`nodeLinker`);
    // Telemetry is optional; `_a`/`_b` are the transpiled optional-chaining guards
    (_a = Configuration_1.Configuration.telemetry) === null || _a === void 0 ? void 0 : _a.reportInstall(nodeLinker);
    for (const extensions of this.configuration.packageExtensions.values())
        for (const { descriptor, changes } of extensions)
            for (const change of changes)
                (_b = Configuration_1.Configuration.telemetry) === null || _b === void 0 ? void 0 : _b.reportPackageExtension(`${structUtils.stringifyIdent(descriptor)}:${change}`);
    // Collect project-validation diagnostics from plugins before doing anything
    const validationWarnings = [];
    const validationErrors = [];
    await this.configuration.triggerHook(hooks => {
        return hooks.validateProject;
    }, this, {
        reportWarning: (name, text) => validationWarnings.push({ name, text }),
        reportError: (name, text) => validationErrors.push({ name, text }),
    });
    const problemCount = validationWarnings.length + validationErrors.length;
    if (problemCount > 0) {
        await opts.report.startTimerPromise(`Validation step`, async () => {
            await this.validateEverything({ validationWarnings, validationErrors, report: opts.report });
        });
    }
    await opts.report.startTimerPromise(`Resolution step`, async () => {
        const lockfilePath = fslib_1.ppath.join(this.cwd, this.configuration.get(`lockfileFilename`));
        // If we operate with a frozen lockfile, we take a snapshot of it to later make sure it didn't change
        let initialLockfile = null;
        if (opts.immutable) {
            try {
                initialLockfile = await fslib_1.xfs.readFilePromise(lockfilePath, `utf8`);
            }
            catch (error) {
                // A missing lockfile would be *created* by this install, which an
                // immutable install must reject; other read errors propagate as-is
                if (error.code === `ENOENT`) {
                    throw new Report_1.ReportError(MessageName_1.MessageName.FROZEN_LOCKFILE_EXCEPTION, `The lockfile would have been created by this install, which is explicitly forbidden.`);
                }
                else {
                    throw error;
                }
            }
        }
        await this.resolveEverything(opts);
        if (initialLockfile !== null) {
            // Normalize line endings so the comparison isn't tripped by CRLF/LF
            const newLockfile = fslib_1.normalizeLineEndings(initialLockfile, this.generateLockfile());
            if (newLockfile !== initialLockfile) {
                // Print a unified diff of the would-be lockfile change before failing
                const diff = diff_1.structuredPatch(lockfilePath, lockfilePath, initialLockfile, newLockfile);
                opts.report.reportSeparator();
                for (const hunk of diff.hunks) {
                    opts.report.reportInfo(null, `@@ -${hunk.oldStart},${hunk.oldLines} +${hunk.newStart},${hunk.newLines} @@`);
                    for (const line of hunk.lines) {
                        if (line.startsWith(`+`)) {
                            opts.report.reportError(MessageName_1.MessageName.FROZEN_LOCKFILE_EXCEPTION, this.configuration.format(line, Configuration_1.FormatType.ADDED));
                        }
                        else if (line.startsWith(`-`)) {
                            opts.report.reportError(MessageName_1.MessageName.FROZEN_LOCKFILE_EXCEPTION, this.configuration.format(line, Configuration_1.FormatType.REMOVED));
                        }
                        else {
                            opts.report.reportInfo(null, this.configuration.format(line, `grey`));
                        }
                    }
                }
                opts.report.reportSeparator();
                throw new Report_1.ReportError(MessageName_1.MessageName.FROZEN_LOCKFILE_EXCEPTION, `The lockfile would have been modified by this install, which is explicitly forbidden.`);
            }
        }
    });
    await opts.report.startTimerPromise(`Fetch step`, async () => {
        await this.fetchEverything(opts);
        if (typeof opts.persistProject === `undefined` || opts.persistProject) {
            await this.cacheCleanup(opts);
        }
    });
    // Persist the lockfile/install state before linking so a link failure
    // doesn't lose the resolution work
    if (typeof opts.persistProject === `undefined` || opts.persistProject)
        await this.persist();
    await opts.report.startTimerPromise(`Link step`, async () => {
        const immutablePatterns = opts.immutable
            ? [...new Set(this.configuration.get(`immutablePatterns`))].sort()
            : [];
        // Checksum the immutable patterns before and after linking to detect
        // forbidden modifications
        const before = await Promise.all(immutablePatterns.map(async (pattern) => {
            return hashUtils.checksumPattern(pattern, { cwd: this.cwd });
        }));
        await this.linkEverything(opts);
        const after = await Promise.all(immutablePatterns.map(async (pattern) => {
            return hashUtils.checksumPattern(pattern, { cwd: this.cwd });
        }));
        for (let t = 0; t < immutablePatterns.length; ++t) {
            if (before[t] !== after[t]) {
                opts.report.reportError(MessageName_1.MessageName.FROZEN_ARTIFACT_EXCEPTION, `The checksum for ${immutablePatterns[t]} has been modified by this install, which is explicitly forbidden.`);
            }
        }
    });
    await this.configuration.triggerHook(hooks => {
        return hooks.afterAllInstalled;
    }, this, opts);
}
1208 generateLockfile() {
1209 // We generate the data structure that will represent our lockfile. To do this, we create a
1210 // reverse lookup table, where the key will be the resolved locator and the value will be a set
1211 // of all the descriptors that resolved to it. Then we use it to construct an optimized version
1212 // if the final object.
1213 const reverseLookup = new Map();
1214 for (const [descriptorHash, locatorHash] of this.storedResolutions.entries()) {
1215 let descriptorHashes = reverseLookup.get(locatorHash);
1216 if (!descriptorHashes)
1217 reverseLookup.set(locatorHash, descriptorHashes = new Set());
1218 descriptorHashes.add(descriptorHash);
1219 }
1220 const optimizedLockfile = {};
1221 optimizedLockfile.__metadata = {
1222 version: LOCKFILE_VERSION,
1223 };
1224 for (const [locatorHash, descriptorHashes] of reverseLookup.entries()) {
1225 const pkg = this.originalPackages.get(locatorHash);
1226 // A resolution that isn't in `originalPackages` is a virtual packages.
1227 // Since virtual packages can be derived from the information stored in
1228 // the rest of the lockfile we don't want to bother storing them.
1229 if (!pkg)
1230 continue;
1231 const descriptors = [];
1232 for (const descriptorHash of descriptorHashes) {
1233 const descriptor = this.storedDescriptors.get(descriptorHash);
1234 if (!descriptor)
1235 throw new Error(`Assertion failed: The descriptor should have been registered`);
1236 descriptors.push(descriptor);
1237 }
1238 const key = descriptors.map(descriptor => {
1239 return structUtils.stringifyDescriptor(descriptor);
1240 }).sort().join(`, `);
1241 const manifest = new Manifest_1.Manifest();
1242 manifest.version = pkg.linkType === types_1.LinkType.HARD
1243 ? pkg.version
1244 : `0.0.0-use.local`;
1245 manifest.languageName = pkg.languageName;
1246 manifest.dependencies = new Map(pkg.dependencies);
1247 manifest.peerDependencies = new Map(pkg.peerDependencies);
1248 manifest.dependenciesMeta = new Map(pkg.dependenciesMeta);
1249 manifest.peerDependenciesMeta = new Map(pkg.peerDependenciesMeta);
1250 manifest.bin = new Map(pkg.bin);
1251 let entryChecksum;
1252 const checksum = this.storedChecksums.get(pkg.locatorHash);
1253 if (typeof checksum !== `undefined`) {
1254 const cacheKeyIndex = checksum.indexOf(`/`);
1255 if (cacheKeyIndex === -1)
1256 throw new Error(`Assertion failed: Expecte the checksum to reference its cache key`);
1257 const cacheKey = checksum.slice(0, cacheKeyIndex);
1258 const hash = checksum.slice(cacheKeyIndex + 1);
1259 if (typeof optimizedLockfile.__metadata.cacheKey === `undefined`)
1260 optimizedLockfile.__metadata.cacheKey = cacheKey;
1261 if (cacheKey === optimizedLockfile.__metadata.cacheKey) {
1262 entryChecksum = hash;
1263 }
1264 else {
1265 entryChecksum = checksum;
1266 }
1267 }
1268 optimizedLockfile[key] = {
1269 ...manifest.exportTo({}, {
1270 compatibilityMode: false,
1271 }),
1272 linkType: pkg.linkType.toLowerCase(),
1273 resolution: structUtils.stringifyLocator(pkg),
1274 checksum: entryChecksum,
1275 };
1276 }
1277 const header = `${[
1278 `# This file is generated by running "yarn install" inside your project.\n`,
1279 `# Manual changes might be lost - proceed with caution!\n`,
1280 ].join(``)}\n`;
1281 return header + parsers_1.stringifySyml(optimizedLockfile);
1282 }
1283 async persistLockfile() {
1284 const lockfilePath = fslib_1.ppath.join(this.cwd, this.configuration.get(`lockfileFilename`));
1285 const lockfileContent = this.generateLockfile();
1286 await fslib_1.xfs.changeFilePromise(lockfilePath, lockfileContent, {
1287 automaticNewlines: true,
1288 });
1289 }
1290 async persistInstallStateFile() {
1291 const { accessibleLocators, optionalBuilds, storedDescriptors, storedResolutions, storedPackages, lockFileChecksum } = this;
1292 const installState = { accessibleLocators, optionalBuilds, storedDescriptors, storedResolutions, storedPackages, lockFileChecksum };
1293 const serializedState = await gzip(v8_1.default.serialize(installState));
1294 const installStatePath = this.configuration.get(`installStatePath`);
1295 await fslib_1.xfs.mkdirPromise(fslib_1.ppath.dirname(installStatePath), { recursive: true });
1296 await fslib_1.xfs.writeFilePromise(installStatePath, serializedState);
1297 }
1298 async restoreInstallState() {
1299 const installStatePath = this.configuration.get(`installStatePath`);
1300 if (!fslib_1.xfs.existsSync(installStatePath)) {
1301 await this.applyLightResolution();
1302 return;
1303 }
1304 const serializedState = await fslib_1.xfs.readFilePromise(installStatePath);
1305 const installState = v8_1.default.deserialize(await gunzip(serializedState));
1306 if (installState.lockFileChecksum !== this.lockFileChecksum) {
1307 await this.applyLightResolution();
1308 return;
1309 }
1310 Object.assign(this, installState);
1311 this.refreshWorkspaceDependencies();
1312 }
1313 async applyLightResolution() {
1314 await this.resolveEverything({
1315 lockfileOnly: true,
1316 report: new ThrowReport_1.ThrowReport(),
1317 });
1318 await this.persistInstallStateFile();
1319 }
1320 async persist() {
1321 await this.persistLockfile();
1322 await this.persistInstallStateFile();
1323 for (const workspace of this.workspacesByCwd.values()) {
1324 await workspace.persistManifest();
1325 }
1326 }
1327 async cacheCleanup({ cache, report }) {
1328 const PRESERVED_FILES = new Set([
1329 `.gitignore`,
1330 ]);
1331 if (!fslib_1.xfs.existsSync(cache.cwd))
1332 return;
1333 if (!folderUtils_1.isFolderInside(cache.cwd, this.cwd))
1334 return;
1335 for (const entry of await fslib_1.xfs.readdirPromise(cache.cwd)) {
1336 if (PRESERVED_FILES.has(entry))
1337 continue;
1338 const entryPath = fslib_1.ppath.resolve(cache.cwd, entry);
1339 if (cache.markedFiles.has(entryPath))
1340 continue;
1341 if (cache.immutable) {
1342 report.reportError(MessageName_1.MessageName.IMMUTABLE_CACHE, `${this.configuration.format(fslib_1.ppath.basename(entryPath), `magenta`)} appears to be unused and would marked for deletion, but the cache is immutable`);
1343 }
1344 else {
1345 report.reportInfo(MessageName_1.MessageName.UNUSED_CACHE_ENTRY, `${this.configuration.format(fslib_1.ppath.basename(entryPath), `magenta`)} appears to be unused - removing`);
1346 await fslib_1.xfs.removePromise(entryPath);
1347 }
1348 }
1349 cache.markedFiles.clear();
1350 }
1351}
1352exports.Project = Project;
1353/**
1354 * This function is worth some documentation. It takes a set of packages,
1355 * traverses them all, and generates virtual packages for each package that
1356 * lists peer dependencies.
1357 *
1358 * We also take advantage of the tree traversal to detect which packages are
1359 * actually used and which have disappeared, and to know which packages truly
1360 * have an optional build (since a package may be optional in one part of the
1361 * tree but not another).
1362 */
function applyVirtualResolutionMutations({ project, allDescriptors, allResolutions, allPackages, accessibleLocators = new Set(), optionalBuilds = new Set(), volatileDescriptors = new Set(), report, tolerateMissingPackages = false, }) {
    // Number of times each physical package currently appears on the active
    // virtualization path; used together with `reportStackOverflow` to cut
    // off runaway cycles.
    const virtualStack = new Map();
    // Chain of locators currently being resolved; dumped to a log file when
    // a stack overflow is detected.
    const resolutionStack = [];
    // We'll be keeping track of all virtual descriptors; once they have all
    // been generated we'll check whether they can be consolidated into one.
    const allVirtualInstances = new Map();
    const allVirtualDependents = new Map();
    // We must keep a copy of the workspaces original dependencies, because they
    // may be overridden during the virtual package resolution - cf Dragon Test #5
    const originalWorkspaceDefinitions = new Map(project.workspaces.map(workspace => {
        const locatorHash = workspace.anchoredLocator.locatorHash;
        const pkg = allPackages.get(locatorHash);
        if (typeof pkg === `undefined`) {
            if (tolerateMissingPackages) {
                return [locatorHash, null];
            }
            else {
                throw new Error(`Assertion failed: The workspace should have an associated package`);
            }
        }
        return [locatorHash, structUtils.copyPackage(pkg)];
    }));
    // Writes the current resolution chain to a temp file, then aborts the
    // whole process with a ReportError pointing at that file.
    const reportStackOverflow = () => {
        const logDir = fslib_1.xfs.mktempSync();
        const logFile = fslib_1.ppath.join(logDir, `stacktrace.log`);
        // Pad the indices so the stack listing stays column-aligned.
        const maxSize = String(resolutionStack.length + 1).length;
        const content = resolutionStack.map((locator, index) => {
            const prefix = `${index + 1}.`.padStart(maxSize, ` `);
            return `${prefix} ${structUtils.stringifyLocator(locator)}\n`;
        }).join(``);
        fslib_1.xfs.writeFileSync(logFile, content);
        throw new Report_1.ReportError(MessageName_1.MessageName.STACK_OVERFLOW_RESOLUTION, `Encountered a stack overflow when resolving peer dependencies; cf ${logFile}`);
    };
    // Maps a descriptor to its resolved package, asserting that both the
    // resolution and the package have already been registered.
    const getPackageFromDescriptor = (descriptor) => {
        const resolution = allResolutions.get(descriptor.descriptorHash);
        if (typeof resolution === `undefined`)
            throw new Error(`Assertion failed: The resolution should have been registered`);
        const pkg = allPackages.get(resolution);
        if (!pkg)
            throw new Error(`Assertion failed: The package could not be found`);
        return pkg;
    };
    // Thin wrapper around resolvePeerDependenciesImpl that maintains the
    // resolution stack and enforces a hard recursion-depth limit.
    const resolvePeerDependencies = (parentLocator, first, optional) => {
        if (resolutionStack.length > 1000)
            reportStackOverflow();
        resolutionStack.push(parentLocator);
        const result = resolvePeerDependenciesImpl(parentLocator, first, optional);
        resolutionStack.pop();
        return result;
    };
    const resolvePeerDependenciesImpl = (parentLocator, first, optional) => {
        // Each locator only needs to be traversed once.
        if (accessibleLocators.has(parentLocator.locatorHash))
            return;
        accessibleLocators.add(parentLocator.locatorHash);
        // A package reached through at least one non-optional path must have
        // its build treated as required.
        if (!optional)
            optionalBuilds.delete(parentLocator.locatorHash);
        const parentPackage = allPackages.get(parentLocator.locatorHash);
        if (!parentPackage) {
            if (tolerateMissingPackages) {
                return;
            }
            else {
                throw new Error(`Assertion failed: The package (${structUtils.prettyLocator(project.configuration, parentLocator)}) should have been registered`);
            }
        }
        // Work queues: each dependency contributes one closure per pass, and
        // the passes are executed in order below so that sibling virtual
        // packages can see each other regardless of iteration order.
        const newVirtualInstances = [];
        const firstPass = [];
        const secondPass = [];
        const thirdPass = [];
        const fourthPass = [];
        // During this first pass we virtualize the descriptors. This allows us
        // to reference them from their sibling without being order-dependent,
        // which is required to solve cases where packages with peer dependencies
        // have peer dependencies themselves.
        for (const descriptor of Array.from(parentPackage.dependencies.values())) {
            // We shouldn't virtualize the package if it was obtained through a peer
            // dependency (which can't be the case for workspaces when resolved
            // through their top-level)
            if (parentPackage.peerDependencies.has(descriptor.identHash) && !first)
                continue;
            // We had some issues where virtual packages were incorrectly set inside
            // workspaces, causing leaks. Check the Dragon Test #5 for more details.
            if (structUtils.isVirtualDescriptor(descriptor))
                throw new Error(`Assertion failed: Virtual packages shouldn't be encountered when virtualizing a branch`);
            // Mark this package as being used (won't be removed from the lockfile)
            volatileDescriptors.delete(descriptor.descriptorHash);
            // Detect whether this package is being required
            let isOptional = optional;
            if (!isOptional) {
                // NOTE(review): the `null` key presumably selects the
                // version-independent dependenciesMeta entry - confirm against
                // the Manifest parsing code.
                const dependencyMetaSet = parentPackage.dependenciesMeta.get(structUtils.stringifyIdent(descriptor));
                if (typeof dependencyMetaSet !== `undefined`) {
                    const dependencyMeta = dependencyMetaSet.get(null);
                    if (typeof dependencyMeta !== `undefined` && dependencyMeta.optional) {
                        isOptional = true;
                    }
                }
            }
            const resolution = allResolutions.get(descriptor.descriptorHash);
            if (!resolution) {
                // Note that we can't use `getPackageFromDescriptor` (defined below,
                // because when doing the initial tree building right after loading the
                // project it's possible that we get some entries that haven't been
                // registered into the lockfile yet - for example when the user has
                // manually changed the package.json dependencies)
                if (tolerateMissingPackages) {
                    continue;
                }
                else {
                    throw new Error(`Assertion failed: The resolution (${structUtils.prettyDescriptor(project.configuration, descriptor)}) should have been registered`);
                }
            }
            // Workspaces are resolved against their pristine pre-mutation copy
            // (cf the originalWorkspaceDefinitions comment above).
            const pkg = originalWorkspaceDefinitions.get(resolution) || allPackages.get(resolution);
            if (!pkg)
                throw new Error(`Assertion failed: The package (${resolution}, resolved from ${structUtils.prettyDescriptor(project.configuration, descriptor)}) should have been registered`);
            // Packages without peer dependencies don't need a virtual instance;
            // recurse into them directly.
            if (pkg.peerDependencies.size === 0) {
                resolvePeerDependencies(pkg, false, isOptional);
                continue;
            }
            // The stack overflow is checked against two level because a workspace
            // may have a dev dependency on another workspace that lists the first
            // one as a regular dependency. In this case the loop will break so we
            // don't need to throw an exception.
            const stackDepth = virtualStack.get(pkg.locatorHash);
            if (typeof stackDepth === `number` && stackDepth >= 2)
                reportStackOverflow();
            let virtualizedDescriptor;
            let virtualizedPackage;
            const missingPeerDependencies = new Set();
            // First pass: create the virtual descriptor/package pair and swap
            // it in for the original dependency in all the shared registries.
            firstPass.push(() => {
                virtualizedDescriptor = structUtils.virtualizeDescriptor(descriptor, parentLocator.locatorHash);
                virtualizedPackage = structUtils.virtualizePackage(pkg, parentLocator.locatorHash);
                parentPackage.dependencies.delete(descriptor.identHash);
                parentPackage.dependencies.set(virtualizedDescriptor.identHash, virtualizedDescriptor);
                allResolutions.set(virtualizedDescriptor.descriptorHash, virtualizedPackage.locatorHash);
                allDescriptors.set(virtualizedDescriptor.descriptorHash, virtualizedDescriptor);
                allPackages.set(virtualizedPackage.locatorHash, virtualizedPackage);
                // Keep track of all new virtual packages since we'll want to dedupe them
                newVirtualInstances.push([pkg, virtualizedDescriptor, virtualizedPackage]);
            });
            // Second pass: fill the virtual package's peer dependency slots with
            // whatever the parent provides, or a `missing:` placeholder.
            secondPass.push(() => {
                for (const peerRequest of virtualizedPackage.peerDependencies.values()) {
                    let peerDescriptor = parentPackage.dependencies.get(peerRequest.identHash);
                    // A package may list its own parent as a peer dependency; in
                    // that case the parent itself satisfies the request.
                    if (!peerDescriptor && structUtils.areIdentsEqual(parentLocator, peerRequest)) {
                        peerDescriptor = structUtils.convertLocatorToDescriptor(parentLocator);
                        allDescriptors.set(peerDescriptor.descriptorHash, peerDescriptor);
                        allResolutions.set(peerDescriptor.descriptorHash, parentLocator.locatorHash);
                        volatileDescriptors.delete(peerDescriptor.descriptorHash);
                    }
                    // If the package also lists the same ident as a regular
                    // dependency, the regular dependency wins and the peer
                    // entry is dropped.
                    if (!peerDescriptor && virtualizedPackage.dependencies.has(peerRequest.identHash)) {
                        virtualizedPackage.peerDependencies.delete(peerRequest.identHash);
                        continue;
                    }
                    if (!peerDescriptor) {
                        // Only warn when the parent doesn't itself forward the
                        // same peer request further up, and the peer isn't
                        // explicitly marked optional.
                        if (!parentPackage.peerDependencies.has(peerRequest.identHash)) {
                            const peerDependencyMeta = virtualizedPackage.peerDependenciesMeta.get(structUtils.stringifyIdent(peerRequest));
                            if (report !== null && (!peerDependencyMeta || !peerDependencyMeta.optional)) {
                                report.reportWarning(MessageName_1.MessageName.MISSING_PEER_DEPENDENCY, `${structUtils.prettyLocator(project.configuration, parentLocator)} doesn't provide ${structUtils.prettyDescriptor(project.configuration, peerRequest)} requested by ${structUtils.prettyLocator(project.configuration, pkg)}`);
                            }
                        }
                        peerDescriptor = structUtils.makeDescriptor(peerRequest, `missing:`);
                    }
                    virtualizedPackage.dependencies.set(peerDescriptor.identHash, peerDescriptor);
                    // Need to track when a virtual descriptor is set as a dependency in case
                    // the descriptor will be consolidated.
                    if (structUtils.isVirtualDescriptor(peerDescriptor)) {
                        const dependents = miscUtils.getSetWithDefault(allVirtualDependents, peerDescriptor.descriptorHash);
                        dependents.add(virtualizedPackage.locatorHash);
                    }
                    if (peerDescriptor.range === `missing:`) {
                        missingPeerDependencies.add(peerDescriptor.identHash);
                    }
                    else if (report !== null) {
                        // When the parent provides the peer dependency request it must be checked to ensure
                        // it is a compatible version.
                        const peerPackage = getPackageFromDescriptor(peerDescriptor);
                        if (!semverUtils.satisfiesWithPrereleases(peerPackage.version, peerRequest.range)) {
                            report.reportWarning(MessageName_1.MessageName.INCOMPATIBLE_PEER_DEPENDENCY, `${structUtils.prettyLocator(project.configuration, parentLocator)} provides ${structUtils.prettyLocator(project.configuration, peerPackage)} with version ${peerPackage.version} which doesn't satisfy ${structUtils.prettyRange(project.configuration, peerRequest.range)} requested by ${structUtils.prettyLocator(project.configuration, pkg)}`);
                        }
                    }
                }
                // Since we've had to add new dependencies we need to sort them all over again
                virtualizedPackage.dependencies = new Map(miscUtils.sortMap(virtualizedPackage.dependencies, ([identHash, descriptor]) => {
                    return structUtils.stringifyIdent(descriptor);
                }));
            });
            // Third pass: recurse into the virtual package (unless the dedupe
            // step below removed it), tracking per-package virtualization depth.
            thirdPass.push(() => {
                if (!allPackages.has(virtualizedPackage.locatorHash))
                    return;
                const current = virtualStack.get(pkg.locatorHash);
                const next = typeof current !== `undefined` ? current + 1 : 1;
                virtualStack.set(pkg.locatorHash, next);
                resolvePeerDependencies(virtualizedPackage, false, isOptional);
                virtualStack.set(pkg.locatorHash, next - 1);
            });
            // Fourth pass: drop the `missing:` placeholders now that the
            // recursion no longer needs them.
            fourthPass.push(() => {
                if (!allPackages.has(virtualizedPackage.locatorHash))
                    return;
                for (const missingPeerDependency of missingPeerDependencies) {
                    virtualizedPackage.dependencies.delete(missingPeerDependency);
                }
            });
        }
        for (const fn of [...firstPass, ...secondPass])
            fn();
        // Dedupe the freshly created virtual packages: two virtual instances
        // of the same physical package whose dependencies resolve identically
        // can share a single virtual package. Iterate until a fixed point.
        let stable;
        do {
            stable = true;
            for (const [physicalLocator, virtualDescriptor, virtualPackage] of newVirtualInstances) {
                if (!allPackages.has(virtualPackage.locatorHash))
                    continue;
                const otherVirtualInstances = miscUtils.getMapWithDefault(allVirtualInstances, physicalLocator.locatorHash);
                // We take all the dependencies from the new virtual instance and
                // generate a hash from it. By checking if this hash is already
                // registered, we know whether we can trim the new version.
                const dependencyHash = hashUtils.makeHash(...[...virtualPackage.dependencies.values()].map(descriptor => {
                    const resolution = descriptor.range !== `missing:`
                        ? allResolutions.get(descriptor.descriptorHash)
                        : `missing:`;
                    if (typeof resolution === `undefined`)
                        throw new Error(`Assertion failed: Expected the resolution for ${structUtils.prettyDescriptor(project.configuration, descriptor)} to have been registered`);
                    return resolution;
                }),
                // We use the identHash to disambiguate between virtual descriptors
                // with different base idents being resolved to the same virtual package.
                // Note: We don't use the descriptorHash because the whole point of duplicate
                // virtual descriptors is that they have different `virtual:` ranges.
                // This causes the virtual descriptors with different base idents
                // to be preserved, while the virtual package they resolve to gets deduped.
                virtualDescriptor.identHash);
                const masterDescriptor = otherVirtualInstances.get(dependencyHash);
                if (typeof masterDescriptor === `undefined`) {
                    otherVirtualInstances.set(dependencyHash, virtualDescriptor);
                    continue;
                }
                // Since we're applying multiple pass, we might have already registered
                // ourselves as the "master" descriptor in the previous pass.
                if (masterDescriptor === virtualDescriptor)
                    continue;
                stable = false;
                // Unregister the duplicate everywhere, then point every known
                // dependent at the surviving "master" descriptor.
                allPackages.delete(virtualPackage.locatorHash);
                allDescriptors.delete(virtualDescriptor.descriptorHash);
                allResolutions.delete(virtualDescriptor.descriptorHash);
                accessibleLocators.delete(virtualPackage.locatorHash);
                const dependents = allVirtualDependents.get(virtualDescriptor.descriptorHash) || [];
                const allDependents = [parentPackage.locatorHash, ...dependents];
                for (const dependent of allDependents) {
                    const pkg = allPackages.get(dependent);
                    if (typeof pkg === `undefined`)
                        continue;
                    pkg.dependencies.set(virtualDescriptor.identHash, masterDescriptor);
                }
            }
        } while (!stable);
        for (const fn of [...thirdPass, ...fourthPass]) {
            fn();
        }
    };
    // Kick off the traversal from each workspace root. Workspaces are always
    // kept in the lockfile and are never optional.
    for (const workspace of project.workspaces) {
        volatileDescriptors.delete(workspace.anchoredDescriptor.descriptorHash);
        resolvePeerDependencies(workspace.anchoredLocator, true, false);
    }
}