// Source: UNPKG mirror of @yarnpkg/core Project.js (93.9 kB, transpiled JavaScript).
// The original page chrome ("UNPKG" / "View Raw") has been removed.
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Project = void 0;
const tslib_1 = require("tslib");
const fslib_1 = require("@yarnpkg/fslib");
const parsers_1 = require("@yarnpkg/parsers");
const clipanion_1 = require("clipanion");
const crypto_1 = require("crypto");
const diff_1 = require("diff");
// @ts-expect-error
const logic_solver_1 = tslib_1.__importDefault(require("logic-solver"));
const p_limit_1 = tslib_1.__importDefault(require("p-limit"));
const semver_1 = tslib_1.__importDefault(require("semver"));
const util_1 = require("util");
const v8_1 = tslib_1.__importDefault(require("v8"));
const zlib_1 = tslib_1.__importDefault(require("zlib"));
const Configuration_1 = require("./Configuration");
const Installer_1 = require("./Installer");
const LegacyMigrationResolver_1 = require("./LegacyMigrationResolver");
const LockfileResolver_1 = require("./LockfileResolver");
const Manifest_1 = require("./Manifest");
const MessageName_1 = require("./MessageName");
const MultiResolver_1 = require("./MultiResolver");
const Report_1 = require("./Report");
const RunInstallPleaseResolver_1 = require("./RunInstallPleaseResolver");
const ThrowReport_1 = require("./ThrowReport");
const Workspace_1 = require("./Workspace");
const folderUtils_1 = require("./folderUtils");
const formatUtils = tslib_1.__importStar(require("./formatUtils"));
const hashUtils = tslib_1.__importStar(require("./hashUtils"));
const miscUtils = tslib_1.__importStar(require("./miscUtils"));
const scriptUtils = tslib_1.__importStar(require("./scriptUtils"));
const semverUtils = tslib_1.__importStar(require("./semverUtils"));
const structUtils = tslib_1.__importStar(require("./structUtils"));
const types_1 = require("./types");
// When upgraded, the lockfile entries have to be resolved again (but the specific
// versions are still pinned, no worry). Bump it when you change the fields within
// the Package type; no more no less.
const LOCKFILE_VERSION = 4;
// Same thing but must be bumped when the members of the Project class changes (we
// don't recommend our users to check-in this file, so it's fine to bump it even
// between patch or minor releases).
const INSTALL_STATE_VERSION = 1;
// Splits comma-separated lockfile keys ("foo@x, bar@y") into individual entries.
const MULTIPLE_KEYS_REGEXP = / *, */g;
const TRAILING_SLASH_REGEXP = /\/$/;
// Upper bound on the number of packages fetched in parallel during installs.
const FETCHER_CONCURRENCY = 32;
// Promisified (de)compression helpers used for the serialized install state.
const gzip = util_1.promisify(zlib_1.default.gzip);
const gunzip = util_1.promisify(zlib_1.default.gunzip);
49class Project {
50 constructor(projectCwd, { configuration }) {
51 /**
52 * Is meant to be populated by the consumer. Should the descriptor referenced
53 * by the key be requested, the descriptor referenced in the value will be
54 * resolved instead. The resolved data will then be used as final resolution
55 * for the initial descriptor.
56 *
57 * Note that the lockfile will contain the second descriptor but not the
58 * first one (meaning that if you remove the alias during a subsequent
59 * install, it'll be lost and the real package will be resolved / installed).
60 */
61 this.resolutionAliases = new Map();
62 this.workspaces = [];
63 this.workspacesByCwd = new Map();
64 this.workspacesByIdent = new Map();
65 this.storedResolutions = new Map();
66 this.storedDescriptors = new Map();
67 this.storedPackages = new Map();
68 this.storedChecksums = new Map();
69 this.accessibleLocators = new Set();
70 this.originalPackages = new Map();
71 this.optionalBuilds = new Set();
72 this.lockFileChecksum = null;
73 this.configuration = configuration;
74 this.cwd = projectCwd;
75 }
76 static async find(configuration, startingCwd) {
77 var _a, _b, _c;
78 if (!configuration.projectCwd)
79 throw new clipanion_1.UsageError(`No project found in ${startingCwd}`);
80 let packageCwd = configuration.projectCwd;
81 let nextCwd = startingCwd;
82 let currentCwd = null;
83 while (currentCwd !== configuration.projectCwd) {
84 currentCwd = nextCwd;
85 if (fslib_1.xfs.existsSync(fslib_1.ppath.join(currentCwd, fslib_1.Filename.manifest))) {
86 packageCwd = currentCwd;
87 break;
88 }
89 nextCwd = fslib_1.ppath.dirname(currentCwd);
90 }
91 const project = new Project(configuration.projectCwd, { configuration });
92 (_a = Configuration_1.Configuration.telemetry) === null || _a === void 0 ? void 0 : _a.reportProject(project.cwd);
93 await project.setupResolutions();
94 await project.setupWorkspaces();
95 (_b = Configuration_1.Configuration.telemetry) === null || _b === void 0 ? void 0 : _b.reportWorkspaceCount(project.workspaces.length);
96 (_c = Configuration_1.Configuration.telemetry) === null || _c === void 0 ? void 0 : _c.reportDependencyCount(project.workspaces.reduce((sum, workspace) => sum + workspace.manifest.dependencies.size + workspace.manifest.devDependencies.size, 0));
97 // If we're in a workspace, no need to go any further to find which package we're in
98 const workspace = project.tryWorkspaceByCwd(packageCwd);
99 if (workspace)
100 return { project, workspace, locator: workspace.anchoredLocator };
101 // Otherwise, we need to ask the project (which will in turn ask the linkers for help)
102 // Note: the trailing slash is caused by a quirk in the PnP implementation that requires folders to end with a trailing slash to disambiguate them from regular files
103 const locator = await project.findLocatorForLocation(`${packageCwd}/`, { strict: true });
104 if (locator)
105 return { project, locator, workspace: null };
106 throw new clipanion_1.UsageError(`The nearest package directory (${formatUtils.pretty(configuration, packageCwd, formatUtils.Type.PATH)}) doesn't seem to be part of the project declared in ${formatUtils.pretty(configuration, project.cwd, formatUtils.Type.PATH)}.\n\n- If the project directory is right, it might be that you forgot to list ${formatUtils.pretty(configuration, fslib_1.ppath.relative(project.cwd, packageCwd), formatUtils.Type.PATH)} as a workspace.\n- If it isn't, it's likely because you have a yarn.lock or package.json file there, confusing the project root detection.`);
107 }
108 static generateBuildStateFile(buildState, locatorStore) {
109 let bstateFile = `# Warning: This file is automatically generated. Removing it is fine, but will\n# cause all your builds to become invalidated.\n`;
110 const bstateData = [...buildState].map(([locatorHash, hash]) => {
111 const locator = locatorStore.get(locatorHash);
112 if (typeof locator === `undefined`)
113 throw new Error(`Assertion failed: The locator should have been registered`);
114 return [structUtils.stringifyLocator(locator), locator.locatorHash, hash];
115 });
116 for (const [locatorString, locatorHash, buildHash] of miscUtils.sortMap(bstateData, [d => d[0], d => d[1]])) {
117 bstateFile += `\n`;
118 bstateFile += `# ${locatorString}\n`;
119 bstateFile += `${JSON.stringify(locatorHash)}:\n`;
120 bstateFile += ` ${buildHash}\n`;
121 }
122 return bstateFile;
123 }
124 async setupResolutions() {
125 this.storedResolutions = new Map();
126 this.storedDescriptors = new Map();
127 this.storedPackages = new Map();
128 this.lockFileChecksum = null;
129 const lockfilePath = fslib_1.ppath.join(this.cwd, this.configuration.get(`lockfileFilename`));
130 const defaultLanguageName = this.configuration.get(`defaultLanguageName`);
131 if (fslib_1.xfs.existsSync(lockfilePath)) {
132 const content = await fslib_1.xfs.readFilePromise(lockfilePath, `utf8`);
133 // We store the salted checksum of the lockfile in order to invalidate the install state when needed
134 this.lockFileChecksum = hashUtils.makeHash(`${INSTALL_STATE_VERSION}`, content);
135 const parsed = parsers_1.parseSyml(content);
136 // Protects against v1 lockfiles
137 if (parsed.__metadata) {
138 const lockfileVersion = parsed.__metadata.version;
139 const cacheKey = parsed.__metadata.cacheKey;
140 for (const key of Object.keys(parsed)) {
141 if (key === `__metadata`)
142 continue;
143 const data = parsed[key];
144 if (typeof data.resolution === `undefined`)
145 throw new Error(`Assertion failed: Expected the lockfile entry to have a resolution field (${key})`);
146 const locator = structUtils.parseLocator(data.resolution, true);
147 const manifest = new Manifest_1.Manifest();
148 manifest.load(data);
149 const version = manifest.version;
150 const languageName = manifest.languageName || defaultLanguageName;
151 const linkType = data.linkType.toUpperCase();
152 const dependencies = manifest.dependencies;
153 const peerDependencies = manifest.peerDependencies;
154 const dependenciesMeta = manifest.dependenciesMeta;
155 const peerDependenciesMeta = manifest.peerDependenciesMeta;
156 const bin = manifest.bin;
157 if (data.checksum != null) {
158 const checksum = typeof cacheKey !== `undefined` && !data.checksum.includes(`/`)
159 ? `${cacheKey}/${data.checksum}`
160 : data.checksum;
161 this.storedChecksums.set(locator.locatorHash, checksum);
162 }
163 if (lockfileVersion >= LOCKFILE_VERSION) {
164 const pkg = { ...locator, version, languageName, linkType, dependencies, peerDependencies, dependenciesMeta, peerDependenciesMeta, bin };
165 this.originalPackages.set(pkg.locatorHash, pkg);
166 }
167 for (const entry of key.split(MULTIPLE_KEYS_REGEXP)) {
168 const descriptor = structUtils.parseDescriptor(entry);
169 this.storedDescriptors.set(descriptor.descriptorHash, descriptor);
170 if (lockfileVersion >= LOCKFILE_VERSION) {
171 // If the lockfile is up-to-date, we can simply register the
172 // resolution as a done deal.
173 this.storedResolutions.set(descriptor.descriptorHash, locator.locatorHash);
174 }
175 else {
176 // But if it isn't, then we instead setup an alias so that the
177 // descriptor will be re-resolved (so that we get to retrieve the
178 // new fields) while still resolving to the same locators.
179 const resolutionDescriptor = structUtils.convertLocatorToDescriptor(locator);
180 if (resolutionDescriptor.descriptorHash !== descriptor.descriptorHash) {
181 this.storedDescriptors.set(resolutionDescriptor.descriptorHash, resolutionDescriptor);
182 this.resolutionAliases.set(descriptor.descriptorHash, resolutionDescriptor.descriptorHash);
183 }
184 }
185 }
186 }
187 }
188 }
189 }
190 async setupWorkspaces() {
191 this.workspaces = [];
192 this.workspacesByCwd = new Map();
193 this.workspacesByIdent = new Map();
194 let workspaceCwds = [this.cwd];
195 while (workspaceCwds.length > 0) {
196 const passCwds = workspaceCwds;
197 workspaceCwds = [];
198 for (const workspaceCwd of passCwds) {
199 if (this.workspacesByCwd.has(workspaceCwd))
200 continue;
201 const workspace = await this.addWorkspace(workspaceCwd);
202 const workspacePkg = this.storedPackages.get(workspace.anchoredLocator.locatorHash);
203 if (workspacePkg)
204 workspace.dependencies = workspacePkg.dependencies;
205 for (const workspaceCwd of workspace.workspacesCwds) {
206 workspaceCwds.push(workspaceCwd);
207 }
208 }
209 }
210 }
211 async addWorkspace(workspaceCwd) {
212 const workspace = new Workspace_1.Workspace(workspaceCwd, { project: this });
213 await workspace.setup();
214 const dup = this.workspacesByIdent.get(workspace.locator.identHash);
215 if (typeof dup !== `undefined`)
216 throw new Error(`Duplicate workspace name ${structUtils.prettyIdent(this.configuration, workspace.locator)}: ${workspaceCwd} conflicts with ${dup.cwd}`);
217 this.workspaces.push(workspace);
218 this.workspacesByCwd.set(workspaceCwd, workspace);
219 this.workspacesByIdent.set(workspace.locator.identHash, workspace);
220 return workspace;
221 }
222 get topLevelWorkspace() {
223 return this.getWorkspaceByCwd(this.cwd);
224 }
225 tryWorkspaceByCwd(workspaceCwd) {
226 if (!fslib_1.ppath.isAbsolute(workspaceCwd))
227 workspaceCwd = fslib_1.ppath.resolve(this.cwd, workspaceCwd);
228 const workspace = this.workspacesByCwd.get(workspaceCwd);
229 if (!workspace)
230 return null;
231 return workspace;
232 }
233 getWorkspaceByCwd(workspaceCwd) {
234 const workspace = this.tryWorkspaceByCwd(workspaceCwd);
235 if (!workspace)
236 throw new Error(`Workspace not found (${workspaceCwd})`);
237 return workspace;
238 }
239 tryWorkspaceByFilePath(filePath) {
240 let bestWorkspace = null;
241 for (const workspace of this.workspaces) {
242 const rel = fslib_1.ppath.relative(workspace.cwd, filePath);
243 if (rel.startsWith(`../`))
244 continue;
245 if (bestWorkspace && bestWorkspace.cwd.length >= workspace.cwd.length)
246 continue;
247 bestWorkspace = workspace;
248 }
249 if (!bestWorkspace)
250 return null;
251 return bestWorkspace;
252 }
253 getWorkspaceByFilePath(filePath) {
254 const workspace = this.tryWorkspaceByFilePath(filePath);
255 if (!workspace)
256 throw new Error(`Workspace not found (${filePath})`);
257 return workspace;
258 }
259 tryWorkspaceByIdent(ident) {
260 const workspace = this.workspacesByIdent.get(ident.identHash);
261 if (typeof workspace === `undefined`)
262 return null;
263 return workspace;
264 }
265 getWorkspaceByIdent(ident) {
266 const workspace = this.tryWorkspaceByIdent(ident);
267 if (!workspace)
268 throw new Error(`Workspace not found (${structUtils.prettyIdent(this.configuration, ident)})`);
269 return workspace;
270 }
271 tryWorkspaceByDescriptor(descriptor) {
272 const workspace = this.tryWorkspaceByIdent(descriptor);
273 if (workspace === null || !workspace.accepts(descriptor.range))
274 return null;
275 return workspace;
276 }
277 getWorkspaceByDescriptor(descriptor) {
278 const workspace = this.tryWorkspaceByDescriptor(descriptor);
279 if (workspace === null)
280 throw new Error(`Workspace not found (${structUtils.prettyDescriptor(this.configuration, descriptor)})`);
281 return workspace;
282 }
283 tryWorkspaceByLocator(locator) {
284 if (structUtils.isVirtualLocator(locator))
285 locator = structUtils.devirtualizeLocator(locator);
286 const workspace = this.tryWorkspaceByIdent(locator);
287 if (workspace === null || (workspace.locator.locatorHash !== locator.locatorHash && workspace.anchoredLocator.locatorHash !== locator.locatorHash))
288 return null;
289 return workspace;
290 }
291 getWorkspaceByLocator(locator) {
292 const workspace = this.tryWorkspaceByLocator(locator);
293 if (!workspace)
294 throw new Error(`Workspace not found (${structUtils.prettyLocator(this.configuration, locator)})`);
295 return workspace;
296 }
297 /**
298 * Import the dependencies of each resolved workspace into their own
299 * `Workspace` instance.
300 */
301 refreshWorkspaceDependencies() {
302 for (const workspace of this.workspaces) {
303 const pkg = this.storedPackages.get(workspace.anchoredLocator.locatorHash);
304 if (!pkg)
305 throw new Error(`Assertion failed: Expected workspace to have been resolved`);
306 workspace.dependencies = new Map(pkg.dependencies);
307 }
308 }
309 forgetResolution(dataStructure) {
310 const deleteDescriptor = (descriptorHash) => {
311 this.storedResolutions.delete(descriptorHash);
312 this.storedDescriptors.delete(descriptorHash);
313 };
314 const deleteLocator = (locatorHash) => {
315 this.originalPackages.delete(locatorHash);
316 this.storedPackages.delete(locatorHash);
317 this.accessibleLocators.delete(locatorHash);
318 };
319 if (`descriptorHash` in dataStructure) {
320 const locatorHash = this.storedResolutions.get(dataStructure.descriptorHash);
321 deleteDescriptor(dataStructure.descriptorHash);
322 // We delete unused locators
323 const remainingResolutions = new Set(this.storedResolutions.values());
324 if (typeof locatorHash !== `undefined` && !remainingResolutions.has(locatorHash)) {
325 deleteLocator(locatorHash);
326 }
327 }
328 if (`locatorHash` in dataStructure) {
329 deleteLocator(dataStructure.locatorHash);
330 // We delete all of the descriptors that have been resolved to the locator
331 for (const [descriptorHash, locatorHash] of this.storedResolutions) {
332 if (locatorHash === dataStructure.locatorHash) {
333 deleteDescriptor(descriptorHash);
334 }
335 }
336 }
337 }
338 forgetTransientResolutions() {
339 const resolver = this.configuration.makeResolver();
340 for (const pkg of this.originalPackages.values()) {
341 let shouldPersistResolution;
342 try {
343 shouldPersistResolution = resolver.shouldPersistResolution(pkg, { project: this, resolver });
344 }
345 catch (_a) {
346 shouldPersistResolution = false;
347 }
348 if (!shouldPersistResolution) {
349 this.forgetResolution(pkg);
350 }
351 }
352 }
353 forgetVirtualResolutions() {
354 for (const pkg of this.storedPackages.values()) {
355 for (const [dependencyHash, dependency] of pkg.dependencies) {
356 if (structUtils.isVirtualDescriptor(dependency)) {
357 pkg.dependencies.set(dependencyHash, structUtils.devirtualizeDescriptor(dependency));
358 }
359 }
360 }
361 }
362 getDependencyMeta(ident, version) {
363 const dependencyMeta = {};
364 const dependenciesMeta = this.topLevelWorkspace.manifest.dependenciesMeta;
365 const dependencyMetaSet = dependenciesMeta.get(structUtils.stringifyIdent(ident));
366 if (!dependencyMetaSet)
367 return dependencyMeta;
368 const defaultMeta = dependencyMetaSet.get(null);
369 if (defaultMeta)
370 Object.assign(dependencyMeta, defaultMeta);
371 if (version === null || !semver_1.default.valid(version))
372 return dependencyMeta;
373 for (const [range, meta] of dependencyMetaSet)
374 if (range !== null && range === version)
375 Object.assign(dependencyMeta, meta);
376 return dependencyMeta;
377 }
378 async findLocatorForLocation(cwd, { strict = false } = {}) {
379 const report = new ThrowReport_1.ThrowReport();
380 const linkers = this.configuration.getLinkers();
381 const linkerOptions = { project: this, report };
382 for (const linker of linkers) {
383 const locator = await linker.findPackageLocator(cwd, linkerOptions);
384 if (locator) {
385 // If strict mode, the specified cwd must be a package,
386 // not merely contained in a package.
387 if (strict) {
388 const location = await linker.findPackageLocation(locator, linkerOptions);
389 if (location.replace(TRAILING_SLASH_REGEXP, ``) !== cwd.replace(TRAILING_SLASH_REGEXP, ``)) {
390 continue;
391 }
392 }
393 return locator;
394 }
395 }
396 return null;
397 }
398 async validateEverything(opts) {
399 for (const warning of opts.validationWarnings)
400 opts.report.reportWarning(warning.name, warning.text);
401 for (const error of opts.validationErrors) {
402 opts.report.reportError(error.name, error.text);
403 }
404 }
405 async resolveEverything(opts) {
406 if (!this.workspacesByCwd || !this.workspacesByIdent)
407 throw new Error(`Workspaces must have been setup before calling this function`);
408 // Reverts the changes that have been applied to the tree because of any previous virtual resolution pass
409 this.forgetVirtualResolutions();
410 // Ensures that we notice it when dependencies are added / removed from all sources coming from the filesystem
411 if (!opts.lockfileOnly)
412 this.forgetTransientResolutions();
413 // Note that the resolution process is "offline" until everything has been
414 // successfully resolved; all the processing is expected to have zero side
415 // effects until we're ready to set all the variables at once (the one
416 // exception being when a resolver needs to fetch a package, in which case
417 // we might need to populate the cache).
418 //
419 // This makes it possible to use the same Project instance for multiple
420 // purposes at the same time (since `resolveEverything` is async, it might
421 // happen that we want to do something while waiting for it to end; if we
422 // were to mutate the project then it would end up in a partial state that
423 // could lead to hard-to-debug issues).
424 const realResolver = opts.resolver || this.configuration.makeResolver();
425 const legacyMigrationResolver = new LegacyMigrationResolver_1.LegacyMigrationResolver();
426 await legacyMigrationResolver.setup(this, { report: opts.report });
427 const resolver = opts.lockfileOnly
428 ? new MultiResolver_1.MultiResolver([new LockfileResolver_1.LockfileResolver(), new RunInstallPleaseResolver_1.RunInstallPleaseResolver(realResolver)])
429 : new MultiResolver_1.MultiResolver([new LockfileResolver_1.LockfileResolver(), legacyMigrationResolver, realResolver]);
430 const fetcher = this.configuration.makeFetcher();
431 const resolveOptions = opts.lockfileOnly
432 ? { project: this, report: opts.report, resolver }
433 : { project: this, report: opts.report, resolver, fetchOptions: { project: this, cache: opts.cache, checksums: this.storedChecksums, report: opts.report, fetcher } };
434 const allDescriptors = new Map();
435 const allPackages = new Map();
436 const allResolutions = new Map();
437 const originalPackages = new Map();
438 const resolutionDependencies = new Map();
439 const haveBeenAliased = new Set();
440 let nextResolutionPass = new Set();
441 for (const workspace of this.workspaces) {
442 const workspaceDescriptor = workspace.anchoredDescriptor;
443 allDescriptors.set(workspaceDescriptor.descriptorHash, workspaceDescriptor);
444 nextResolutionPass.add(workspaceDescriptor.descriptorHash);
445 }
446 while (nextResolutionPass.size !== 0) {
447 const currentResolutionPass = nextResolutionPass;
448 nextResolutionPass = new Set();
449 // We remove from the "mustBeResolved" list all packages that have
450 // already been resolved previously.
451 for (const descriptorHash of currentResolutionPass)
452 if (allResolutions.has(descriptorHash))
453 currentResolutionPass.delete(descriptorHash);
454 if (currentResolutionPass.size === 0)
455 break;
456 // We check that the resolution dependencies have been resolved for all
457 // descriptors that we're about to resolve. Buffalo buffalo buffalo
458 // buffalo.
459 const deferredResolutions = new Set();
460 const resolvedDependencies = new Map();
461 for (const descriptorHash of currentResolutionPass) {
462 const descriptor = allDescriptors.get(descriptorHash);
463 if (!descriptor)
464 throw new Error(`Assertion failed: The descriptor should have been registered`);
465 let dependencies = resolutionDependencies.get(descriptorHash);
466 if (typeof dependencies === `undefined`) {
467 resolutionDependencies.set(descriptorHash, dependencies = new Set());
468 for (const dependency of resolver.getResolutionDependencies(descriptor, resolveOptions)) {
469 allDescriptors.set(dependency.descriptorHash, dependency);
470 dependencies.add(dependency.descriptorHash);
471 }
472 }
473 const resolved = miscUtils.getMapWithDefault(resolvedDependencies, descriptorHash);
474 for (const dependencyHash of dependencies) {
475 const resolution = allResolutions.get(dependencyHash);
476 if (typeof resolution !== `undefined`) {
477 const dependencyPkg = allPackages.get(resolution);
478 if (typeof dependencyPkg === `undefined`)
479 throw new Error(`Assertion failed: The package should have been registered`);
480 // The dependency is ready. We register it into the map so
481 // that we can pass that to getCandidates right after.
482 resolved.set(dependencyHash, dependencyPkg);
483 }
484 else {
485 // One of the resolution dependencies of this descriptor is
486 // missing; we need to postpone its resolution for now.
487 deferredResolutions.add(descriptorHash);
488 // For this pass however we'll want to schedule the resolution
489 // of the dependency (so that it's probably ready next pass).
490 currentResolutionPass.add(dependencyHash);
491 }
492 }
493 }
494 // Note: we're postponing the resolution only once we already know all
495 // those that are going to be postponed. This way we can detect
496 // potential cyclic dependencies.
497 for (const descriptorHash of deferredResolutions) {
498 currentResolutionPass.delete(descriptorHash);
499 nextResolutionPass.add(descriptorHash);
500 }
501 if (currentResolutionPass.size === 0)
502 throw new Error(`Assertion failed: Descriptors should not have cyclic dependencies`);
503 // Then we request the resolvers for the list of possible references that
504 // match the given ranges. That will give us a set of candidate references
505 // for each descriptor.
506 const passCandidates = new Map(await Promise.all(Array.from(currentResolutionPass).map(async (descriptorHash) => {
507 const descriptor = allDescriptors.get(descriptorHash);
508 if (typeof descriptor === `undefined`)
509 throw new Error(`Assertion failed: The descriptor should have been registered`);
510 const descriptorDependencies = resolvedDependencies.get(descriptor.descriptorHash);
511 if (typeof descriptorDependencies === `undefined`)
512 throw new Error(`Assertion failed: The descriptor dependencies should have been registered`);
513 let candidateLocators;
514 try {
515 candidateLocators = await resolver.getCandidates(descriptor, descriptorDependencies, resolveOptions);
516 }
517 catch (error) {
518 error.message = `${structUtils.prettyDescriptor(this.configuration, descriptor)}: ${error.message}`;
519 throw error;
520 }
521 if (candidateLocators.length === 0)
522 throw new Error(`No candidate found for ${structUtils.prettyDescriptor(this.configuration, descriptor)}`);
523 return [descriptor.descriptorHash, candidateLocators];
524 })));
525 // That's where we'll store our resolutions until everything has been
526 // resolved and can be injected into the various stores.
527 //
528 // The reason we're storing them in a temporary store instead of writing
529 // them directly into the global ones is that otherwise we would end up
530 // with different store orderings between dependency loaded from a
531 // lockfiles and those who don't (when using a lockfile all descriptors
532 // will fall into the next shortcut, but when no lockfile is there only
533 // some of them will; since maps are sorted by insertion, it would affect
534 // the way they would be ordered).
535 const passResolutions = new Map();
536 // We now make a pre-pass to automatically resolve the descriptors that
537 // can only be satisfied by a single reference.
538 for (const [descriptorHash, candidateLocators] of passCandidates) {
539 if (candidateLocators.length !== 1)
540 continue;
541 passResolutions.set(descriptorHash, candidateLocators[0]);
542 passCandidates.delete(descriptorHash);
543 }
544 // We make a second pre-pass to automatically resolve the descriptors
545 // that can be satisfied by a package we're already using (deduplication).
546 for (const [descriptorHash, candidateLocators] of passCandidates) {
547 const selectedLocator = candidateLocators.find(locator => allPackages.has(locator.locatorHash));
548 if (!selectedLocator)
549 continue;
550 passResolutions.set(descriptorHash, selectedLocator);
551 passCandidates.delete(descriptorHash);
552 }
553 // All entries that remain in "passCandidates" are from descriptors that
554 // we haven't been able to resolve in the first place. We'll now configure
555 // our SAT solver so that it can figure it out for us. To do this, we
556 // simply add a constraint for each descriptor that lists all the
557 // descriptors it would accept. We don't have to check whether the
558 // locators obtained have already been selected, because if they were the
559 // would have been resolved in the previous step (we never backtrace to
560 // try to find better solutions, it would be a too expensive process - we
561 // just want to get an acceptable solution, not the very best one).
562 if (passCandidates.size > 0) {
563 const solver = new logic_solver_1.default.Solver();
564 for (const candidateLocators of passCandidates.values())
565 solver.require(logic_solver_1.default.or(...candidateLocators.map(locator => locator.locatorHash)));
566 let remainingSolutions = 100;
567 let solution;
568 let bestSolution = null;
569 let bestScore = Infinity;
570 while (remainingSolutions > 0 && (solution = solver.solve()) !== null) {
571 const trueVars = solution.getTrueVars();
572 solver.forbid(solution.getFormula());
573 if (trueVars.length < bestScore) {
574 bestSolution = trueVars;
575 bestScore = trueVars.length;
576 }
577 remainingSolutions -= 1;
578 }
579 if (!bestSolution)
580 throw new Error(`Assertion failed: No resolution found by the SAT solver`);
581 const solutionSet = new Set(bestSolution);
582 for (const [descriptorHash, candidateLocators] of passCandidates.entries()) {
583 const selectedLocator = candidateLocators.find(locator => solutionSet.has(locator.locatorHash));
584 if (!selectedLocator)
585 throw new Error(`Assertion failed: The descriptor should have been solved during the previous step`);
586 passResolutions.set(descriptorHash, selectedLocator);
587 passCandidates.delete(descriptorHash);
588 }
589 }
590 // We now iterate over the locators we've got and, for each of them that
591 // hasn't been seen before, we fetch its dependency list and schedule
592 // them for the next cycle.
593 const newLocators = Array.from(passResolutions.values()).filter(locator => {
594 return !allPackages.has(locator.locatorHash);
595 });
596 const newPackages = new Map(await Promise.all(newLocators.map(async (locator) => {
597 const original = await miscUtils.prettifyAsyncErrors(async () => {
598 return await resolver.resolve(locator, resolveOptions);
599 }, message => {
600 return `${structUtils.prettyLocator(this.configuration, locator)}: ${message}`;
601 });
602 if (!structUtils.areLocatorsEqual(locator, original))
603 throw new Error(`Assertion failed: The locator cannot be changed by the resolver (went from ${structUtils.prettyLocator(this.configuration, locator)} to ${structUtils.prettyLocator(this.configuration, original)})`);
604 const pkg = this.configuration.normalizePackage(original);
605 for (const [identHash, descriptor] of pkg.dependencies) {
606 const dependency = await this.configuration.reduceHook(hooks => {
607 return hooks.reduceDependency;
608 }, descriptor, this, pkg, descriptor, {
609 resolver,
610 resolveOptions,
611 });
612 if (!structUtils.areIdentsEqual(descriptor, dependency))
613 throw new Error(`Assertion failed: The descriptor ident cannot be changed through aliases`);
614 const bound = resolver.bindDescriptor(dependency, locator, resolveOptions);
615 pkg.dependencies.set(identHash, bound);
616 }
617 return [pkg.locatorHash, { original, pkg }];
618 })));
619 // Now that the resolution is finished, we can finally insert the data
620 // stored inside our pass stores into the resolution ones (we now have
621 // the guarantee that they'll always be inserted into in the same order,
622 // since mustBeResolved is stable regardless of the order in which the
623 // resolvers return)
624 for (const descriptorHash of currentResolutionPass) {
625 const locator = passResolutions.get(descriptorHash);
626 if (!locator)
627 throw new Error(`Assertion failed: The locator should have been registered`);
628 allResolutions.set(descriptorHash, locator.locatorHash);
629 // If undefined it means that the package was already known and thus
630 // didn't need to be resolved again.
631 const resolutionEntry = newPackages.get(locator.locatorHash);
632 if (typeof resolutionEntry === `undefined`)
633 continue;
634 const { original, pkg } = resolutionEntry;
635 originalPackages.set(original.locatorHash, original);
636 allPackages.set(pkg.locatorHash, pkg);
637 for (const descriptor of pkg.dependencies.values()) {
638 allDescriptors.set(descriptor.descriptorHash, descriptor);
639 nextResolutionPass.add(descriptor.descriptorHash);
640 // We must check and make sure that the descriptor didn't get aliased
641 // to something else
642 const aliasHash = this.resolutionAliases.get(descriptor.descriptorHash);
643 if (aliasHash === undefined)
644 continue;
645 // It doesn't cost us much to support the case where a descriptor is
646 // equal to its own alias (which should mean "no alias")
647 if (descriptor.descriptorHash === aliasHash)
648 continue;
649 const alias = this.storedDescriptors.get(aliasHash);
650 if (!alias)
651 throw new Error(`Assertion failed: The alias should have been registered`);
652 // If it's already been "resolved" (in reality it will be the temporary
653 // resolution we've set in the next few lines) we simply must skip it
654 if (allResolutions.has(descriptor.descriptorHash))
655 continue;
656 // Temporarily set an invalid resolution so that it won't be resolved
657 // multiple times if it is found multiple times in the dependency
658 // tree (this is only temporary, we will replace it by the actual
659 // resolution after we've finished resolving everything)
660 allResolutions.set(descriptor.descriptorHash, `temporary`);
661 // We can now replace the descriptor by its alias in the list of
662 // descriptors that must be resolved
663 nextResolutionPass.delete(descriptor.descriptorHash);
664 nextResolutionPass.add(aliasHash);
665 allDescriptors.set(aliasHash, alias);
666 haveBeenAliased.add(descriptor.descriptorHash);
667 }
668 }
669 }
670 // Each package that should have been resolved but was skipped because it
671 // was aliased will now see the resolution for its alias propagated to it
672 while (haveBeenAliased.size > 0) {
673 let hasChanged = false;
674 for (const descriptorHash of haveBeenAliased) {
675 const descriptor = allDescriptors.get(descriptorHash);
676 if (!descriptor)
677 throw new Error(`Assertion failed: The descriptor should have been registered`);
678 const aliasHash = this.resolutionAliases.get(descriptorHash);
679 if (aliasHash === undefined)
680 throw new Error(`Assertion failed: The descriptor should have an alias`);
681 const resolution = allResolutions.get(aliasHash);
682 if (resolution === undefined)
683 throw new Error(`Assertion failed: The resolution should have been registered`);
684 // The following can happen if a package gets aliased to another package
685 // that's itself aliased - in this case we just process all those we can
686 // do, then make new passes until everything is resolved
687 if (resolution === `temporary`)
688 continue;
689 haveBeenAliased.delete(descriptorHash);
690 allResolutions.set(descriptorHash, resolution);
691 hasChanged = true;
692 }
693 if (!hasChanged) {
694 throw new Error(`Alias loop detected`);
695 }
696 }
697 // In this step we now create virtual packages for each package with at
698 // least one peer dependency. We also use it to search for the alias
699 // descriptors that aren't depended upon by anything and can be safely
700 // pruned.
701 const volatileDescriptors = new Set(this.resolutionAliases.values());
702 const optionalBuilds = new Set(allPackages.keys());
703 const accessibleLocators = new Set();
704 applyVirtualResolutionMutations({
705 project: this,
706 report: opts.report,
707 accessibleLocators,
708 volatileDescriptors,
709 optionalBuilds,
710 allDescriptors,
711 allResolutions,
712 allPackages,
713 });
714 // All descriptors still referenced within the volatileDescriptors set are
715 // descriptors that aren't depended upon by anything in the dependency tree.
716 for (const descriptorHash of volatileDescriptors) {
717 allDescriptors.delete(descriptorHash);
718 allResolutions.delete(descriptorHash);
719 }
720 // Everything is done, we can now update our internal resolutions to
721 // reference the new ones
722 this.storedResolutions = allResolutions;
723 this.storedDescriptors = allDescriptors;
724 this.storedPackages = allPackages;
725 this.accessibleLocators = accessibleLocators;
726 this.originalPackages = originalPackages;
727 this.optionalBuilds = optionalBuilds;
728 // Now that the internal resolutions have been updated, we can refresh the
729 // dependencies of each resolved workspace's `Workspace` instance.
730 this.refreshWorkspaceDependencies();
731 }
732 async fetchEverything({ cache, report, fetcher: userFetcher }) {
733 const fetcher = userFetcher || this.configuration.makeFetcher();
734 const fetcherOptions = { checksums: this.storedChecksums, project: this, cache, fetcher, report };
735 const locatorHashes = Array.from(new Set(miscUtils.sortMap(this.storedResolutions.values(), [
736 (locatorHash) => {
737 const pkg = this.storedPackages.get(locatorHash);
738 if (!pkg)
739 throw new Error(`Assertion failed: The locator should have been registered`);
740 return structUtils.stringifyLocator(pkg);
741 },
742 ])));
743 let firstError = false;
744 const progress = Report_1.Report.progressViaCounter(locatorHashes.length);
745 report.reportProgress(progress);
746 const limit = p_limit_1.default(FETCHER_CONCURRENCY);
747 await report.startCacheReport(async () => {
748 await Promise.all(locatorHashes.map(locatorHash => limit(async () => {
749 const pkg = this.storedPackages.get(locatorHash);
750 if (!pkg)
751 throw new Error(`Assertion failed: The locator should have been registered`);
752 if (structUtils.isVirtualLocator(pkg))
753 return;
754 let fetchResult;
755 try {
756 fetchResult = await fetcher.fetch(pkg, fetcherOptions);
757 }
758 catch (error) {
759 error.message = `${structUtils.prettyLocator(this.configuration, pkg)}: ${error.message}`;
760 report.reportExceptionOnce(error);
761 firstError = error;
762 return;
763 }
764 if (fetchResult.checksum)
765 this.storedChecksums.set(pkg.locatorHash, fetchResult.checksum);
766 else
767 this.storedChecksums.delete(pkg.locatorHash);
768 if (fetchResult.releaseFs) {
769 fetchResult.releaseFs();
770 }
771 }).finally(() => {
772 progress.tick();
773 })));
774 });
775 if (firstError) {
776 throw firstError;
777 }
778 }
    /**
     * Links all fetched packages into the project (the "link step" of an
     * install). Proceeds in four passes:
     *   1. install every accessible package on disk through its linker,
     *   2. wire the dependency edges between installed packages,
     *   3. let each installer finalize (which may report extra build work),
     *   4. run build scripts in dependency order, caching results in the
     *      build-state (bstate) file so unchanged packages aren't rebuilt.
     *
     * @param cache      package cache handed to the fetchers
     * @param report     report sink for progress / errors
     * @param optFetcher optional fetcher override (defaults to the one built
     *                   from the configuration)
     */
    async linkEverything({ cache, report, fetcher: optFetcher }) {
        const fetcher = optFetcher || this.configuration.makeFetcher();
        // skipIntegrityCheck: the fetch step has already validated checksums
        const fetcherOptions = { checksums: this.storedChecksums, project: this, cache, fetcher, report, skipIntegrityCheck: true };
        const linkers = this.configuration.getLinkers();
        const linkerOptions = { project: this, report };
        // One installer instance per linker, for the duration of this pass
        const installers = new Map(linkers.map(linker => {
            return [linker, linker.makeInstaller(linkerOptions)];
        }));
        const packageLinkers = new Map();
        const packageLocations = new Map();
        const packageBuildDirectives = new Map();
        // Step 1: Installing the packages on the disk
        for (const locatorHash of this.accessibleLocators) {
            const pkg = this.storedPackages.get(locatorHash);
            if (!pkg)
                throw new Error(`Assertion failed: The locator should have been registered`);
            const fetchResult = await fetcher.fetch(pkg, fetcherOptions);
            if (this.tryWorkspaceByLocator(pkg) !== null) {
                // Workspaces are fed to every installer; their build scripts
                // come straight from their own manifest
                const buildScripts = [];
                const { scripts } = await Manifest_1.Manifest.find(fetchResult.prefixPath, { baseFs: fetchResult.packageFs });
                for (const scriptName of [`preinstall`, `install`, `postinstall`])
                    if (scripts.has(scriptName))
                        buildScripts.push([Installer_1.BuildType.SCRIPT, scriptName]);
                try {
                    for (const installer of installers.values()) {
                        await installer.installPackage(pkg, fetchResult);
                    }
                }
                finally {
                    // Always release the fetched filesystem, even on failure
                    if (fetchResult.releaseFs) {
                        fetchResult.releaseFs();
                    }
                }
                const location = fslib_1.ppath.join(fetchResult.packageFs.getRealPath(), fetchResult.prefixPath);
                packageLocations.set(pkg.locatorHash, location);
                if (buildScripts.length > 0) {
                    packageBuildDirectives.set(pkg.locatorHash, {
                        directives: buildScripts,
                        buildLocations: [location],
                    });
                }
            }
            else {
                // Regular packages go through the first linker supporting them
                const linker = linkers.find(linker => linker.supportsPackage(pkg, linkerOptions));
                if (!linker)
                    throw new Report_1.ReportError(MessageName_1.MessageName.LINKER_NOT_FOUND, `${structUtils.prettyLocator(this.configuration, pkg)} isn't supported by any available linker`);
                const installer = installers.get(linker);
                if (!installer)
                    throw new Error(`Assertion failed: The installer should have been registered`);
                let installStatus;
                try {
                    installStatus = await installer.installPackage(pkg, fetchResult);
                }
                finally {
                    if (fetchResult.releaseFs) {
                        fetchResult.releaseFs();
                    }
                }
                packageLinkers.set(pkg.locatorHash, linker);
                packageLocations.set(pkg.locatorHash, installStatus.packageLocation);
                if (installStatus.buildDirective && installStatus.packageLocation) {
                    packageBuildDirectives.set(pkg.locatorHash, {
                        directives: installStatus.buildDirective,
                        buildLocations: [installStatus.packageLocation],
                    });
                }
            }
        }
        // Step 2: Link packages together
        const externalDependents = new Map();
        for (const locatorHash of this.accessibleLocators) {
            const pkg = this.storedPackages.get(locatorHash);
            if (!pkg)
                throw new Error(`Assertion failed: The locator should have been registered`);
            const isWorkspace = this.tryWorkspaceByLocator(pkg) !== null;
            const linkPackage = async (packageLinker, installer) => {
                const packageLocation = packageLocations.get(pkg.locatorHash);
                if (typeof packageLocation === `undefined`)
                    throw new Error(`Assertion failed: The package (${structUtils.prettyLocator(this.configuration, pkg)}) should have been registered`);
                const internalDependencies = [];
                for (const descriptor of pkg.dependencies.values()) {
                    const resolution = this.storedResolutions.get(descriptor.descriptorHash);
                    // NOTE(review): message below is missing a space before "should"
                    if (typeof resolution === `undefined`)
                        throw new Error(`Assertion failed: The resolution (${structUtils.prettyDescriptor(this.configuration, descriptor)}, from ${structUtils.prettyLocator(this.configuration, pkg)})should have been registered`);
                    const dependency = this.storedPackages.get(resolution);
                    if (typeof dependency === `undefined`)
                        throw new Error(`Assertion failed: The package (${resolution}, resolved from ${structUtils.prettyDescriptor(this.configuration, descriptor)}) should have been registered`);
                    // A `null` linker marks the dependency as a workspace
                    const dependencyLinker = this.tryWorkspaceByLocator(dependency) === null
                        ? packageLinkers.get(resolution)
                        : null;
                    if (typeof dependencyLinker === `undefined`)
                        throw new Error(`Assertion failed: The package (${resolution}, resolved from ${structUtils.prettyDescriptor(this.configuration, descriptor)}) should have been registered`);
                    const isWorkspaceDependency = dependencyLinker === null;
                    if (dependencyLinker === packageLinker || isWorkspace || isWorkspaceDependency) {
                        if (packageLocations.get(dependency.locatorHash) !== null) {
                            internalDependencies.push([descriptor, dependency]);
                        }
                    }
                    else if (packageLocation !== null) {
                        // Dependency handled by a different linker: record this
                        // package's path so it can be attached externally later
                        const externalEntry = miscUtils.getArrayWithDefault(externalDependents, resolution);
                        externalEntry.push(packageLocation);
                    }
                }
                if (packageLocation !== null) {
                    await installer.attachInternalDependencies(pkg, internalDependencies);
                }
            };
            if (isWorkspace) {
                // Workspaces were installed by every installer, so they must be
                // linked by every installer too
                for (const [packageLinker, installer] of installers) {
                    await linkPackage(packageLinker, installer);
                }
            }
            else {
                const packageLinker = packageLinkers.get(pkg.locatorHash);
                if (!packageLinker)
                    throw new Error(`Assertion failed: The linker should have been found`);
                const installer = installers.get(packageLinker);
                if (!installer)
                    throw new Error(`Assertion failed: The installer should have been registered`);
                await linkPackage(packageLinker, installer);
            }
        }
        // Attach the cross-linker dependents recorded during step 2
        for (const [locatorHash, dependentPaths] of externalDependents) {
            const pkg = this.storedPackages.get(locatorHash);
            if (!pkg)
                throw new Error(`Assertion failed: The package should have been registered`);
            const packageLinker = packageLinkers.get(pkg.locatorHash);
            if (!packageLinker)
                throw new Error(`Assertion failed: The linker should have been found`);
            const installer = installers.get(packageLinker);
            if (!installer)
                throw new Error(`Assertion failed: The installer should have been registered`);
            await installer.attachExternalDependents(pkg, dependentPaths);
        }
        // Step 3: Inform our linkers that they should have all the info needed
        for (const installer of installers.values()) {
            const installStatuses = await installer.finalizeInstall();
            if (installStatuses) {
                for (const installStatus of installStatuses) {
                    if (installStatus.buildDirective) {
                        packageBuildDirectives.set(installStatus.locatorHash, {
                            directives: installStatus.buildDirective,
                            buildLocations: installStatus.buildLocations,
                        });
                    }
                }
            }
        }
        // Step 4: Build the packages in multiple steps
        const readyPackages = new Set(this.storedPackages.keys());
        const buildablePackages = new Set(packageBuildDirectives.keys());
        for (const locatorHash of buildablePackages)
            readyPackages.delete(locatorHash);
        // The global hash salts every build hash: a Node upgrade (or anything
        // a plugin feeds through globalHashGeneration) forces rebuilds
        const globalHashGenerator = crypto_1.createHash(`sha512`);
        globalHashGenerator.update(process.versions.node);
        this.configuration.triggerHook(hooks => {
            return hooks.globalHashGeneration;
        }, this, (data) => {
            globalHashGenerator.update(`\0`);
            globalHashGenerator.update(data);
        });
        const globalHash = globalHashGenerator.digest(`hex`);
        const packageHashMap = new Map();
        // We'll use this function in order to compute a hash for each package
        // that exposes a build directive. If the hash changes compared to the
        // previous run, the package is rebuilt. This has the advantage of making
        // the rebuilds much more predictable than before, and to give us the tools
        // later to improve this further by explaining *why* a rebuild happened.
        const getBaseHash = (locator) => {
            let hash = packageHashMap.get(locator.locatorHash);
            if (typeof hash !== `undefined`)
                return hash;
            const pkg = this.storedPackages.get(locator.locatorHash);
            if (typeof pkg === `undefined`)
                throw new Error(`Assertion failed: The package should have been registered`);
            const builder = crypto_1.createHash(`sha512`);
            builder.update(locator.locatorHash);
            // To avoid the case where one dependency depends on itself somehow
            packageHashMap.set(locator.locatorHash, `<recursive>`);
            for (const descriptor of pkg.dependencies.values()) {
                const resolution = this.storedResolutions.get(descriptor.descriptorHash);
                if (typeof resolution === `undefined`)
                    throw new Error(`Assertion failed: The resolution (${structUtils.prettyDescriptor(this.configuration, descriptor)}) should have been registered`);
                const dependency = this.storedPackages.get(resolution);
                if (typeof dependency === `undefined`)
                    throw new Error(`Assertion failed: The package should have been registered`);
                builder.update(getBaseHash(dependency));
            }
            hash = builder.digest(`hex`);
            packageHashMap.set(locator.locatorHash, hash);
            return hash;
        };
        // Build hash = global hash + dependency-tree hash + build locations
        const getBuildHash = (locator, buildLocations) => {
            const builder = crypto_1.createHash(`sha512`);
            builder.update(globalHash);
            builder.update(getBaseHash(locator));
            for (const location of buildLocations)
                builder.update(location);
            return builder.digest(`hex`);
        };
        const bstatePath = this.configuration.get(`bstatePath`);
        const bstate = fslib_1.xfs.existsSync(bstatePath)
            ? parsers_1.parseSyml(await fslib_1.xfs.readFilePromise(bstatePath, `utf8`))
            : {};
        // We reconstruct the build state from an empty object because we want to
        // remove the state from packages that got removed
        const nextBState = new Map();
        while (buildablePackages.size > 0) {
            const savedSize = buildablePackages.size;
            const buildPromises = [];
            for (const locatorHash of buildablePackages) {
                const pkg = this.storedPackages.get(locatorHash);
                if (!pkg)
                    throw new Error(`Assertion failed: The package should have been registered`);
                let isBuildable = true;
                for (const dependency of pkg.dependencies.values()) {
                    const resolution = this.storedResolutions.get(dependency.descriptorHash);
                    if (!resolution)
                        throw new Error(`Assertion failed: The resolution (${structUtils.prettyDescriptor(this.configuration, dependency)}) should have been registered`);
                    if (buildablePackages.has(resolution)) {
                        isBuildable = false;
                        break;
                    }
                }
                // Wait until all dependencies of the current package have been built
                // before trying to build it (since it might need them to build itself)
                if (!isBuildable)
                    continue;
                buildablePackages.delete(locatorHash);
                const buildInfo = packageBuildDirectives.get(pkg.locatorHash);
                if (!buildInfo)
                    throw new Error(`Assertion failed: The build directive should have been registered`);
                const buildHash = getBuildHash(pkg, buildInfo.buildLocations);
                // No need to rebuild the package if its hash didn't change
                if (Object.prototype.hasOwnProperty.call(bstate, pkg.locatorHash) && bstate[pkg.locatorHash] === buildHash) {
                    nextBState.set(pkg.locatorHash, buildHash);
                    continue;
                }
                if (Object.prototype.hasOwnProperty.call(bstate, pkg.locatorHash))
                    report.reportInfo(MessageName_1.MessageName.MUST_REBUILD, `${structUtils.prettyLocator(this.configuration, pkg)} must be rebuilt because its dependency tree changed`);
                else
                    report.reportInfo(MessageName_1.MessageName.MUST_BUILD, `${structUtils.prettyLocator(this.configuration, pkg)} must be built because it never did before or the last one failed`);
                for (const location of buildInfo.buildLocations) {
                    if (!fslib_1.ppath.isAbsolute(location))
                        throw new Error(`Assertion failed: Expected the build location to be absolute (not ${location})`);
                    buildPromises.push((async () => {
                        for (const [buildType, scriptName] of buildInfo.directives) {
                            let header = `# This file contains the result of Yarn building a package (${structUtils.stringifyLocator(pkg)})\n`;
                            switch (buildType) {
                                case Installer_1.BuildType.SCRIPT:
                                    {
                                        header += `# Script name: ${scriptName}\n`;
                                    }
                                    break;
                                case Installer_1.BuildType.SHELLCODE:
                                    {
                                        header += `# Script code: ${scriptName}\n`;
                                    }
                                    break;
                            }
                            const stdin = null;
                            // Build logs go to a temp dir that's only kept on failure
                            await fslib_1.xfs.mktempPromise(async (logDir) => {
                                const logFile = fslib_1.ppath.join(logDir, `build.log`);
                                const { stdout, stderr } = this.configuration.getSubprocessStreams(logFile, {
                                    header,
                                    prefix: structUtils.prettyLocator(this.configuration, pkg),
                                    report,
                                });
                                let exitCode;
                                try {
                                    switch (buildType) {
                                        case Installer_1.BuildType.SCRIPT:
                                            {
                                                exitCode = await scriptUtils.executePackageScript(pkg, scriptName, [], { cwd: location, project: this, stdin, stdout, stderr });
                                            }
                                            break;
                                        case Installer_1.BuildType.SHELLCODE:
                                            {
                                                exitCode = await scriptUtils.executePackageShellcode(pkg, scriptName, [], { cwd: location, project: this, stdin, stdout, stderr });
                                            }
                                            break;
                                    }
                                }
                                catch (error) {
                                    stderr.write(error.stack);
                                    exitCode = 1;
                                }
                                stdout.end();
                                stderr.end();
                                if (exitCode === 0) {
                                    nextBState.set(pkg.locatorHash, buildHash);
                                    return true;
                                }
                                // Keep the log dir around so users can inspect it
                                fslib_1.xfs.detachTemp(logDir);
                                const buildMessage = `${structUtils.prettyLocator(this.configuration, pkg)} couldn't be built successfully (exit code ${formatUtils.pretty(this.configuration, exitCode, formatUtils.Type.NUMBER)}, logs can be found here: ${formatUtils.pretty(this.configuration, logFile, formatUtils.Type.PATH)})`;
                                report.reportInfo(MessageName_1.MessageName.BUILD_FAILED, buildMessage);
                                // Failed optional builds are recorded as done so they
                                // aren't retried, and don't fail the install
                                if (this.optionalBuilds.has(pkg.locatorHash)) {
                                    nextBState.set(pkg.locatorHash, buildHash);
                                    return true;
                                }
                                report.reportError(MessageName_1.MessageName.BUILD_FAILED, buildMessage);
                                return false;
                            });
                        }
                    })());
                }
            }
            await Promise.all(buildPromises);
            // If we reach this code, it means that we have circular dependencies
            // somewhere. Worse, it means that the circular dependencies both have
            // build scripts, making them unsatisfiable.
            if (savedSize === buildablePackages.size) {
                const prettyLocators = Array.from(buildablePackages).map(locatorHash => {
                    const pkg = this.storedPackages.get(locatorHash);
                    if (!pkg)
                        throw new Error(`Assertion failed: The package should have been registered`);
                    return structUtils.prettyLocator(this.configuration, pkg);
                }).join(`, `);
                report.reportError(MessageName_1.MessageName.CYCLIC_DEPENDENCIES, `Some packages have circular dependencies that make their build order unsatisfiable - as a result they won't be built (affected packages are: ${prettyLocators})`);
                break;
            }
        }
        // We can now generate the bstate file, which will allow us to "remember"
        // what's the dependency tree subset that we used to build a specific
        // package (and avoid rebuilding it later if it didn't change).
        if (nextBState.size > 0) {
            const bstatePath = this.configuration.get(`bstatePath`);
            const bstateFile = Project.generateBuildStateFile(nextBState, this.storedPackages);
            await fslib_1.xfs.mkdirPromise(fslib_1.ppath.dirname(bstatePath), { recursive: true });
            await fslib_1.xfs.changeFilePromise(bstatePath, bstateFile, {
                automaticNewlines: true,
            });
        }
        else {
            await fslib_1.xfs.removePromise(bstatePath);
        }
    }
    /**
     * Runs a full install: validation hooks, the resolution step (with
     * optional immutable-lockfile enforcement), the fetch step, project
     * persistence, and the link step (with optional immutable-pattern
     * enforcement), finishing with the `afterAllInstalled` hook.
     *
     * @param opts install options; notably `report`, `immutable`, and
     *             `persistProject`
     */
    async install(opts) {
        var _a, _b;
        const nodeLinker = this.configuration.get(`nodeLinker`);
        // Transpiled optional chaining: Configuration.telemetry?.reportInstall(nodeLinker)
        (_a = Configuration_1.Configuration.telemetry) === null || _a === void 0 ? void 0 : _a.reportInstall(nodeLinker);
        const validationWarnings = [];
        const validationErrors = [];
        await this.configuration.triggerHook(hooks => {
            return hooks.validateProject;
        }, this, {
            reportWarning: (name, text) => validationWarnings.push({ name, text }),
            reportError: (name, text) => validationErrors.push({ name, text }),
        });
        const problemCount = validationWarnings.length + validationErrors.length;
        if (problemCount > 0) {
            await opts.report.startTimerPromise(`Validation step`, async () => {
                await this.validateEverything({ validationWarnings, validationErrors, report: opts.report });
            });
        }
        // Deactivate all package extensions; the resolution step re-activates
        // the ones actually used so they can be reported after it runs
        for (const extensionsByIdent of this.configuration.packageExtensions.values())
            for (const [, extensionsByRange] of extensionsByIdent)
                for (const extension of extensionsByRange)
                    extension.active = false;
        await opts.report.startTimerPromise(`Resolution step`, async () => {
            const lockfilePath = fslib_1.ppath.join(this.cwd, this.configuration.get(`lockfileFilename`));
            // If we operate with a frozen lockfile, we take a snapshot of it to later make sure it didn't change
            let initialLockfile = null;
            if (opts.immutable) {
                try {
                    initialLockfile = await fslib_1.xfs.readFilePromise(lockfilePath, `utf8`);
                }
                catch (error) {
                    if (error.code === `ENOENT`) {
                        throw new Report_1.ReportError(MessageName_1.MessageName.FROZEN_LOCKFILE_EXCEPTION, `The lockfile would have been created by this install, which is explicitly forbidden.`);
                    }
                    else {
                        throw error;
                    }
                }
            }
            await this.resolveEverything(opts);
            if (initialLockfile !== null) {
                const newLockfile = fslib_1.normalizeLineEndings(initialLockfile, this.generateLockfile());
                if (newLockfile !== initialLockfile) {
                    // Print a unified diff of the forbidden lockfile change,
                    // then fail the install
                    const diff = diff_1.structuredPatch(lockfilePath, lockfilePath, initialLockfile, newLockfile);
                    opts.report.reportSeparator();
                    for (const hunk of diff.hunks) {
                        opts.report.reportInfo(null, `@@ -${hunk.oldStart},${hunk.oldLines} +${hunk.newStart},${hunk.newLines} @@`);
                        for (const line of hunk.lines) {
                            if (line.startsWith(`+`)) {
                                opts.report.reportError(MessageName_1.MessageName.FROZEN_LOCKFILE_EXCEPTION, formatUtils.pretty(this.configuration, line, formatUtils.Type.ADDED));
                            }
                            else if (line.startsWith(`-`)) {
                                opts.report.reportError(MessageName_1.MessageName.FROZEN_LOCKFILE_EXCEPTION, formatUtils.pretty(this.configuration, line, formatUtils.Type.REMOVED));
                            }
                            else {
                                opts.report.reportInfo(null, formatUtils.pretty(this.configuration, line, `grey`));
                            }
                        }
                    }
                    opts.report.reportSeparator();
                    throw new Report_1.ReportError(MessageName_1.MessageName.FROZEN_LOCKFILE_EXCEPTION, `The lockfile would have been modified by this install, which is explicitly forbidden.`);
                }
            }
        });
        // Report the package extensions the resolution actually used
        for (const extensionsByIdent of this.configuration.packageExtensions.values())
            for (const [, extensionsByRange] of extensionsByIdent)
                for (const extension of extensionsByRange)
                    if (extension.active)
                        (_b = Configuration_1.Configuration.telemetry) === null || _b === void 0 ? void 0 : _b.reportPackageExtension(extension.description);
        await opts.report.startTimerPromise(`Fetch step`, async () => {
            await this.fetchEverything(opts);
            if (typeof opts.persistProject === `undefined` || opts.persistProject) {
                await this.cacheCleanup(opts);
            }
        });
        if (typeof opts.persistProject === `undefined` || opts.persistProject)
            await this.persist();
        await opts.report.startTimerPromise(`Link step`, async () => {
            // Checksum the immutable patterns before and after linking to
            // verify the link step didn't modify protected artifacts
            const immutablePatterns = opts.immutable
                ? [...new Set(this.configuration.get(`immutablePatterns`))].sort()
                : [];
            const before = await Promise.all(immutablePatterns.map(async (pattern) => {
                return hashUtils.checksumPattern(pattern, { cwd: this.cwd });
            }));
            await this.linkEverything(opts);
            const after = await Promise.all(immutablePatterns.map(async (pattern) => {
                return hashUtils.checksumPattern(pattern, { cwd: this.cwd });
            }));
            for (let t = 0; t < immutablePatterns.length; ++t) {
                if (before[t] !== after[t]) {
                    opts.report.reportError(MessageName_1.MessageName.FROZEN_ARTIFACT_EXCEPTION, `The checksum for ${immutablePatterns[t]} has been modified by this install, which is explicitly forbidden.`);
                }
            }
        });
        await this.configuration.triggerHook(hooks => {
            return hooks.afterAllInstalled;
        }, this, opts);
    }
1214 generateLockfile() {
1215 // We generate the data structure that will represent our lockfile. To do this, we create a
1216 // reverse lookup table, where the key will be the resolved locator and the value will be a set
1217 // of all the descriptors that resolved to it. Then we use it to construct an optimized version
1218 // if the final object.
1219 const reverseLookup = new Map();
1220 for (const [descriptorHash, locatorHash] of this.storedResolutions.entries()) {
1221 let descriptorHashes = reverseLookup.get(locatorHash);
1222 if (!descriptorHashes)
1223 reverseLookup.set(locatorHash, descriptorHashes = new Set());
1224 descriptorHashes.add(descriptorHash);
1225 }
1226 const optimizedLockfile = {};
1227 optimizedLockfile.__metadata = {
1228 version: LOCKFILE_VERSION,
1229 };
1230 for (const [locatorHash, descriptorHashes] of reverseLookup.entries()) {
1231 const pkg = this.originalPackages.get(locatorHash);
1232 // A resolution that isn't in `originalPackages` is a virtual packages.
1233 // Since virtual packages can be derived from the information stored in
1234 // the rest of the lockfile we don't want to bother storing them.
1235 if (!pkg)
1236 continue;
1237 const descriptors = [];
1238 for (const descriptorHash of descriptorHashes) {
1239 const descriptor = this.storedDescriptors.get(descriptorHash);
1240 if (!descriptor)
1241 throw new Error(`Assertion failed: The descriptor should have been registered`);
1242 descriptors.push(descriptor);
1243 }
1244 const key = descriptors.map(descriptor => {
1245 return structUtils.stringifyDescriptor(descriptor);
1246 }).sort().join(`, `);
1247 const manifest = new Manifest_1.Manifest();
1248 manifest.version = pkg.linkType === types_1.LinkType.HARD
1249 ? pkg.version
1250 : `0.0.0-use.local`;
1251 manifest.languageName = pkg.languageName;
1252 manifest.dependencies = new Map(pkg.dependencies);
1253 manifest.peerDependencies = new Map(pkg.peerDependencies);
1254 manifest.dependenciesMeta = new Map(pkg.dependenciesMeta);
1255 manifest.peerDependenciesMeta = new Map(pkg.peerDependenciesMeta);
1256 manifest.bin = new Map(pkg.bin);
1257 let entryChecksum;
1258 const checksum = this.storedChecksums.get(pkg.locatorHash);
1259 if (typeof checksum !== `undefined`) {
1260 const cacheKeyIndex = checksum.indexOf(`/`);
1261 if (cacheKeyIndex === -1)
1262 throw new Error(`Assertion failed: Expecte the checksum to reference its cache key`);
1263 const cacheKey = checksum.slice(0, cacheKeyIndex);
1264 const hash = checksum.slice(cacheKeyIndex + 1);
1265 if (typeof optimizedLockfile.__metadata.cacheKey === `undefined`)
1266 optimizedLockfile.__metadata.cacheKey = cacheKey;
1267 if (cacheKey === optimizedLockfile.__metadata.cacheKey) {
1268 entryChecksum = hash;
1269 }
1270 else {
1271 entryChecksum = checksum;
1272 }
1273 }
1274 optimizedLockfile[key] = {
1275 ...manifest.exportTo({}, {
1276 compatibilityMode: false,
1277 }),
1278 linkType: pkg.linkType.toLowerCase(),
1279 resolution: structUtils.stringifyLocator(pkg),
1280 checksum: entryChecksum,
1281 };
1282 }
1283 const header = `${[
1284 `# This file is generated by running "yarn install" inside your project.\n`,
1285 `# Manual changes might be lost - proceed with caution!\n`,
1286 ].join(``)}\n`;
1287 return header + parsers_1.stringifySyml(optimizedLockfile);
1288 }
1289 async persistLockfile() {
1290 const lockfilePath = fslib_1.ppath.join(this.cwd, this.configuration.get(`lockfileFilename`));
1291 const lockfileContent = this.generateLockfile();
1292 await fslib_1.xfs.changeFilePromise(lockfilePath, lockfileContent, {
1293 automaticNewlines: true,
1294 });
1295 }
1296 async persistInstallStateFile() {
1297 const { accessibleLocators, optionalBuilds, storedDescriptors, storedResolutions, storedPackages, lockFileChecksum } = this;
1298 const installState = { accessibleLocators, optionalBuilds, storedDescriptors, storedResolutions, storedPackages, lockFileChecksum };
1299 const serializedState = await gzip(v8_1.default.serialize(installState));
1300 const installStatePath = this.configuration.get(`installStatePath`);
1301 await fslib_1.xfs.mkdirPromise(fslib_1.ppath.dirname(installStatePath), { recursive: true });
1302 await fslib_1.xfs.writeFilePromise(installStatePath, serializedState);
1303 }
1304 async restoreInstallState() {
1305 const installStatePath = this.configuration.get(`installStatePath`);
1306 if (!fslib_1.xfs.existsSync(installStatePath)) {
1307 await this.applyLightResolution();
1308 return;
1309 }
1310 const serializedState = await fslib_1.xfs.readFilePromise(installStatePath);
1311 const installState = v8_1.default.deserialize(await gunzip(serializedState));
1312 if (installState.lockFileChecksum !== this.lockFileChecksum) {
1313 await this.applyLightResolution();
1314 return;
1315 }
1316 Object.assign(this, installState);
1317 this.refreshWorkspaceDependencies();
1318 }
1319 async applyLightResolution() {
1320 await this.resolveEverything({
1321 lockfileOnly: true,
1322 report: new ThrowReport_1.ThrowReport(),
1323 });
1324 await this.persistInstallStateFile();
1325 }
1326 async persist() {
1327 await this.persistLockfile();
1328 await this.persistInstallStateFile();
1329 for (const workspace of this.workspacesByCwd.values()) {
1330 await workspace.persistManifest();
1331 }
1332 }
1333 async cacheCleanup({ cache, report }) {
1334 const PRESERVED_FILES = new Set([
1335 `.gitignore`,
1336 ]);
1337 if (!fslib_1.xfs.existsSync(cache.cwd))
1338 return;
1339 if (!folderUtils_1.isFolderInside(cache.cwd, this.cwd))
1340 return;
1341 for (const entry of await fslib_1.xfs.readdirPromise(cache.cwd)) {
1342 if (PRESERVED_FILES.has(entry))
1343 continue;
1344 const entryPath = fslib_1.ppath.resolve(cache.cwd, entry);
1345 if (cache.markedFiles.has(entryPath))
1346 continue;
1347 if (cache.immutable) {
1348 report.reportError(MessageName_1.MessageName.IMMUTABLE_CACHE, `${formatUtils.pretty(this.configuration, fslib_1.ppath.basename(entryPath), `magenta`)} appears to be unused and would marked for deletion, but the cache is immutable`);
1349 }
1350 else {
1351 report.reportInfo(MessageName_1.MessageName.UNUSED_CACHE_ENTRY, `${formatUtils.pretty(this.configuration, fslib_1.ppath.basename(entryPath), `magenta`)} appears to be unused - removing`);
1352 await fslib_1.xfs.removePromise(entryPath);
1353 }
1354 }
1355 cache.markedFiles.clear();
1356 }
1357}
1358exports.Project = Project;
1359/**
1360 * This function is worth some documentation. It takes a set of packages,
1361 * traverses them all, and generates virtual packages for each package that
1362 * lists peer dependencies.
1363 *
1364 * We also take advantage of the tree traversal to detect which packages are
1365 * actually used and which have disappeared, and to know which packages truly
1366 * have an optional build (since a package may be optional in one part of the
1367 * tree but not another).
1368 */
function applyVirtualResolutionMutations({ project, allDescriptors, allResolutions, allPackages, accessibleLocators = new Set(), optionalBuilds = new Set(), volatileDescriptors = new Set(), report, tolerateMissingPackages = false, }) {
    // How many times each physical package is currently being virtualized on
    // the active resolution branch (used below to detect resolution cycles).
    const virtualStack = new Map();
    // Chain of locators currently being resolved; only consulted to produce a
    // readable trace when a stack overflow is reported.
    const resolutionStack = [];
    // We'll be keeping track of all virtual descriptors; once they have all
    // been generated we'll check whether they can be consolidated into one.
    const allVirtualInstances = new Map();
    const allVirtualDependents = new Map();
    // We must keep a copy of the workspaces original dependencies, because they
    // may be overridden during the virtual package resolution - cf Dragon Test #5
    const originalWorkspaceDefinitions = new Map(project.workspaces.map(workspace => {
        const locatorHash = workspace.anchoredLocator.locatorHash;
        const pkg = allPackages.get(locatorHash);
        if (typeof pkg === `undefined`) {
            if (tolerateMissingPackages) {
                return [locatorHash, null];
            }
            else {
                throw new Error(`Assertion failed: The workspace should have an associated package`);
            }
        }
        return [locatorHash, structUtils.copyPackage(pkg)];
    }));
    // Dumps the current resolution chain into a temp log file, then aborts
    // the process with a report error pointing at that file.
    const reportStackOverflow = () => {
        const logDir = fslib_1.xfs.mktempSync();
        const logFile = fslib_1.ppath.join(logDir, `stacktrace.log`);
        const maxSize = String(resolutionStack.length + 1).length;
        const content = resolutionStack.map((locator, index) => {
            const prefix = `${index + 1}.`.padStart(maxSize, ` `);
            return `${prefix} ${structUtils.stringifyLocator(locator)}\n`;
        }).join(``);
        fslib_1.xfs.writeFileSync(logFile, content);
        throw new Report_1.ReportError(MessageName_1.MessageName.STACK_OVERFLOW_RESOLUTION, `Encountered a stack overflow when resolving peer dependencies; cf ${logFile}`);
    };
    // Resolves a descriptor to its package through allResolutions/allPackages;
    // both lookups are expected to succeed at the points where this is called.
    const getPackageFromDescriptor = (descriptor) => {
        const resolution = allResolutions.get(descriptor.descriptorHash);
        if (typeof resolution === `undefined`)
            throw new Error(`Assertion failed: The resolution should have been registered`);
        const pkg = allPackages.get(resolution);
        if (!pkg)
            throw new Error(`Assertion failed: The package could not be found`);
        return pkg;
    };
    // Thin wrapper around resolvePeerDependenciesImpl that maintains the
    // resolution trace and enforces a hard recursion limit.
    const resolvePeerDependencies = (parentLocator, first, optional) => {
        if (resolutionStack.length > 1000)
            reportStackOverflow();
        resolutionStack.push(parentLocator);
        const result = resolvePeerDependenciesImpl(parentLocator, first, optional);
        resolutionStack.pop();
        return result;
    };
    const resolvePeerDependenciesImpl = (parentLocator, first, optional) => {
        // Each locator only needs to be traversed once.
        if (accessibleLocators.has(parentLocator.locatorHash))
            return;
        accessibleLocators.add(parentLocator.locatorHash);
        // A package reached through at least one non-optional path must have
        // its build treated as required.
        if (!optional)
            optionalBuilds.delete(parentLocator.locatorHash);
        const parentPackage = allPackages.get(parentLocator.locatorHash);
        if (!parentPackage) {
            if (tolerateMissingPackages) {
                return;
            }
            else {
                throw new Error(`Assertion failed: The package (${structUtils.prettyLocator(project.configuration, parentLocator)}) should have been registered`);
            }
        }
        const newVirtualInstances = [];
        // The work is staged into four ordered pass arrays (described at each
        // push site below); every dependency contributes one closure per pass.
        const firstPass = [];
        const secondPass = [];
        const thirdPass = [];
        const fourthPass = [];
        // During this first pass we virtualize the descriptors. This allows us
        // to reference them from their sibling without being order-dependent,
        // which is required to solve cases where packages with peer dependencies
        // have peer dependencies themselves.
        for (const descriptor of Array.from(parentPackage.dependencies.values())) {
            // We shouldn't virtualize the package if it was obtained through a peer
            // dependency (which can't be the case for workspaces when resolved
            // through their top-level)
            if (parentPackage.peerDependencies.has(descriptor.identHash) && !first)
                continue;
            // We had some issues where virtual packages were incorrectly set inside
            // workspaces, causing leaks. Check the Dragon Test #5 for more details.
            if (structUtils.isVirtualDescriptor(descriptor))
                throw new Error(`Assertion failed: Virtual packages shouldn't be encountered when virtualizing a branch`);
            // Mark this package as being used (won't be removed from the lockfile)
            volatileDescriptors.delete(descriptor.descriptorHash);
            // Detect whether this package is being required
            let isOptional = optional;
            if (!isOptional) {
                const dependencyMetaSet = parentPackage.dependenciesMeta.get(structUtils.stringifyIdent(descriptor));
                if (typeof dependencyMetaSet !== `undefined`) {
                    const dependencyMeta = dependencyMetaSet.get(null);
                    if (typeof dependencyMeta !== `undefined` && dependencyMeta.optional) {
                        isOptional = true;
                    }
                }
            }
            const resolution = allResolutions.get(descriptor.descriptorHash);
            if (!resolution) {
                // Note that we can't use `getPackageFromDescriptor` (defined below,
                // because when doing the initial tree building right after loading the
                // project it's possible that we get some entries that haven't been
                // registered into the lockfile yet - for example when the user has
                // manually changed the package.json dependencies)
                if (tolerateMissingPackages) {
                    continue;
                }
                else {
                    throw new Error(`Assertion failed: The resolution (${structUtils.prettyDescriptor(project.configuration, descriptor)}) should have been registered`);
                }
            }
            const pkg = originalWorkspaceDefinitions.get(resolution) || allPackages.get(resolution);
            if (!pkg)
                throw new Error(`Assertion failed: The package (${resolution}, resolved from ${structUtils.prettyDescriptor(project.configuration, descriptor)}) should have been registered`);
            // Packages without peer dependencies don't need to be virtualized at
            // all; just recurse into them directly.
            if (pkg.peerDependencies.size === 0) {
                resolvePeerDependencies(pkg, false, isOptional);
                continue;
            }
            // The stack overflow is checked against two level because a workspace
            // may have a dev dependency on another workspace that lists the first
            // one as a regular dependency. In this case the loop will break so we
            // don't need to throw an exception.
            const stackDepth = virtualStack.get(pkg.locatorHash);
            if (typeof stackDepth === `number` && stackDepth >= 2)
                reportStackOverflow();
            let virtualizedDescriptor;
            let virtualizedPackage;
            const missingPeerDependencies = new Set();
            // First pass: create the virtual descriptor/package pair and swap it
            // in place of the original dependency on the parent.
            firstPass.push(() => {
                virtualizedDescriptor = structUtils.virtualizeDescriptor(descriptor, parentLocator.locatorHash);
                virtualizedPackage = structUtils.virtualizePackage(pkg, parentLocator.locatorHash);
                parentPackage.dependencies.delete(descriptor.identHash);
                parentPackage.dependencies.set(virtualizedDescriptor.identHash, virtualizedDescriptor);
                allResolutions.set(virtualizedDescriptor.descriptorHash, virtualizedPackage.locatorHash);
                allDescriptors.set(virtualizedDescriptor.descriptorHash, virtualizedDescriptor);
                allPackages.set(virtualizedPackage.locatorHash, virtualizedPackage);
                // Keep track of all new virtual packages since we'll want to dedupe them
                newVirtualInstances.push([pkg, virtualizedDescriptor, virtualizedPackage]);
            });
            // Second pass: satisfy each peer request from the parent's own
            // dependencies (or from the parent itself), warning when a request is
            // unfulfilled or fulfilled with an incompatible version.
            secondPass.push(() => {
                for (const peerRequest of virtualizedPackage.peerDependencies.values()) {
                    let peerDescriptor = parentPackage.dependencies.get(peerRequest.identHash);
                    // The parent itself can fulfill a peer request on its own ident.
                    if (!peerDescriptor && structUtils.areIdentsEqual(parentLocator, peerRequest)) {
                        peerDescriptor = structUtils.convertLocatorToDescriptor(parentLocator);
                        allDescriptors.set(peerDescriptor.descriptorHash, peerDescriptor);
                        allResolutions.set(peerDescriptor.descriptorHash, parentLocator.locatorHash);
                        volatileDescriptors.delete(peerDescriptor.descriptorHash);
                    }
                    // If the package also lists the same ident as a regular
                    // dependency, the regular dependency wins and the peer entry
                    // is dropped.
                    if (!peerDescriptor && virtualizedPackage.dependencies.has(peerRequest.identHash)) {
                        virtualizedPackage.peerDependencies.delete(peerRequest.identHash);
                        continue;
                    }
                    if (!peerDescriptor) {
                        if (!parentPackage.peerDependencies.has(peerRequest.identHash)) {
                            const peerDependencyMeta = virtualizedPackage.peerDependenciesMeta.get(structUtils.stringifyIdent(peerRequest));
                            if (report !== null && (!peerDependencyMeta || !peerDependencyMeta.optional)) {
                                report.reportWarning(MessageName_1.MessageName.MISSING_PEER_DEPENDENCY, `${structUtils.prettyLocator(project.configuration, parentLocator)} doesn't provide ${structUtils.prettyDescriptor(project.configuration, peerRequest)} requested by ${structUtils.prettyLocator(project.configuration, pkg)}`);
                            }
                        }
                        // Placeholder descriptor; removed again in the fourth pass.
                        peerDescriptor = structUtils.makeDescriptor(peerRequest, `missing:`);
                    }
                    virtualizedPackage.dependencies.set(peerDescriptor.identHash, peerDescriptor);
                    // Need to track when a virtual descriptor is set as a dependency in case
                    // the descriptor will be consolidated.
                    if (structUtils.isVirtualDescriptor(peerDescriptor)) {
                        const dependents = miscUtils.getSetWithDefault(allVirtualDependents, peerDescriptor.descriptorHash);
                        dependents.add(virtualizedPackage.locatorHash);
                    }
                    if (peerDescriptor.range === `missing:`) {
                        missingPeerDependencies.add(peerDescriptor.identHash);
                    }
                    else if (report !== null) {
                        // When the parent provides the peer dependency request it must be checked to ensure
                        // it is a compatible version.
                        const peerPackage = getPackageFromDescriptor(peerDescriptor);
                        if (!semverUtils.satisfiesWithPrereleases(peerPackage.version, peerRequest.range)) {
                            report.reportWarning(MessageName_1.MessageName.INCOMPATIBLE_PEER_DEPENDENCY, `${structUtils.prettyLocator(project.configuration, parentLocator)} provides ${structUtils.prettyLocator(project.configuration, peerPackage)} with version ${peerPackage.version} which doesn't satisfy ${structUtils.prettyRange(project.configuration, peerRequest.range)} requested by ${structUtils.prettyLocator(project.configuration, pkg)}`);
                        }
                    }
                }
                // Since we've had to add new dependencies we need to sort them all over again
                virtualizedPackage.dependencies = new Map(miscUtils.sortMap(virtualizedPackage.dependencies, ([identHash, descriptor]) => {
                    return structUtils.stringifyIdent(descriptor);
                }));
            });
            // Third pass: recurse into the virtual package (unless the dedupe
            // step removed it in the meantime), tracking per-package depth on
            // virtualStack so cycles can be detected above.
            thirdPass.push(() => {
                if (!allPackages.has(virtualizedPackage.locatorHash))
                    return;
                const current = virtualStack.get(pkg.locatorHash);
                const next = typeof current !== `undefined` ? current + 1 : 1;
                virtualStack.set(pkg.locatorHash, next);
                resolvePeerDependencies(virtualizedPackage, false, isOptional);
                virtualStack.set(pkg.locatorHash, next - 1);
            });
            // Fourth pass: strip the placeholder entries for peer requests that
            // were never fulfilled.
            fourthPass.push(() => {
                if (!allPackages.has(virtualizedPackage.locatorHash))
                    return;
                for (const missingPeerDependency of missingPeerDependencies) {
                    virtualizedPackage.dependencies.delete(missingPeerDependency);
                }
            });
        }
        for (const fn of [...firstPass, ...secondPass])
            fn();
        // Consolidation: keep deduping freshly created virtual packages until a
        // fixed point is reached (merging one instance can make others equal).
        let stable;
        do {
            stable = true;
            for (const [physicalLocator, virtualDescriptor, virtualPackage] of newVirtualInstances) {
                if (!allPackages.has(virtualPackage.locatorHash))
                    continue;
                const otherVirtualInstances = miscUtils.getMapWithDefault(allVirtualInstances, physicalLocator.locatorHash);
                // We take all the dependencies from the new virtual instance and
                // generate a hash from it. By checking if this hash is already
                // registered, we know whether we can trim the new version.
                const dependencyHash = hashUtils.makeHash(...[...virtualPackage.dependencies.values()].map(descriptor => {
                    const resolution = descriptor.range !== `missing:`
                        ? allResolutions.get(descriptor.descriptorHash)
                        : `missing:`;
                    if (typeof resolution === `undefined`)
                        throw new Error(`Assertion failed: Expected the resolution for ${structUtils.prettyDescriptor(project.configuration, descriptor)} to have been registered`);
                    return resolution;
                }),
                // We use the identHash to disambiguate between virtual descriptors
                // with different base idents being resolved to the same virtual package.
                // Note: We don't use the descriptorHash because the whole point of duplicate
                // virtual descriptors is that they have different `virtual:` ranges.
                // This causes the virtual descriptors with different base idents
                // to be preserved, while the virtual package they resolve to gets deduped.
                virtualDescriptor.identHash);
                const masterDescriptor = otherVirtualInstances.get(dependencyHash);
                if (typeof masterDescriptor === `undefined`) {
                    otherVirtualInstances.set(dependencyHash, virtualDescriptor);
                    continue;
                }
                // Since we're applying multiple pass, we might have already registered
                // ourselves as the "master" descriptor in the previous pass.
                if (masterDescriptor === virtualDescriptor)
                    continue;
                stable = false;
                // Drop the duplicate instance entirely...
                allPackages.delete(virtualPackage.locatorHash);
                allDescriptors.delete(virtualDescriptor.descriptorHash);
                allResolutions.delete(virtualDescriptor.descriptorHash);
                accessibleLocators.delete(virtualPackage.locatorHash);
                // ...and rewire everything that depended on it to the master copy.
                const dependents = allVirtualDependents.get(virtualDescriptor.descriptorHash) || [];
                const allDependents = [parentPackage.locatorHash, ...dependents];
                for (const dependent of allDependents) {
                    const pkg = allPackages.get(dependent);
                    if (typeof pkg === `undefined`)
                        continue;
                    pkg.dependencies.set(virtualDescriptor.identHash, masterDescriptor);
                }
            }
        } while (!stable);
        for (const fn of [...thirdPass, ...fourthPass]) {
            fn();
        }
    };
    // Entry point of the traversal: start from each workspace; their anchored
    // descriptors are always considered used.
    for (const workspace of project.workspaces) {
        volatileDescriptors.delete(workspace.anchoredDescriptor.descriptorHash);
        resolvePeerDependencies(workspace.anchoredLocator, true, false);
    }
}