"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports._cacheRepoAssure = exports._REMOTE_REPO_URL = exports._LAST_UPDATED_FILE = exports._CACHE_REPO_GIT_DIR = exports._CACHE_REPO_EXPIRY = exports._CACHE_REPO_DIR = exports.TEST_FILE_NAME_RE = void 0;
exports._ensureCacheRepo = ensureCacheRepo;
exports.filterLibDefs = filterLibDefs;
exports.getCacheLibDefs = getCacheLibDefs;
exports.getLibDefs = getLibDefs;
exports.parseRepoDirItem = parseRepoDirItem;
exports.updateCacheRepo = updateCacheRepo;

var _semver = _interopRequireDefault(require("semver"));

var _git = require("./git.js");

var _fileUtils = require("./fileUtils.js");

var _node = require("./node.js");

var _semver2 = require("./semver.js");

var _flowVersion = require("./flowVersion.js");

var _ValidationError = require("./ValidationError");

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

const P = Promise;
const TEST_FILE_NAME_RE = /^test_.*\.js$/;
exports.TEST_FILE_NAME_RE = TEST_FILE_NAME_RE;

const CACHE_DIR = _node.path.join(_node.os.homedir(), '.flow-typed');

const CACHE_REPO_DIR = _node.path.join(CACHE_DIR, 'repo');

exports._CACHE_REPO_DIR = CACHE_REPO_DIR;
const REMOTE_REPO_URL = 'https://github.com/flowtype/flow-typed.git';
exports._REMOTE_REPO_URL = REMOTE_REPO_URL;

const LAST_UPDATED_FILE = _node.path.join(CACHE_DIR, 'lastUpdated');

exports._LAST_UPDATED_FILE = LAST_UPDATED_FILE;

async function cloneCacheRepo(verbose) {
  await (0, _fileUtils.mkdirp)(CACHE_REPO_DIR);

  try {
    await (0, _git.cloneInto)(REMOTE_REPO_URL, CACHE_REPO_DIR);
  } catch (e) {
    writeVerbose(verbose, 'ERROR: Unable to clone the local cache repo.');
    throw e;
  }

  await _node.fs.writeFile(LAST_UPDATED_FILE, String(Date.now()));
}

const CACHE_REPO_GIT_DIR = _node.path.join(CACHE_REPO_DIR, '.git');

exports._CACHE_REPO_GIT_DIR = CACHE_REPO_GIT_DIR;

async function rebaseCacheRepo(verbose) {
  if ((await _node.fs.exists(CACHE_REPO_DIR)) && (await _node.fs.exists(CACHE_REPO_GIT_DIR))) {
    try {
      await (0, _git.rebaseRepoMainline)(CACHE_REPO_DIR);
    } catch (e) {
      writeVerbose(verbose, 'ERROR: Unable to rebase the local cache repo. ' + e.message);
      return false;
    }

    await _node.fs.writeFile(LAST_UPDATED_FILE, String(Date.now()));
    return true;
  } else {
    await cloneCacheRepo(verbose);
    return true;
  }
}
/**
 * Utility wrapper for ensureCacheRepo with an update expiry of 0 hours.
 */


async function updateCacheRepo(verbose) {
  return await ensureCacheRepo(verbose, 0);
}
/**
 * Ensure that the CACHE_REPO_DIR exists and is recently rebased.
 * (else: create/rebase it)
 */


const CACHE_REPO_EXPIRY = 1000 * 60; // 1 minute

exports._CACHE_REPO_EXPIRY = CACHE_REPO_EXPIRY;
const _cacheRepoAssure = {
  lastAssured: 0,
  pendingAssure: Promise.resolve()
};
exports._cacheRepoAssure = _cacheRepoAssure;

async function ensureCacheRepo(verbose, cacheRepoExpiry = CACHE_REPO_EXPIRY) {
  // Only re-run rebase checks if a check hasn't been run in the last 5 minutes
  if (_cacheRepoAssure.lastAssured + 5 * 1000 * 60 >= Date.now()) {
    return _cacheRepoAssure.pendingAssure;
  }

  _cacheRepoAssure.lastAssured = Date.now();
  const prevAssure = _cacheRepoAssure.pendingAssure;
  return _cacheRepoAssure.pendingAssure = prevAssure.then(() => async function () {
    const repoDirExists = _node.fs.exists(CACHE_REPO_DIR);

    const repoGitDirExists = _node.fs.exists(CACHE_REPO_GIT_DIR);

    if (!(await repoDirExists) || !(await repoGitDirExists)) {
      writeVerbose(verbose, '• flow-typed cache not found, fetching from GitHub...', false);
      await cloneCacheRepo(verbose);
      writeVerbose(verbose, 'done.');
    } else {
      let lastUpdated = 0;

      if (await _node.fs.exists(LAST_UPDATED_FILE)) {
        // If the LAST_UPDATED_FILE has anything other than just a number in
        // it, just assume we need to update.
        const lastUpdatedRaw = await _node.fs.readFile(LAST_UPDATED_FILE);
        const lastUpdatedNum = parseInt(lastUpdatedRaw, 10);

        if (String(lastUpdatedNum) === String(lastUpdatedRaw)) {
          lastUpdated = lastUpdatedNum;
        }
      }

      if (lastUpdated + cacheRepoExpiry < Date.now()) {
        writeVerbose(verbose, '• rebasing flow-typed cache...', false);
        const rebaseSuccessful = await rebaseCacheRepo(verbose);

        if (rebaseSuccessful) {
          writeVerbose(verbose, 'done.');
        } else {
          writeVerbose(verbose, "\nNOTE: Unable to rebase local cache! If you don't currently " + "have internet connectivity, no worries -- we'll update the " + 'local cache the next time you do.\n');
        }
      }
    }
  }());
} // Exported for tests -- since we really want this part well-tested.
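
// Illustrative sketch (not part of the original module): how a caller might
// drive the cache logic above. `ensureCacheRepo` throttles itself via
// `_cacheRepoAssure` (at most one check per 5 minutes per process) and only
// clones/rebases when the lastUpdated timestamp is older than `cacheRepoExpiry`;
// `updateCacheRepo` is the same call with an expiry of 0. The helper name below
// is hypothetical; `verbose` is any writable stream, or null for silence.
async function _exampleRefreshCache() {
  // Clones the repo if missing, otherwise rebases it when the lastUpdated
  // timestamp is older than CACHE_REPO_EXPIRY (1 minute).
  await ensureCacheRepo(process.stdout);
  // Same as ensureCacheRepo with an expiry of 0; note that the 5-minute
  // in-process throttle above still short-circuits back-to-back calls.
  await updateCacheRepo(process.stdout);
}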


async function addLibDefs(pkgDirPath, libDefs) {
  const parsedDirItem = parseRepoDirItem(pkgDirPath);
  (await parseLibDefsFromPkgDir(parsedDirItem, pkgDirPath)).forEach(libDef => libDefs.push(libDef));
}
/**
 * Given a 'definitions/...' dir, return a list of LibDefs that it contains.
 */


async function getLibDefs(defsDir) {
  const libDefs = [];
  const defsDirItems = await _node.fs.readdir(defsDir);
  await P.all(defsDirItems.map(async item => {
    if ((0, _fileUtils.isExcludedFile)(item)) return;

    const itemPath = _node.path.join(defsDir, item);

    const itemStat = await _node.fs.stat(itemPath);

    if (itemStat.isDirectory()) {
      if (item.charAt(0) === '@') {
        // directory is of the form '@<scope>', so go one level deeper
        const scope = item;
        const defsDirItems = await _node.fs.readdir(itemPath);
        await P.all(defsDirItems.map(async item => {
          if ((0, _fileUtils.isExcludedFile)(item)) return;

          const itemPath = _node.path.join(defsDir, scope, item);

          const itemStat = await _node.fs.stat(itemPath);

          if (itemStat.isDirectory()) {
            // itemPath is a lib dir
            await addLibDefs(itemPath, libDefs);
          } else {
            const error = `Expected only directories in the 'definitions/npm/@<scope>' directory! Please remove or change ${itemPath}`;
            throw new _ValidationError.ValidationError(error);
          }
        }));
      } else {
        // itemPath is a lib dir
        await addLibDefs(itemPath, libDefs);
      }
    } else {
      const error = `Expected only directories in the 'definitions/npm' directory! Please remove or change ${itemPath}`;
      throw new _ValidationError.ValidationError(error);
    }
  }));
  return libDefs;
}
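
// Illustrative sketch (not part of the original module): `getLibDefs` takes the
// path of a 'definitions/npm' directory from a flow-typed checkout and resolves
// to a flat list of LibDef records, recursing one level into '@<scope>'
// directories. The helper name and checkout path below are hypothetical.
async function _exampleGetLibDefs() {
  const defs = await getLibDefs('/tmp/flow-typed/definitions/npm');
  // Each LibDef carries pkgName, pkgVersionStr, flowVersion(Str), path, testFilePaths.
  defs.forEach(def => console.log(def.pkgName, def.pkgVersionStr, def.flowVersionStr));
  return defs;
}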

function parsePkgFlowDirVersion(pkgFlowDirPath) {
  const pkgFlowDirName = _node.path.basename(pkgFlowDirPath);

  return (0, _flowVersion.parseDirString)(pkgFlowDirName);
}
/**
 * Given a parsed package name and version and a path to the package directory
 * on disk, scan the directory and generate a list of LibDefs for each
 * flow-versioned definition file.
 */


async function parseLibDefsFromPkgDir({
  pkgName,
  pkgVersion
}, pkgDirPath) {
  const pkgVersionStr = (0, _semver2.versionToString)(pkgVersion);
  const pkgDirItems = await _node.fs.readdir(pkgDirPath);
  const commonTestFiles = [];
  const flowDirs = [];
  pkgDirItems.forEach(pkgDirItem => {
    const pkgDirItemPath = _node.path.join(pkgDirPath, pkgDirItem);

    const pkgDirItemStat = _node.fs.statSync(pkgDirItemPath);

    if (pkgDirItemStat.isFile()) {
      if (_node.path.extname(pkgDirItem) === '.swp') {
        return;
      }

      const isValidTestFile = validateTestFile(pkgDirItemPath);

      if (isValidTestFile) {
        commonTestFiles.push(pkgDirItemPath);
      }
    } else if (pkgDirItemStat.isDirectory()) {
      flowDirs.push([pkgDirItemPath, parsePkgFlowDirVersion(pkgDirItemPath)]);
    } else {
      throw new _ValidationError.ValidationError('Unexpected directory item: ' + pkgDirItemPath);
    }
  });

  if (!(0, _flowVersion.disjointVersionsAll)(flowDirs.map(([_, ver]) => ver))) {
    throw new _ValidationError.ValidationError(`Flow versions not disjoint on ${pkgName}`);
  }

  if (flowDirs.length === 0) {
    throw new _ValidationError.ValidationError(`No libdef files found in ${pkgDirPath}!`);
  }

  const libDefs = [];
  await P.all(flowDirs.map(async ([flowDirPath, flowVersion]) => {
    var _configPath;

    const testFilePaths = [].concat(commonTestFiles);
    const basePkgName = pkgName.charAt(0) === '@' ? pkgName.split(_node.path.sep).pop() : pkgName;
    const libDefFileName = pkgVersionStr === 'vx.x.x' ? `${basePkgName}.js` : `${basePkgName}_${pkgVersionStr}.js`;
    let libDefFilePath;
    let configPath;
    (await _node.fs.readdir(flowDirPath)).forEach(flowDirItem => {
      const flowDirItemPath = _node.path.join(flowDirPath, flowDirItem);

      const flowDirItemStat = _node.fs.statSync(flowDirItemPath);

      if (flowDirItemStat.isFile()) {
        // If we couldn't discern the package name, we've already recorded an
        // error for that -- so try to avoid spurious downstream errors.
        if (pkgName === 'ERROR') {
          return;
        }

        if (_node.path.extname(flowDirItem) === '.swp') {
          return;
        }

        if (flowDirItem === 'config.json') {
          configPath = _node.path.join(flowDirPath, flowDirItem);
          return;
        }

        if (flowDirItem === libDefFileName) {
          libDefFilePath = _node.path.join(flowDirPath, flowDirItem);
          return;
        }

        const isValidTestFile = validateTestFile(flowDirItemPath);

        if (isValidTestFile) {
          testFilePaths.push(flowDirItemPath);
        }
      } else {
        const error = 'Unexpected directory item: ' + flowDirItemPath;
        throw new _ValidationError.ValidationError(error);
      }
    });

    if (libDefFilePath == null) {
      libDefFilePath = _node.path.join(flowDirPath, libDefFileName);

      if (pkgName !== 'ERROR') {
        const error = `No libdef file found in ${flowDirPath}`;
        throw new _ValidationError.ValidationError(error);
      }

      return;
    }

    libDefs.push({
      pkgName,
      pkgVersionStr,
      configPath: (_configPath = configPath) !== null && _configPath !== void 0 ? _configPath : null,
      flowVersion: flowVersion,
      flowVersionStr: (0, _flowVersion.toDirString)(flowVersion),
      path: libDefFilePath,
      testFilePaths
    });
  }));
  return libDefs;
}
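
// Illustrative sketch (not part of the original module): the on-disk layout that
// `parseLibDefsFromPkgDir` scans. The package, version, and file names here are
// made up; only the naming rules come from the code above.
//
//   definitions/npm/left-pad_v1.x.x/
//     test_left-pad.js          <- common test file, shared across all flow dirs
//     flow_v0.83.x-/
//       left-pad_v1.x.x.js      <- the libdef itself; must match `libDefFileName` above
//       config.json             <- optional, surfaced as `configPath`
//       test_left-pad-083.js    <- flow-version-specific test file
async function _exampleParsePkgDir() {
  const pkgDirPath = '/tmp/flow-typed/definitions/npm/left-pad_v1.x.x';
  return parseLibDefsFromPkgDir(parseRepoDirItem(pkgDirPath), pkgDirPath);
}
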
/**
 * Given the path to a directory item in the 'definitions' directory, parse the
 * directory's name into a package name and version.
 */


const REPO_DIR_ITEM_NAME_RE = /^(.*)_v([0-9]+)\.([0-9]+|x)\.([0-9]+|x)(-.*)?$/;

function parseRepoDirItem(dirItemPath) {
  const dirItem = _node.path.basename(dirItemPath); // env definitions don't have versions nor need any sort of name validation


  if (dirItemPath.includes('definitions/environments')) {
    return {
      pkgName: dirItem,
      pkgVersion: {
        major: 'x',
        minor: 'x',
        patch: 'x',
        prerel: null
      }
    };
  }

  const itemMatches = dirItem.match(REPO_DIR_ITEM_NAME_RE);

  if (itemMatches == null) {
    const error = `'${dirItem}' is a malformed definitions/npm/ directory name! ` + `Expected the name to be formatted as <PKGNAME>_v<MAJOR>.<MINOR>.<PATCH>`;
    throw new _ValidationError.ValidationError(error);
  }

  const [_, pkgName, majorStr, minorStr, patchStr, prerel] = itemMatches;

  const item = _node.path.dirname(dirItemPath).split(_node.path.sep).pop();

  const major = validateVersionNumPart(majorStr, 'major', dirItemPath);
  const minor = validateVersionPart(minorStr, 'minor', dirItemPath);
  const patch = validateVersionPart(patchStr, 'patch', dirItemPath);
  return {
    pkgName: item.charAt(0) === '@' ? `${item}${_node.path.sep}${pkgName}` : pkgName,
    pkgVersion: {
      major,
      minor,
      patch,
      prerel: prerel != null ? prerel.substr(1) : prerel
    }
  };
}
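
// Illustrative sketch (not part of the original module): what `parseRepoDirItem`
// returns for a few hypothetical, POSIX-style paths.
function _exampleParseRepoDirItem() {
  return [
    parseRepoDirItem('/x/definitions/npm/lodash_v4.x.x'),
    // => { pkgName: 'lodash', pkgVersion: { major: 4, minor: 'x', patch: 'x', prerel: undefined } }
    parseRepoDirItem('/x/definitions/npm/@storybook/addons_v5.2.x'),
    // => { pkgName: '@storybook/addons', pkgVersion: { major: 5, minor: 2, patch: 'x', prerel: undefined } }
    parseRepoDirItem('/x/definitions/environments/node')
    // => { pkgName: 'node', pkgVersion: { major: 'x', minor: 'x', patch: 'x', prerel: null } }
  ];
}
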
/**
 * Given a path to an assumed test file, ensure that it is named as expected.
 */


function validateTestFile(testFilePath) {
  const testFileName = _node.path.basename(testFilePath);

  return TEST_FILE_NAME_RE.test(testFileName);
}
/**
 * Given a number-only part of a version string (i.e. the `major` part), parse
 * the string into a number.
 */


function validateVersionNumPart(part, partName, context) {
  const num = parseInt(part, 10);

  if (String(num) !== part) {
    const error = `'${context}': Invalid ${partName} number: '${part}'. Expected a number.`;
    throw new _ValidationError.ValidationError(error);
  }

  return num;
}
/**
 * Given a number-or-wildcard part of a version string (i.e. a `minor` or
 * `patch` part), parse the string into either a number or 'x'.
 */


function validateVersionPart(part, partName, context) {
  if (part === 'x') {
    return part;
  }

  return validateVersionNumPart(part, partName, context);
}
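
// Illustrative sketch (not part of the original module): the two validators above
// differ only in that `validateVersionPart` lets the wildcard 'x' through.
//   validateVersionNumPart('2', 'major', 'some/dir')  => 2
//   validateVersionPart('x', 'minor', 'some/dir')     => 'x'
//   validateVersionNumPart('x', 'major', 'some/dir')  => throws ValidationError
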
/**
 * Given a path to a 'definitions' dir, assert that the currently-running
 * version of the CLI is compatible with the repo.
 */


async function verifyCLIVersion(defsDirPath) {
  var _semver$coerce;

  const metadataFilePath = _node.path.join(defsDirPath, '.cli-metadata.json');

  const metadata = await _node.fs.readJson(metadataFilePath);

  if (!metadata.compatibleCLIRange) {
    throw new Error(`Unable to find the 'compatibleCLIRange' property in ` + `${metadataFilePath}. You might need to update to a newer version of ` + `the flow-typed CLI.`);
  }

  const minCLIVersion = metadata.compatibleCLIRange;

  const thisCLIVersion = require('../../package.json').version;

  if (!_semver.default.satisfies((_semver$coerce = _semver.default.coerce(thisCLIVersion)) !== null && _semver$coerce !== void 0 ? _semver$coerce : thisCLIVersion, minCLIVersion)) {
    throw new Error(`Please upgrade your CLI version! This CLI is version ` + `${thisCLIVersion}, but the latest flow-typed definitions are only ` + `compatible with flow-typed@${minCLIVersion}`);
  }
}
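
// Illustrative sketch (not part of the original module): the shape of the
// '.cli-metadata.json' file that `verifyCLIVersion` reads. The range value is
// made up; only the `compatibleCLIRange` key is assumed by the code above.
//   {
//     "compatibleCLIRange": "^3.0.0"
//   }
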
/**
 * Helper function to write verbose output only when an output stream was
 * provided.
 */


function writeVerbose(stream, msg, writeNewline = true) {
  if (stream != null) {
    stream.write(msg + (writeNewline ? '\n' : ''));
  }
}
/**
 * Get a list of LibDefs from the flow-typed cache repo checkout.
 *
 * If the repo checkout does not exist or is out of date, it will be
 * created/updated automatically first.
 */


const CACHE_REPO_DEFS_DIR = _node.path.join(CACHE_REPO_DIR, 'definitions', 'npm');

async function getCacheLibDefs(verbose = process.stdout) {
  await ensureCacheRepo(verbose);
  await verifyCLIVersion(_node.path.join(CACHE_REPO_DIR, 'definitions'));
  return getLibDefs(CACHE_REPO_DEFS_DIR);
}
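
// Illustrative sketch (not part of the original module): the typical entry point.
// `getCacheLibDefs` clones or rebases ~/.flow-typed/repo as needed, checks CLI
// compatibility, then scans 'definitions/npm' inside that checkout. The helper
// name below is hypothetical.
async function _exampleGetCacheLibDefs() {
  const defs = await getCacheLibDefs(null); // pass null to suppress progress output
  console.log(`found ${defs.length} libdefs in the cache`);
  return defs;
}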

function packageNameMatch(a, b) {
  return a.toLowerCase() === b.toLowerCase();
}

function libdefMatchesPackageVersion(pkgSemver, defVersionRaw) {
  // The libdef version should be treated as a semver prefixed by a caret
  // (i.e. "foo_v2.2.x" is the same range as "^2.2.x")
  // UNLESS it is prefixed by the equals character (i.e. "foo_=v2.2.x")
  let defVersion = defVersionRaw;

  if (defVersionRaw[0] !== '=' && defVersionRaw[0] !== '^') {
    defVersion = '^' + defVersionRaw;
  }

  if (_semver.default.valid(pkgSemver)) {
    // test the single package version against the libdef range
    return _semver.default.satisfies(pkgSemver, defVersion);
  }

  if (_semver.default.valid(defVersion)) {
    // test the single defVersion against the package range
    return _semver.default.satisfies(defVersion, pkgSemver);
  }

  const pkgRange = new _semver.default.Range(pkgSemver);
  const defRange = new _semver.default.Range(defVersion);

  if (defRange.set[0].length !== 2) {
    throw Error('Invalid libDef version. It appears to be a non-contiguous range.');
  }

  const defLowerB = defRange.set[0][0].semver.version;
  const defUpperB = defRange.set[0][1].semver.version;

  if (_semver.default.gtr(defLowerB, pkgSemver) || _semver.default.ltr(defUpperB, pkgSemver)) {
    return false;
  }

  const pkgLowerB = pkgRange.set[0][0].semver.version;
  return defRange.test(pkgLowerB);
}
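
// Illustrative sketch (not part of the original module): how the implicit caret
// in `libdefMatchesPackageVersion` plays out. The versions below are made up.
//   libdefMatchesPackageVersion('2.4.1', 'v2.2.x')   => true  ('v2.2.x' behaves like '^2.2.x')
//   libdefMatchesPackageVersion('3.0.0', 'v2.2.x')   => false (outside '^2.2.x')
//   libdefMatchesPackageVersion('2.4.1', '=v2.2.x')  => false (the '=' prefix keeps the range exact)
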
/**
 * Filter a given list of LibDefs down using a specified filter.
 */


function filterLibDefs(defs, filter) {
  return defs.filter(def => {
    let filterMatch = false;

    switch (filter.type) {
      case 'exact':
        filterMatch = packageNameMatch(def.pkgName, filter.pkgName) && libdefMatchesPackageVersion(filter.pkgVersionStr, def.pkgVersionStr);
        break;

      case 'exact-name':
        filterMatch = packageNameMatch(def.pkgName, filter.term);
        break;

      case 'fuzzy':
        filterMatch = def.pkgName.toLowerCase().indexOf(filter.term.toLowerCase()) !== -1;
        break;

      default:
        throw new Error(`'${filter.type}' is an unexpected filter type! This should never ` + `happen!`);
    }

    if (!filterMatch) {
      return false;
    }

    const filterFlowVerStr = filter.flowVersionStr;

    if (filterFlowVerStr) {
      const {
        flowVersion
      } = def;

      switch (flowVersion.kind) {
        case 'all':
          return _semver.default.satisfies(filterFlowVerStr, def.flowVersionStr);

        case 'specific':
          return _semver.default.satisfies(filterFlowVerStr, def.flowVersionStr);

        case 'ranged':
          const {
            upper
          } = flowVersion;

          if (upper) {
            const lowerSpecific = {
              kind: 'ranged',
              upper: null,
              lower: flowVersion.lower
            };
            const lowerSpecificSemver = (0, _flowVersion.toSemverString)(lowerSpecific);
            const upperSpecificSemver = (0, _flowVersion.toSemverString)({
              kind: 'specific',
              ver: upper
            });
            return _semver.default.satisfies(filterFlowVerStr, lowerSpecificSemver) && _semver.default.satisfies(filterFlowVerStr, upperSpecificSemver);
          } else {
            return _semver.default.satisfies(filterFlowVerStr, (0, _flowVersion.toSemverString)(def.flowVersion));
          }

        default:
          flowVersion;
          throw new Error('Unexpected FlowVersion kind!');
      }
    }

    return true;
  }).sort((a, b) => {
    const aZeroed = a.pkgVersionStr.replace(/x/g, '0');
    const bZeroed = b.pkgVersionStr.replace(/x/g, '0');

    const pkgCompare = _semver.default.compare(aZeroed, bZeroed);

    if (pkgCompare !== 0) return -pkgCompare;
    const aFlowVersionStr = a.flowVersionStr;
    const bFlowVersionStr = b.flowVersionStr;
    if (aFlowVersionStr == null) return 1;
    if (bFlowVersionStr == null) return -1;
    const aFlowVersion = (0, _flowVersion.parseDirString)(aFlowVersionStr);
    const bFlowVersion = (0, _flowVersion.parseDirString)(bFlowVersionStr);
    return -1 * (0, _flowVersion.compareFlowVersionAsc)(aFlowVersion, bFlowVersion);
  });
}
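
// Illustrative sketch (not part of the original module): the three filter shapes
// accepted by `filterLibDefs`, wired to the cache helpers above. The helper name,
// package names, and versions are made up.
async function _exampleFilterLibDefs() {
  const defs = await getCacheLibDefs(null); // quiet mode

  // Exact package + version match. An optional `flowVersionStr` property can
  // further narrow results to libdefs supporting a given Flow version.
  const exact = filterLibDefs(defs, {
    type: 'exact',
    pkgName: 'lodash',
    pkgVersionStr: '4.17.21'
  });

  // Name-only and substring matches.
  const byName = filterLibDefs(defs, { type: 'exact-name', term: 'lodash' });
  const fuzzy = filterLibDefs(defs, { type: 'fuzzy', term: 'react' });

  return { exact, byName, fuzzy };
}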