UNPKG

5.8 kBJavaScriptView Raw
1"use strict";
2/**
3 * @license
4 * Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
5 * This code may only be used under the BSD style license found at
6 * http://polymer.github.io/LICENSE.txt
7 * The complete set of authors may be found at
8 * http://polymer.github.io/AUTHORS.txt
9 * The complete set of contributors may be found at
10 * http://polymer.github.io/CONTRIBUTORS.txt
11 * Code distributed by Google as part of the polymer project is also
12 * subject to an additional IP rights grant found at
13 * http://polymer.github.io/PATENTS.txt
14 */
// TypeScript-emitted helper that drives a generator function as if it were an
// async function: each `yield`ed value is wrapped in a Promise whose settlement
// resumes the generator, until the generator completes or throws.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    return new (P || (P = Promise))(function (resolve, reject) {
        // Resume the generator with the awaited value; reject on a synchronous throw.
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        // Propagate a rejection back into the generator so try/catch inside it works.
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Either settle with the generator's return value, or await the yielded one.
        function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
23Object.defineProperty(exports, "__esModule", { value: true });
24const fs = require("fs");
25const path = require("path");
26const analyzer_1 = require("../core/analyzer");
27const fs_url_loader_1 = require("../url-loader/fs-url-loader");
28const overlay_loader_1 = require("../url-loader/overlay-loader");
29const now = require("performance-now");
// Benchmark setup: collect the `main` entry of every bower package checked out
// under bower_components, and expose a synthetic `ephemeral.html` (which
// imports all of them) to the analyzer via an in-memory overlay on top of the
// filesystem loader.
const bowerDir = path.resolve(__dirname, `../../bower_components`);
const inMemoryOverlay = new overlay_loader_1.InMemoryOverlayUrlLoader(new fs_url_loader_1.FsUrlLoader(bowerDir));
const analyzer = new analyzer_1.Analyzer({ urlLoader: inMemoryOverlay });
const filesToAnalyze = [];
for (const packageDir of fs.readdirSync(bowerDir)) {
    let bowerJson;
    try {
        const manifestPath = path.join(bowerDir, packageDir, 'bower.json');
        bowerJson = JSON.parse(fs.readFileSync(manifestPath, 'utf-8'));
    }
    catch (e) {
        // No readable/parseable bower.json — not a bower package; skip it.
        continue;
    }
    // `main` may be absent, a single path, or an array of paths.
    const mains = [].concat(bowerJson.main || []);
    for (const mainFile of mains) {
        if (existsSync(path.join(bowerDir, packageDir, mainFile))) {
            filesToAnalyze.push(path.join(packageDir, mainFile));
        }
    }
}
// Synthetic entry point importing every discovered file.
const fakeFileContents = filesToAnalyze.map((fn) => `<link rel="import" href="${fn}">`).join('\n');
inMemoryOverlay.urlContentsMap.set(analyzer.resolveUrl('ephemeral.html'), fakeFileContents);
/**
 * Returns true if `fn` exists on disk (file or directory), false otherwise.
 *
 * The original hand-rolled this with `fs.statSync` inside a try/catch;
 * `fs.existsSync` provides exactly that contract directly (it returns false
 * for any path that cannot be stat'd, never throws).
 *
 * @param {string} fn - path to check.
 * @returns {boolean} whether the path exists.
 */
function existsSync(fn) {
    return fs.existsSync(fn);
}
/**
 * Left-pads `str` with spaces until it is at least `num` characters long.
 *
 * The original recursed once per missing character; `String.prototype.padStart`
 * does the identical job in one call (and cannot blow the stack for large
 * widths). Strings already `num` chars or longer are returned unchanged.
 *
 * @param {string} str - string to pad.
 * @param {number} num - minimum resulting length.
 * @returns {string} the padded string.
 */
function padLeft(str, num) {
    return str.padStart(num);
}
// Formats a byte count as mebibytes with one decimal place, e.g. "12.3MiB".
function MiB(usage) {
    const mebibytes = usage / (1024 * 1024);
    return `${mebibytes.toFixed(1)}MiB`;
}
/**
 * Runs the benchmark: repeatedly re-analyzes the synthetic `ephemeral.html`
 * entry point (which imports every discovered element), prints per-run resolve
 * timings and memory statistics, then exits with code 1 if memory retained
 * across 100 further incremental analyses exceeds the leak threshold.
 *
 * Requires `node --expose-gc` so `global.gc` is available for stable
 * memory measurements.
 *
 * @returns {Promise<void>}
 */
async function measure() {
    if (!global.gc) {
        throw new Error('This benchmark must be run with node --expose-gc.\n' +
            ' Just do:\n npm run benchmark');
    }
    global.gc();
    const initialMemUse = process.memoryUsage().rss;
    console.log(`Initial rss: ${MiB(initialMemUse)}`);
    const start = now();
    let document;
    const measurements = [];
    // Ten timed full invalidate-and-analyze cycles.
    for (let run = 0; run < 10; run++) {
        const before = now();
        await analyzer.filesChanged(['ephemeral.html']);
        document = await analyzer.analyze(['ephemeral.html']);
        measurements.push(now() - before);
    }
    global.gc();
    const afterInitialAnalyses = process.memoryUsage().rss;
    printMeasurements(measurements);
    console.log(`\n\n\n${document.getFeatures().size} total features resolved.`);
    console.log(`${((now() - start) / 1000).toFixed(2)} seconds total elapsed time`);
    // 100 more untimed incremental cycles, to expose memory retained per cycle.
    for (let run = 0; run < 100; run++) {
        await analyzer.filesChanged(['ephemeral.html']);
        await analyzer.analyze(['ephemeral.html']);
    }
    global.gc();
    const afterMoreAnalyses = process.memoryUsage().rss;
    console.log(`Additional memory used in analyzing all Polymer-owned code: ${MiB(afterInitialAnalyses - initialMemUse)}`);
    const leakedMemory = afterMoreAnalyses - afterInitialAnalyses;
    console.log(`Additional memory used after 100 more incremental analyses: ${MiB(leakedMemory)}`);
    // TODO(rictic): looks like we've got a memory leak. Need to track this down.
    // This should be < 10MiB, not < 100 MiB.
    const threshold = 300 * (1024 * 1024);
    if (leakedMemory > threshold) {
        console.error(`\n\n==========================================\n` +
            `ERROR: Leaked ${MiB(leakedMemory)}, ` +
            `which is more than the threshold of ${MiB(threshold)}. ` +
            `Exiting with error code 1.` +
            `\n==========================================\n\n`);
        process.exit(1);
    }
}
116function printMeasurements(measurements) {
117 console.log(`\n\n\n\n
118 The most important thing to benchmark is the resolve step, as everything
119 else is cacheable. Here are times for resolving every element in the
120 PolymerElements org.
121
122 The total time for this benchmark will also include the initial parse and
123 scan and so should be much much longer.
124 `);
125 console.log(`${padLeft('ms to analyze file that imports all polymer team\'s elements', 10)}}`);
126 for (const elapsed of measurements) {
127 console.log(`${padLeft(elapsed.toFixed(0), 10)}`);
128 }
129}
// Kick off the benchmark; on any failure, print the stack and exit non-zero.
measure().catch(function (err) {
    console.log(err.stack);
    process.exit(1);
});
134//# sourceMappingURL=parse-all-benchmark.js.map
\No newline at end of file