1 | "use strict";
|
2 |
|
3 |
|
4 |
|
5 |
|
6 |
|
7 |
|
8 |
|
9 |
|
10 |
|
11 |
|
12 |
|
13 |
|
14 |
|
// Compiled-output helper emitted by the TypeScript compiler: drives a
// generator-based coroutine to completion as a Promise (the downleveled
// form of async/await). Generated code -- do not edit by hand.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    return new (P || (P = Promise))(function (resolve, reject) {
        // Resume the generator with the value the awaited promise fulfilled with.
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        // Throw the awaited promise's rejection back into the generator body.
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Step the coroutine: settle the outer promise when done, otherwise wrap
        // the yielded value in a promise and continue once it settles.
        function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
23 | Object.defineProperty(exports, "__esModule", { value: true });
|
24 | const fs = require("fs");
|
25 | const path = require("path");
|
26 | const analyzer_1 = require("../core/analyzer");
|
27 | const fs_url_loader_1 = require("../url-loader/fs-url-loader");
|
28 | const overlay_loader_1 = require("../url-loader/overlay-loader");
|
29 | const now = require("performance-now");
|
// Benchmark fixture setup: point the analyzer at the checked-out
// bower_components tree, with an in-memory overlay layered on top so a
// synthetic entry-point file can be injected without touching disk.
const bowerDir = path.resolve(__dirname, `../../bower_components`);
const inMemoryOverlay = new overlay_loader_1.InMemoryOverlayUrlLoader(new fs_url_loader_1.FsUrlLoader(bowerDir));
const analyzer = new analyzer_1.Analyzer({ urlLoader: inMemoryOverlay });
// Collect the `main` file(s) of every bower package that has a readable,
// parseable bower.json and whose main file actually exists on disk.
const filesToAnalyze = [];
for (const packageDir of fs.readdirSync(bowerDir)) {
    let manifest;
    try {
        manifest = JSON.parse(fs.readFileSync(path.join(bowerDir, packageDir, 'bower.json'), 'utf-8'));
    }
    catch (_err) {
        // Not a bower package, or its manifest is unreadable/invalid: skip it.
        continue;
    }
    // `main` may be absent, a single string, or an array of strings;
    // concat normalizes all three shapes into a flat array.
    const mains = [].concat(manifest.main || []);
    for (const mainFile of mains) {
        if (existsSync(path.join(bowerDir, packageDir, mainFile))) {
            filesToAnalyze.push(path.join(packageDir, mainFile));
        }
    }
}
// One synthetic HTML file that imports every discovered main file, so a
// single analysis pass covers the whole tree.
const fakeFileContents = filesToAnalyze
    .map((fn) => `<link rel="import" href="${fn}">`)
    .join('\n');
inMemoryOverlay.urlContentsMap.set(analyzer.resolveUrl('ephemeral.html'), fakeFileContents);
|
/**
 * Returns true if a filesystem entry exists at `fn`, false otherwise
 * (including when it is inaccessible).
 *
 * The original hand-rolled this with fs.statSync in a try/catch; the
 * standard library already provides exactly this check.
 *
 * @param {string} fn - Path to test.
 * @returns {boolean} Whether the path exists.
 */
function existsSync(fn) {
    return fs.existsSync(fn);
}
|
/**
 * Left-pads `str` with spaces to a minimum length of `num` characters.
 * Strings already at least `num` long are returned unchanged.
 *
 * Replaces the original one-space-per-recursive-call implementation
 * (which could exhaust the stack for very large widths) with the
 * equivalent built-in String.prototype.padStart.
 *
 * @param {string} str - The string to pad.
 * @param {number} num - Minimum result length.
 * @returns {string} The padded string.
 */
function padLeft(str, num) {
    return str.padStart(num);
}
|
/**
 * Formats a byte count as mebibytes with one decimal place, e.g. "12.3MiB".
 *
 * @param {number} usage - Size in bytes.
 * @returns {string} Human-readable size.
 */
function MiB(usage) {
    const mebibytes = usage / (1024 * 1024);
    return `${mebibytes.toFixed(1)}MiB`;
}
|
/**
 * Runs the benchmark: analyzes the synthetic `ephemeral.html` (which imports
 * every resolvable bower main file) 10 times while timing each pass, then
 * runs 100 more untimed invalidate/analyze cycles to detect memory growth
 * across incremental re-analyses.
 *
 * Requires node to be started with --expose-gc (so `global.gc` exists and
 * rss readings can be taken after a forced collection). Exits the process
 * with code 1 if apparent leaked memory exceeds a fixed threshold.
 */
function measure() {
    return __awaiter(this, void 0, void 0, function* () {
        if (!global.gc) {
            throw new Error('This benchmark must be run with node --expose-gc.\n' +
                ' Just do:\n npm run benchmark');
        }
        // Collect garbage first so the baseline rss excludes startup litter.
        global.gc();
        const initialMemUse = process.memoryUsage().rss;
        console.log(`Initial rss: ${MiB(initialMemUse)}`);
        const start = now();
        let document;
        const measurements = [];
        // Timed passes: invalidate the synthetic file, then re-analyze it.
        // filesChanged() defeats caching so each iteration measures real work.
        for (let i = 0; i < 10; i++) {
            const before = now();
            yield analyzer.filesChanged(['ephemeral.html']);
            document = yield analyzer.analyze(['ephemeral.html']);
            measurements.push(now() - before);
        }
        global.gc();
        const afterInitialAnalyses = process.memoryUsage().rss;
        printMeasurements(measurements);
        console.log(`\n\n\n${document.getFeatures().size} total features resolved.`);
        console.log(`${((now() - start) / 1000).toFixed(2)} seconds total elapsed time`);
        // Untimed passes: repeated invalidate/analyze cycles should not grow
        // memory if internal caches are bounded; growth here suggests a leak.
        for (let i = 0; i < 100; i++) {
            yield analyzer.filesChanged(['ephemeral.html']);
            yield analyzer.analyze(['ephemeral.html']);
        }
        global.gc();
        const afterMoreAnalyses = process.memoryUsage().rss;
        console.log(`Additional memory used in analyzing all Polymer-owned code: ${MiB(afterInitialAnalyses - initialMemUse)}`);
        const leakedMemory = afterMoreAnalyses - afterInitialAnalyses;
        console.log(`Additional memory used after 100 more incremental analyses: ${MiB(afterMoreAnalyses - afterInitialAnalyses)}`);
        // Fail the benchmark (nonzero exit) when growth exceeds the threshold.
        const threshold = 300 * (1024 * 1024);
        if (leakedMemory > threshold) {
            console.error(`\n\n==========================================\n` +
                `ERROR: Leaked ${MiB(leakedMemory)}, ` +
                `which is more than the threshold of ${MiB(threshold)}. ` +
                `Exiting with error code 1.` +
                `\n==========================================\n\n`);
            process.exit(1);
        }
    });
}
|
/**
 * Prints an explanatory preamble, a column header, and one right-aligned
 * row per measurement.
 *
 * Fixes a bug in the original: the header line's template literal had a
 * stray trailing `}` (`...10)}}`), so a literal "}" was printed after the
 * column header. Also uses the built-in String.prototype.padStart for the
 * right-alignment instead of the hand-rolled recursive helper.
 *
 * @param {number[]} measurements - Elapsed times in milliseconds, one per
 *     analysis iteration.
 */
function printMeasurements(measurements) {
    console.log(`\n\n\n\n
The most important thing to benchmark is the resolve step, as everything
else is cacheable. Here are times for resolving every element in the
PolymerElements org.

The total time for this benchmark will also include the initial parse and
scan and so should be much much longer.
`);
    console.log('ms to analyze file that imports all polymer team\'s elements'.padStart(10));
    for (const elapsed of measurements) {
        // Right-align each value in a 10-character column.
        console.log(elapsed.toFixed(0).padStart(10));
    }
}
|
// Kick off the benchmark; on any failure, dump the stack and exit nonzero
// so CI treats the run as failed.
measure().catch(function (error) {
    console.log(error.stack);
    process.exit(1);
});
|
134 |
|
\ | No newline at end of file |