1"use strict";
2
3Object.defineProperty(exports, "__esModule", {
4 value: true
5});
6exports.runAllBenchmarks = runAllBenchmarks;
7exports.benchmark = void 0;
8
9var _perf_hooks = require("perf_hooks");
10
11var _chalk = _interopRequireDefault(require("chalk"));
12
13var _debug = _interopRequireDefault(require("debug"));
14
15var ansi = _interopRequireWildcard(require("ansi-escapes"));
16
17var microtime = _interopRequireWildcard(require("microtime"));
18
19var _utils = require("./utils");
20
21var _config = require("./config");
22
23function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = Object.defineProperty && Object.getOwnPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : {}; if (desc.get || desc.set) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } } newObj.default = obj; return newObj; } }
24
25function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
26
// For documentation on benchmarks, please see: `src/commands/bench.js`
const TableUtils = require('cli-table/lib/utils');

TableUtils.truncate = str => str;

const Table = require('cli-table');

const debug = (0, _debug.default)('wiz');
const kHasRunSerially = Symbol('kHasRunSerially');
let benchmarksScheduled = false;
let onlyAcceptOnlys = false;
let registeredBenchmarks = new Map();
let longestBenchmarkTitleLength = 0;
let benchmarkRunningHasBegun = false;
const cliTable = new Table({
  chars: {
    top: '',
    'top-mid': '',
    'top-left': '',
    'top-right': '',
    bottom: '',
    'bottom-mid': '',
    'bottom-left': '',
    'bottom-right': '',
    left: '',
    'left-mid': '',
    mid: '',
    'mid-mid': '',
    right: '',
    'right-mid': '',
    middle: ' '
  },
  colAligns: ['left', 'right', 'right']
});
const benchConfig = JSON.parse(process.env.WIZ_BENCH || '{}');

function appendTable(row) {
  if (!process.stdout.isTTY || _config.isCI) {
    if (row.length === 1) {
      return console.log(row[0]);
    }

    return console.log(`${row[0]}${' '.repeat(Math.max(1, longestBenchmarkTitleLength + 13 - row[0].length))}${row[1]}\t${row.slice(2).join('\t')}`);
  }

  cliTable.push(row);
  (0, _utils.ttywrite)('');

  if (cliTable.length > 1) {
    (0, _utils.ttywrite)(ansi.cursorUp(cliTable.length - 1));
  }

  (0, _utils.ttywrite)(cliTable.toString() + '\n');
  cliTable.options.colWidths = [];
}

function fibonacci(n) {
  if (n <= 2) {
    return 1;
  }

  let a = 1;
  let b = 1;
  let c = a + b;

  for (let i = 3; i < n; i++) {
    a = b;
    b = c;
    c = a + b;
  }

  return c;
}

function magnitude(n) {
  return 10 ** n;
}
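
// The iteration count N grows between runs according to one of the two growth
// functions above. As a rough worked example (values follow directly from the
// code): magnitude(1..4) => 10, 100, 1000, 10000, while fibonacci(1..8) =>
// 1, 1, 2, 3, 5, 8, 13, 21 - so the 'fibonacci' setting ramps N up far more
// gradually than the default 'magnitude'.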

function ms(time) {
  if (time >= 1000 * 1000 * 60) {
    return {
      time: Math.round(time / (1000 * 1000 * 60) * 10) / 10,
      unit: 'm'
    };
  } else if (time >= 1000 * 1000) {
    return {
      time: Math.round(time / (1000 * 1000) * 10) / 10,
      unit: 's'
    };
  } else if (time >= 1000) {
    return {
      time: Math.round(time / 1000 * 10) / 10,
      unit: 'ms'
    };
  }

  return {
    time: Math.round(time * 10) / 10,
    unit: 'µs'
  };
}
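
// Worked example of the conversion above (the input is a duration in microseconds):
// ms(750) => { time: 750, unit: 'µs' }, ms(1500) => { time: 1.5, unit: 'ms' },
// ms(2500000) => { time: 2.5, unit: 's' }, ms(90000000) => { time: 1.5, unit: 'm' }.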

function prettyNumber(num) {
  num = String(num);
  let string = '';
  let numDigits = 0;

  for (let i = num.length - 1; i > -1; --i) {
    string = num[i] + string;

    if (++numDigits % 3 === 0) {
      string = ' ' + string;
    }
  }

  return string.trimLeft();
}
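
// Worked example: the loop above inserts a space after every third digit counting
// from the right, so prettyNumber(1234567) => '1 234 567'.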

function isDefined(value) {
  return value !== undefined && value !== null;
}

function loadBenchConfig() {
  const config = {
    growthFn: magnitude,
    benchTime: 1000 * 1000,
    minIterations: 1,
    maxIterations: Infinity,
    forceExit: false,
    perfHooks: false
  };

  // `growthFn` is supplied as a string via WIZ_BENCH, so map it onto the actual function
  if (benchConfig.growthFn === 'fibonacci') {
    config.growthFn = fibonacci;
  }

  if (isDefined(benchConfig.benchTime)) {
    config.benchTime = benchConfig.benchTime;
  }

  if (isDefined(benchConfig.minIterations)) {
    config.minIterations = benchConfig.minIterations;
  }

  if (isDefined(benchConfig.maxIterations)) {
    config.maxIterations = benchConfig.maxIterations;
  }

  if (isDefined(benchConfig.forceExit)) {
    config.forceExit = benchConfig.forceExit;
  }

  if (isDefined(benchConfig.perfHooks)) {
    config.perfHooks = benchConfig.perfHooks;
  }

  debug(`Benchmark config loaded => %O`, config);
  return config;
}
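
// A minimal sketch of the shape WIZ_BENCH is expected to take (the keys mirror
// the ones read above; the values here are purely illustrative):
//
//   WIZ_BENCH='{"benchTime":5000000,"minIterations":10,"maxIterations":100000,"growthFn":"fibonacci","forceExit":false,"perfHooks":true}'
//
// Keys omitted from WIZ_BENCH fall back to the defaults above, and per-benchmark
// overrides (see runAllBenchmarks below) are applied on top of this global config.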

async function runAllBenchmarks() {
  const globalConfig = loadBenchConfig();
  let allBenchmarksSucceeded = true;
  benchmarkRunningHasBegun = true;

  // sort so that like-named benchmarks are next to each other for easier comparison
  const entries = Array.from(registeredBenchmarks.entries()).sort((a, b) => {
    return a[0] >= b[0] ? 1 : -1;
  });

  for (let i = 0; i < entries.length; ++i) {
    const [title, handlers] = entries[i];
    let options = { ...globalConfig };
    const benchmarkHasOverrides = typeof handlers[handlers.length - 1] === 'object';

    if (benchmarkHasOverrides) {
      Object.assign(options, handlers[handlers.length - 1]);

      if (typeof options.growthFn === 'string') {
        options.growthFn = options.growthFn === 'fibonacci' ? fibonacci : magnitude;
      }
    }

    try {
      let startTime;
      let endTime;
      let avgDurationPerOp = 0;
      let avgOpsPerSecond = 0;
      let numIterations = options.minIterations;
      let runNumber = 1;
      let numIterationsWasChecked;
      let timerIsRunning = true;
      let ranSerially = false;
      const b = {
        N() {
          numIterationsWasChecked = true;
          return numIterations;
        },

        async runConcurrently(fn) {
          if (handlers[kHasRunSerially]) {
            const goals = new Array(b.N());

            for (let i = 0; i < goals.length; ++i) {
              goals[i] = fn(i);
            }

            await Promise.all(goals);
          } else {
            ranSerially = true;
            handlers[kHasRunSerially] = true;

            // reset the index of the outer entries loop so this benchmark is
            // repeated - serially on this pass, concurrently on the next
            i--;

            for (let i = 0; i < b.N(); ++i) {
              await fn(i);
            }
          }
        },

        resetTimer() {
          startTime = microtime.now();
          timerIsRunning = true;
          debug(`Timer reset to: ${startTime}`);
        },

        stopTimer() {
          if (!timerIsRunning) {
            throw new Error(`Timer stopped twice`);
          }

          endTime = microtime.now();
          timerIsRunning = false;
          debug(`Timer stopped at: ${endTime} (+${endTime - startTime}µs)`);
        }

      };
      const fn = benchmarkHasOverrides ? handlers[handlers.length - 2] : handlers[handlers.length - 1];
      let args = [b];
      (0, _utils.ttywrite)(`preparing: ${title}`);
      let lastElement = handlers.length - 1;

      if (benchmarkHasOverrides) {
        lastElement--;
      }

      for (let i = 0; i < lastElement; i++) {
        args = await handlers[i](args);
      }

      let numPerfEvents = 0;
      let numPerfEventTypes = 0;
      const perfEvents = new Map();
      const perfObserver = new _perf_hooks.PerformanceObserver(events => {
        events.getEntries().forEach(event => {
          if (!perfEvents.has(event.name)) {
            numPerfEventTypes++;
            perfEvents.set(event.name, []);
          }

          numPerfEvents++;
          perfEvents.get(event.name).push(event.duration);
        });
      });
      perfObserver.observe({
        entryTypes: ['measure']
      });
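
      // The observer above only collects 'measure' entries, so benchmarked code
      // opts in by emitting standard perf_hooks marks/measures. A minimal sketch
      // (the names and workload are illustrative, not part of this module's API):
      //
      //   const { performance } = require('perf_hooks');
      //   performance.mark('query:start');
      //   await runQuery(); // hypothetical workload
      //   performance.mark('query:end');
      //   performance.measure('query', 'query:start', 'query:end');
      //
      // Each measure's duration is recorded in `perfEvents` under its name and is
      // summarized after the run when the `perfHooks` option is enabled.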

      while (true) {
        numIterationsWasChecked = false;
        (0, _utils.ttywrite)(`running: ${title} (N = ${prettyNumber(numIterations)})`);
        b.resetTimer();
        await fn.apply(global, args);

        if (timerIsRunning) {
          b.stopTimer();
        }

        if (!numIterationsWasChecked) {
          throw new Error(`Benchmark '${title}' ran without calling b.N() - please see documentation`);
        }

        const duration = endTime - startTime;
        process.stderr.write('\r');
        debug(`${title} completed with N = ${numIterations} in ${duration}`);
        avgDurationPerOp = duration / numIterations;

        if (duration > 0) {
          avgOpsPerSecond = 1000 * 1000 / (duration / numIterations);
        }

        if (duration >= options.benchTime || numIterations >= options.maxIterations) {
          debug(`${title} benchmark concluded (duration: ${duration}; iterations: ${numIterations}; config: %O)`, {
            benchTime: options.benchTime,
            maxIterations: options.maxIterations,
            growthFn: options.growthFn
          });
          break;
        }

        numIterations = options.growthFn(++runNumber);
      }

      perfObserver.disconnect();
      const {
        time,
        unit
      } = ms(avgDurationPerOp);
      appendTable(['\t' + title + (ranSerially ? ' (serial)' : handlers[kHasRunSerially] ? ' (concurrent)' : ''), prettyNumber(Math.floor(avgOpsPerSecond)) + ' ops/s', `${time} ${unit}/op`]);

      if (numPerfEventTypes > 0) {
        if (options.perfHooks) {
          const entries = Array.from(perfEvents.entries()).map(([eventType, durations]) => {
            const totalMSDuration = durations.reduce((a, b) => a + b, 0);
            const duration = ms(1e3 * totalMSDuration / durations.length);
            const opsPerSecond = durations.length / (totalMSDuration / 1e3);
            return [eventType, opsPerSecond, duration];
          }).sort((a, b) => {
            return b[1] - a[1];
          });

          for (const [eventType, opsPerSecond, {
            time,
            unit
          }] of entries) {
            appendTable([`\t ↪ ${eventType}`, `${prettyNumber(Math.floor(opsPerSecond))} ops/s`, `${time} ${unit}/op`]);
          }
        } else {
          appendTable([_chalk.default.gray(`\tObserved ${numPerfEventTypes} events with ${prettyNumber(numPerfEvents)} occurrences.`)]);
        }
      }
    } catch (error) {
      allBenchmarksSucceeded = false;
      console.error(`\r${ansi.eraseEndLine}\t${title}\tFailed with: ${String(error.stack).split('\n').map((line, index) => {
        if (index === 0) {
          return line;
        }

        return '\t' + line;
      }).join('\n')}`);
    }
  }

  if (!allBenchmarksSucceeded) {
    process.exit(1);
  } else if (globalConfig.forceExit) {
    console.warn(`warn: forcing exit`);
    process.exit();
  }
}

function addBenchmark(title, handlers) {
  if (benchmarkRunningHasBegun) {
    throw new Error(`Benchmark "${title}" registered after execution has already begun`);
  }

  if (registeredBenchmarks.has(title)) {
    throw new Error(`Duplicate benchmark registered with title: '${title}'`);
  }

  longestBenchmarkTitleLength = Math.max(longestBenchmarkTitleLength, title.length);
  registeredBenchmarks.set(title, handlers);
}
/**
 * This function registers benchmarks with the benchmark runner. For basic use,
 * pass a string title describing the benchmark and a handler that runs the benchmark.
 *
 * Benchmark titles should be unique across your codebase. This is verified at
 * registration time and the process will fail if you use a non-unique title.
 *
 * **Advanced: Using currying**
 *
 * The benchmark function also supports currying handlers to perform custom setup.
 * You can think of this as analogous to `beforeEach()` in mocha. Setup handlers are
 * executed in the order they are passed, and the arguments they produce are handed
 * to the final handler on each invocation of the benchmark. The `b` object is never
 * re-instantiated, but the values returned by `b.N()` will change and should not be
 * cached by setup handlers.
 *
 * ###### Example
 *
 * ```javascript
 * import { benchmark } from '@karimsa/wiz/bench'
 *
 * import { createApi } from '../__tests__/helpers'
 *
 * async function setup(b) {
 *   const api = await createApi({ version: 'v1' })
 *   await api.setupUsers(10)
 *
 *   return {
 *     b,
 *     api,
 *   }
 * }
 *
 * benchmark('my custom benchmark', setup, async ({ b, api }) => {
 *   // b.resetTimer() is unnecessary here since the execution
 *   // time of 'setup()' is completely ignored by the runner
 *
 *   for (let i = 0; i < b.N(); ++i) {
 *     await api.addRecord({ i })
 *   }
 * })
 * ```
 *
 * @type function
 */


const benchmark = Object.assign(function (title, ...handlers) {
  if (!onlyAcceptOnlys) {
    addBenchmark(title, handlers);
  }

  if (!benchmarksScheduled) {
    benchmarksScheduled = true;
    process.nextTick(runAllBenchmarks);
  }
}, {
  only(title, ...handlers) {
    if (!onlyAcceptOnlys) {
      onlyAcceptOnlys = true;
      registeredBenchmarks = new Map();
      longestBenchmarkTitleLength = 0;
    }

    addBenchmark(title, handlers);

    if (!benchmarksScheduled) {
      benchmarksScheduled = true;
      process.nextTick(runAllBenchmarks);
    }
  }

});
exports.benchmark = benchmark;
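
// A minimal usage sketch of the registration API above (option values are
// illustrative; the module path matches the example in the JSDoc comment):
//
//   const { benchmark } = require('@karimsa/wiz/bench');
//
//   // Run only this benchmark and skip every other registered one.
//   benchmark.only('json parse', b => {
//     for (let i = 0; i < b.N(); i++) {
//       JSON.parse('{"hello":"world"}');
//     }
//   });
//
//   // A trailing options object acts as a per-benchmark override that is merged
//   // over the global WIZ_BENCH config (see `benchmarkHasOverrides` above).
//   benchmark('json stringify', b => {
//     for (let i = 0; i < b.N(); i++) {
//       JSON.stringify({ hello: 'world' });
//     }
//   }, { benchTime: 5000000, growthFn: 'fibonacci' });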