UNPKG

10.2 kBJavaScriptView Raw
1/**
2 * This module is to be used to benchmark loki binary index lifecycle
3 *
4 * Attempt to simulate and benchmark effects of various rebuild strategies on
5 * insert, find, remove, and update to be used to instrument refactorings/optimizations.
6 *
7 * Ideally a little overhead would be added to insert/update/remove ops to maintain the index
8 * rather than flag for full rebuild.
9 *
10 * This approach would definitely improve performance when insert/find ops are interlaced 1:1.
11 *
12 * We should also wish to gauge penalty of overhead when items are inserted consecutively, but
13 * not in batch array.
14 *
15 * Analysis of results might determine whether different rebuild strategies are configured via options.
16 *
17 * Refactorings of index rebuild strategies also need to be coordinated with extensive unit testing.
18 */
19
20
var loki = require('../src/lokijs.js');

// Shared benchmark state. Intentionally module-global: the scenario drivers
// below rebuild `db`/`samplecoll` between runs and the perf functions read them.
var db = new loki('binary index perf');
var samplecoll = null;
var uniquecoll = null;
var arraySize = 5000;         // default, we usually override when calling init functions
var totalIterations = 20000;  // how many times we search it
var results = [];
var getIterations = 2000000;  // get is crazy fast due to binary search so this needs separate scale
29
// Build a 20-character random string drawn from [A-Za-z0-9].
function genRandomVal() {
  var alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
  var out = "";

  for (var i = 0; i < 20; i++) {
    out += alphabet.charAt(Math.floor(Math.random() * alphabet.length));
  }

  return out;
}
39
40// in addition to the loki id we will create a key of our own
41// (customId) which is number from 1- totalIterations
42// we will later perform find() queries against customId with and
43// without an index
44
// Reset the global db/samplecoll pair with no indices configured.
function createDatabaseUnindexed() {
  db = new loki('binary index perf');
  samplecoll = db.addCollection('samplecoll');
}
50
// Reset the global db/samplecoll pair with a binary index on customId.
// adaptive : only when strictly true, enable adaptiveBinaryIndices so the
//            index is maintained per-operation instead of lazily rebuilt.
function createDatabaseIndexed(adaptive) {
  db = new loki('binary index perf');

  var options = {
    adaptiveBinaryIndices: adaptive === true,
    indices: ['customId']
  };
  samplecoll = db.addCollection('samplecoll', options);
}
59
// Scenario for many individual, consecutive inserts.
// silent : if true we won't log timing info to console
// docCount : number of random documents to generate the collection with (default 5000)
function initializeDatabase(silent, docCount) {
  var start, end;
  var totalTimes = [];
  var totalMS = 0.0;

  if (typeof docCount === "undefined") {
    docCount = 5000;
  }

  arraySize = docCount;

  for (var idx = 0; idx < arraySize; idx++) {
    var v1 = genRandomVal();
    var v2 = genRandomVal();

    start = process.hrtime();
    samplecoll.insert({
      customId: idx,
      val: v1,
      val2: v2,
      val3: "more data 1234567890"
    });
    end = process.hrtime(start);
    totalTimes.push(end);
  }

  if (silent === true) {
    return;
  }

  // let's include a final find which will probably punish lazy indexes more
  // (fixed: original queried the nonexistent 'customIdx' property instead of
  // 'customId', and pushed this timing after totals were already summed, so
  // the final find was never counted)
  start = process.hrtime();
  samplecoll.find({ customId: 50 });
  end = process.hrtime(start);
  totalTimes.push(end);

  for (var tidx = 0; tidx < totalTimes.length; tidx++) {
    totalMS += totalTimes[tidx][0] * 1e3 + totalTimes[tidx][1] / 1e6;
  }

  totalMS = totalMS.toFixed(2);
  var rate = arraySize * 1000 / totalMS;
  rate = rate.toFixed(2);
  console.log("load (individual inserts) : " + totalMS + "ms (" + rate + ") ops/s (" + arraySize + " documents)");
}
107
// Scenario: load the collection with a single batch (array) insert.
// silent : if true we won't log timing info to console
// docCount : number of random documents to generate collection with (default 5000)
function initializeDatabaseBatch(silent, docCount) {
  var t0, delta;
  var samples = [];
  var elapsedMS = 0.0;
  var batch = [];

  if (typeof docCount === "undefined") {
    docCount = 5000;
  }

  arraySize = docCount;

  for (var i = 0; i < arraySize; i++) {
    var v1 = genRandomVal();
    var v2 = genRandomVal();

    batch.push({
      customId: i,
      val: v1,
      val2: v2,
      val3: "more data 1234567890"
    });
  }

  // only the insert itself is timed; document generation is excluded
  t0 = process.hrtime();
  samplecoll.insert(batch);
  delta = process.hrtime(t0);
  samples.push(delta);

  if (silent === true) {
    return;
  }

  for (var j = 0; j < samples.length; j++) {
    elapsedMS += samples[j][0] * 1e3 + samples[j][1] / 1e6;
  }

  elapsedMS = elapsedMS.toFixed(2);
  var rate = (arraySize * 1000 / elapsedMS).toFixed(2);
  console.log("load (batch insert) : " + elapsedMS + "ms (" + rate + ") ops/s (" + arraySize + " documents)");
}
153
// Benchmark consecutive find() calls against customId with no interleaved writes.
// multiplier : optional scale factor applied to arraySize for the loop count
function perfFind(multiplier) {
  var samples = [];
  var elapsedMS = 0;

  var loopIterations = arraySize;
  if (typeof multiplier !== "undefined") {
    loopIterations *= multiplier;
  }

  for (var i = 0; i < loopIterations; i++) {
    // NOTE(review): range is 1..arraySize but inserted ids are 0..arraySize-1,
    // so id === arraySize never matches — presumably harmless for timing.
    var targetId = Math.floor(Math.random() * arraySize) + 1;

    var t0 = process.hrtime();
    samplecoll.find({
      'customId': targetId
    });
    samples.push(process.hrtime(t0));
  }

  for (var j = 0; j < samples.length; j++) {
    elapsedMS += samples[j][0] * 1e3 + samples[j][1] / 1e6;
  }

  elapsedMS = elapsedMS.toFixed(2);
  var rate = (loopIterations * 1000 / elapsedMS).toFixed(2);
  console.log("contiguous coll.find() : " + elapsedMS + "ms (" + rate + " ops/s) " + loopIterations + " iterations");
}
184
// Find Interlaced Inserts -> each iteration performs a find() followed by an
// insert() of a junk record; both fall inside the timed window, so index
// maintenance / rebuild cost shows up in the measurement.
// multiplier : optional scale factor applied to arraySize for the loop count
function perfFindInterlacedInserts(multiplier) {
  var samples = [];
  var elapsedMS = 0;

  var loopIterations = arraySize;
  if (typeof multiplier !== "undefined") {
    loopIterations *= multiplier;
  }

  for (var i = 0; i < loopIterations; i++) {
    var targetId = Math.floor(Math.random() * arraySize) + 1;

    var t0 = process.hrtime();
    samplecoll.find({
      'customId': targetId
    });
    // junk record inserted within the timed window to force index maintenance
    samplecoll.insert({
      customId: 999,
      val: 999,
      val2: 999,
      val3: "more data 1234567890"
    });
    samples.push(process.hrtime(t0));
  }

  for (var j = 0; j < samples.length; j++) {
    elapsedMS += samples[j][0] * 1e3 + samples[j][1] / 1e6;
  }

  elapsedMS = elapsedMS.toFixed(2);
  var rate = (loopIterations * 1000 / elapsedMS).toFixed(2);
  console.log("interlaced inserts + coll.find() : " + elapsedMS + "ms (" + rate + " ops/s) " + loopIterations + " iterations");
}
226
// Find Interlaced Removes -> walk customId linearly, findOne() each document
// and remove it; the remove falls inside the timed window, so index
// maintenance / rebuild cost shows up in the measurement.
function perfFindInterlacedRemoves() {
  var samples = [];
  var elapsedMS = 0;

  for (var i = 0; i < arraySize; i++) {
    var t0 = process.hrtime();
    var doc = samplecoll.findOne({
      'customId': i
    });
    // remove the found document within the timed window to force index maintenance
    samplecoll.remove(doc);
    samples.push(process.hrtime(t0));
  }

  for (var j = 0; j < samples.length; j++) {
    elapsedMS += samples[j][0] * 1e3 + samples[j][1] / 1e6;
  }

  elapsedMS = elapsedMS.toFixed(2);
  var rate = (arraySize * 1000 / elapsedMS).toFixed(2);
  console.log("interlaced removes + coll.find() : " + elapsedMS + "ms (" + rate + " ops/s) " + arraySize + " iterations");
}
256
// Find Interlaced Updates -> walk customId linearly, find() each document and
// call update() on it unchanged; the update falls inside the timed window, so
// index maintenance / rebuild cost shows up in the measurement.
function perfFindInterlacesUpdates() {
  var samples = [];
  var elapsedMS = 0;

  for (var i = 0; i < arraySize; i++) {
    var t0 = process.hrtime();
    var found = samplecoll.find({
      'customId': i
    });
    // update call within the timed window forces index maintenance
    samplecoll.update(found[0]);
    samples.push(process.hrtime(t0));
  }

  for (var j = 0; j < samples.length; j++) {
    elapsedMS += samples[j][0] * 1e3 + samples[j][1] / 1e6;
  }

  elapsedMS = elapsedMS.toFixed(2);
  var rate = (arraySize * 1000 / elapsedMS).toFixed(2);
  console.log("interlaced updates + coll.find() : " + elapsedMS + "ms (" + rate + " ops/s) " + arraySize + " iterations");
}
286
// ---------------------------------------------------------------------------
// Benchmark driver: each scenario rebuilds the database from scratch, loads
// documents, then (for the find scenarios) times a read/write workload.
// ---------------------------------------------------------------------------

// Insert throughput, no binary index.
console.log("");
console.log("Perf: Unindexed Inserts---------------------");
createDatabaseUnindexed();
initializeDatabase(false, 100000);
createDatabaseUnindexed();
initializeDatabaseBatch(false, 100000);

// Insert throughput, lazy (rebuild-on-demand) binary index.
console.log("");
console.log("Perf: Indexed Inserts (Lazy) ------------------------");
createDatabaseIndexed();
initializeDatabase(false, 100000);
createDatabaseIndexed();
initializeDatabaseBatch(false, 100000);

// Insert throughput, adaptive (maintained per-operation) binary index.
console.log("");
console.log("Perf: Indexed Inserts (Adaptive) ------------------------");
createDatabaseIndexed(true);
initializeDatabase(false, 100000);
createDatabaseIndexed(true);
initializeDatabaseBatch(false, 100000);

// Find workloads with no index at all (silent loads: timing the finds only).
console.log("");
console.log("Perf: Unindexed finds ------------------------");
createDatabaseUnindexed();
initializeDatabase(true, 10000);
perfFind();

createDatabaseUnindexed();
initializeDatabase(true, 10000);
perfFindInterlacedInserts();

createDatabaseUnindexed();
initializeDatabase(true, 10000);
perfFindInterlacedRemoves();

createDatabaseUnindexed();
initializeDatabase(true, 10000);
perfFindInterlacesUpdates();

// Lazy index with interlaced writes: every write invalidates the index,
// every find pays for a full rebuild (hence the smaller doc counts).
console.log("");
console.log("Perf: Indexed finds (Nightmare Lazy Index Thrashing Test) ------");
createDatabaseIndexed(false);
initializeDatabase(true, 80000);
perfFind();

createDatabaseIndexed(false);
initializeDatabase(true, 3000);
perfFindInterlacedInserts(1);

createDatabaseIndexed(false);
initializeDatabase(true, 3000);
perfFindInterlacedRemoves();

createDatabaseIndexed(false);
initializeDatabase(true, 3000);
perfFindInterlacesUpdates();

// Adaptive index under the same interlaced workloads.
console.log("");
console.log("Perf: Indexed finds (Nightmare Adaptive Index Thrashing Test) ---");
createDatabaseIndexed(true);
initializeDatabase(true, 80000);
perfFind();

createDatabaseIndexed(true);
initializeDatabase(true, 10000);
perfFindInterlacedInserts(1);

createDatabaseIndexed(true);
initializeDatabase(true, 10000);
perfFindInterlacedRemoves();

createDatabaseIndexed(true);
initializeDatabase(true, 10000);
perfFindInterlacesUpdates();
\No newline at end of file