// PouchDB indexeddb plugin 9.0.0
// Browserify module-loader prelude: builds a resolver `r(modules, cache, entries)`
// whose inner `o(id)` instantiates each bundled module on first require.
// (De-garbled: removed the line number the page scrape fused onto this line,
// which turned the IIFE into an invalid call on a numeric literal.)
(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i<t.length;i++)o(t[i]);return o}return r})()({1:[function(_dereq_,module,exports){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// Prefer native implementations; fall back to the polyfills defined at the
// bottom of this module for very old engines. (The polyfill identifiers are
// only evaluated when the native function is missing, thanks to `||`.)
var objectCreate = Object.create || objectCreatePolyfill
var objectKeys = Object.keys || objectKeysPolyfill
var bind = Function.prototype.bind || functionBindPolyfill
27
/**
 * Minimal EventEmitter (browser build of Node's `events` module).
 * Lazily initialises the per-instance listener map on construction.
 */
function EventEmitter() {
  // Only (re)create the listener map when this instance does not yet own one
  // (e.g. when a subclass calls EventEmitter.call(this) more than once).
  if (!this._events || !Object.prototype.hasOwnProperty.call(this, '_events')) {
    this._events = objectCreate(null);
    this._eventsCount = 0;
  }

  this._maxListeners = this._maxListeners || undefined;
}
module.exports = EventEmitter;

// Backwards-compat with node 0.10.x
EventEmitter.EventEmitter = EventEmitter;

// Prototype defaults; real values live on each instance.
EventEmitter.prototype._events = undefined;
EventEmitter.prototype._maxListeners = undefined;
43
// By default EventEmitters will print a warning if more than 10 listeners are
// added to it. This is a useful default which helps finding memory leaks.
var defaultMaxListeners = 10;

// Feature-detect a working Object.defineProperty (absent or broken in old IE).
var hasDefineProperty;
try {
  var o = {};
  if (Object.defineProperty) Object.defineProperty(o, 'x', { value: 0 });
  hasDefineProperty = o.x === 0;
} catch (err) { hasDefineProperty = false }
if (hasDefineProperty) {
  // Expose defaultMaxListeners as a validated getter/setter pair.
  Object.defineProperty(EventEmitter, 'defaultMaxListeners', {
    enumerable: true,
    get: function() {
      return defaultMaxListeners;
    },
    set: function(arg) {
      // check whether the input is a positive number (whose value is zero or
      // greater and not a NaN).
      if (typeof arg !== 'number' || arg < 0 || arg !== arg)
        throw new TypeError('"defaultMaxListeners" must be a positive number');
      defaultMaxListeners = arg;
    }
  });
} else {
  // Plain data property fallback: assignment cannot be validated here.
  EventEmitter.defaultMaxListeners = defaultMaxListeners;
}
71
// Obviously not all Emitters should be limited to 10. This function allows
// that to be increased. Set to zero for unlimited.
EventEmitter.prototype.setMaxListeners = function setMaxListeners(n) {
  if (typeof n !== 'number' || n < 0 || isNaN(n))
    throw new TypeError('"n" argument must be a positive number');
  this._maxListeners = n;
  return this;
};

// Resolve the effective limit: a per-instance value wins over the class default.
function $getMaxListeners(that) {
  if (that._maxListeners === undefined)
    return EventEmitter.defaultMaxListeners;
  return that._maxListeners;
}

EventEmitter.prototype.getMaxListeners = function getMaxListeners() {
  return $getMaxListeners(this);
};
90
// These standalone emit* functions are used to optimize calling of event
// handlers for fast cases because emit() itself often has a variable number of
// arguments and can be deoptimized because of that. These functions always have
// the same number of arguments and thus do not get deoptimized, so the code
// inside them can execute faster.
// In each one, `handler` is either a single function (isFn) or an array of
// listeners; the array is cloned first so listeners removed while emitting
// still receive this event.
function emitNone(handler, isFn, self) {
  if (isFn)
    handler.call(self);
  else {
    var len = handler.length;
    var listeners = arrayClone(handler, len);
    for (var i = 0; i < len; ++i)
      listeners[i].call(self);
  }
}
function emitOne(handler, isFn, self, arg1) {
  if (isFn)
    handler.call(self, arg1);
  else {
    var len = handler.length;
    var listeners = arrayClone(handler, len);
    for (var i = 0; i < len; ++i)
      listeners[i].call(self, arg1);
  }
}
function emitTwo(handler, isFn, self, arg1, arg2) {
  if (isFn)
    handler.call(self, arg1, arg2);
  else {
    var len = handler.length;
    var listeners = arrayClone(handler, len);
    for (var i = 0; i < len; ++i)
      listeners[i].call(self, arg1, arg2);
  }
}
function emitThree(handler, isFn, self, arg1, arg2, arg3) {
  if (isFn)
    handler.call(self, arg1, arg2, arg3);
  else {
    var len = handler.length;
    var listeners = arrayClone(handler, len);
    for (var i = 0; i < len; ++i)
      listeners[i].call(self, arg1, arg2, arg3);
  }
}

function emitMany(handler, isFn, self, args) {
  if (isFn)
    handler.apply(self, args);
  else {
    var len = handler.length;
    var listeners = arrayClone(handler, len);
    for (var i = 0; i < len; ++i)
      listeners[i].apply(self, args);
  }
}
147
/**
 * Emit `type` with any additional arguments.
 * Returns true if the event had listeners, false otherwise.
 * An 'error' event with no listener throws (the Error argument itself when
 * one was given, otherwise a synthesized Error with `.context` set).
 */
EventEmitter.prototype.emit = function emit(type) {
  var er, handler, len, args, i, events;
  var doError = (type === 'error');

  events = this._events;
  if (events)
    doError = (doError && events.error == null);
  else if (!doError)
    return false;

  // If there is no 'error' event listener then throw.
  if (doError) {
    if (arguments.length > 1)
      er = arguments[1];
    if (er instanceof Error) {
      throw er; // Unhandled 'error' event
    } else {
      // At least give some kind of context to the user
      var err = new Error('Unhandled "error" event. (' + er + ')');
      err.context = er;
      throw err;
    }
    // (unreachable `return false` that followed the throw has been removed)
  }

  handler = events[type];

  if (!handler)
    return false;

  var isFn = typeof handler === 'function';
  len = arguments.length;
  switch (len) {
    // fast cases
    case 1:
      emitNone(handler, isFn, this);
      break;
    case 2:
      emitOne(handler, isFn, this, arguments[1]);
      break;
    case 3:
      emitTwo(handler, isFn, this, arguments[1], arguments[2]);
      break;
    case 4:
      emitThree(handler, isFn, this, arguments[1], arguments[2], arguments[3]);
      break;
    // slower
    default:
      args = new Array(len - 1);
      for (i = 1; i < len; i++)
        args[i - 1] = arguments[i];
      emitMany(handler, isFn, this, args);
  }

  return true;
};
204
/**
 * Shared implementation behind addListener/prependListener.
 * A single listener is stored directly under events[type]; it is promoted to
 * an array when a second listener for the same type is added.
 */
function _addListener(target, type, listener, prepend) {
  var m;
  var events;
  var existing;

  if (typeof listener !== 'function')
    throw new TypeError('"listener" argument must be a function');

  events = target._events;
  if (!events) {
    events = target._events = objectCreate(null);
    target._eventsCount = 0;
  } else {
    // To avoid recursion in the case that type === "newListener"! Before
    // adding it to the listeners, first emit "newListener".
    if (events.newListener) {
      target.emit('newListener', type,
                  listener.listener ? listener.listener : listener);

      // Re-assign `events` because a newListener handler could have caused the
      // this._events to be assigned to a new object
      events = target._events;
    }
    existing = events[type];
  }

  if (!existing) {
    // Optimize the case of one listener. Don't need the extra array object.
    existing = events[type] = listener;
    ++target._eventsCount;
  } else {
    if (typeof existing === 'function') {
      // Adding the second element, need to change to array.
      existing = events[type] =
          prepend ? [listener, existing] : [existing, listener];
    } else {
      // If we've already got an array, just append.
      if (prepend) {
        existing.unshift(listener);
      } else {
        existing.push(listener);
      }
    }

    // Check for listener leak
    if (!existing.warned) {
      m = $getMaxListeners(target);
      if (m && m > 0 && existing.length > m) {
        existing.warned = true;
        var w = new Error('Possible EventEmitter memory leak detected. ' +
            existing.length + ' "' + String(type) + '" listeners ' +
            'added. Use emitter.setMaxListeners() to ' +
            'increase limit.');
        w.name = 'MaxListenersExceededWarning';
        w.emitter = target;
        w.type = type;
        w.count = existing.length;
        if (typeof console === 'object' && console.warn) {
          console.warn('%s: %s', w.name, w.message);
        }
      }
    }
  }

  return target;
}

EventEmitter.prototype.addListener = function addListener(type, listener) {
  return _addListener(this, type, listener, false);
};

EventEmitter.prototype.on = EventEmitter.prototype.addListener;

EventEmitter.prototype.prependListener =
    function prependListener(type, listener) {
      return _addListener(this, type, listener, true);
    };
282
// Invoked with a state object as `this` (see _onceWrap); removes itself on
// first call so the wrapped listener fires at most once.
function onceWrapper() {
  if (!this.fired) {
    this.target.removeListener(this.type, this.wrapFn);
    this.fired = true;
    switch (arguments.length) {
      case 0:
        return this.listener.call(this.target);
      case 1:
        return this.listener.call(this.target, arguments[0]);
      case 2:
        return this.listener.call(this.target, arguments[0], arguments[1]);
      case 3:
        return this.listener.call(this.target, arguments[0], arguments[1],
            arguments[2]);
      default:
        var args = new Array(arguments.length);
        for (var i = 0; i < args.length; ++i)
          args[i] = arguments[i];
        this.listener.apply(this.target, args);
    }
  }
}

function _onceWrap(target, type, listener) {
  var state = { fired: false, wrapFn: undefined, target: target, type: type, listener: listener };
  var wrapped = bind.call(onceWrapper, state);
  // Expose the original listener so removeListener(type, listener) matches.
  wrapped.listener = listener;
  state.wrapFn = wrapped;
  return wrapped;
}

EventEmitter.prototype.once = function once(type, listener) {
  if (typeof listener !== 'function')
    throw new TypeError('"listener" argument must be a function');
  this.on(type, _onceWrap(this, type, listener));
  return this;
};

EventEmitter.prototype.prependOnceListener =
    function prependOnceListener(type, listener) {
      if (typeof listener !== 'function')
        throw new TypeError('"listener" argument must be a function');
      this.prependListener(type, _onceWrap(this, type, listener));
      return this;
    };
328
// Emits a 'removeListener' event if and only if the listener was removed.
EventEmitter.prototype.removeListener =
    function removeListener(type, listener) {
      var list, events, position, i, originalListener;

      if (typeof listener !== 'function')
        throw new TypeError('"listener" argument must be a function');

      events = this._events;
      if (!events)
        return this;

      list = events[type];
      if (!list)
        return this;

      if (list === listener || list.listener === listener) {
        // Single-listener entry: drop it entirely.
        // NOTE(review): when the count hits zero the whole map is replaced and
        // no 'removeListener' is emitted — matches the upstream events polyfill.
        if (--this._eventsCount === 0)
          this._events = objectCreate(null);
        else {
          delete events[type];
          if (events.removeListener)
            this.emit('removeListener', type, list.listener || listener);
        }
      } else if (typeof list !== 'function') {
        position = -1;

        // Scan from the end so the most recently added match is removed.
        for (i = list.length - 1; i >= 0; i--) {
          if (list[i] === listener || list[i].listener === listener) {
            originalListener = list[i].listener;
            position = i;
            break;
          }
        }

        if (position < 0)
          return this;

        if (position === 0)
          list.shift();
        else
          spliceOne(list, position);

        // Collapse a one-element array back to the bare-function form.
        if (list.length === 1)
          events[type] = list[0];

        if (events.removeListener)
          this.emit('removeListener', type, originalListener || listener);
      }

      return this;
    };
381
// Remove all listeners for `type`, or for every event when no type is given.
EventEmitter.prototype.removeAllListeners =
    function removeAllListeners(type) {
      var listeners, events, i;

      events = this._events;
      if (!events)
        return this;

      // not listening for removeListener, no need to emit
      if (!events.removeListener) {
        if (arguments.length === 0) {
          this._events = objectCreate(null);
          this._eventsCount = 0;
        } else if (events[type]) {
          if (--this._eventsCount === 0)
            this._events = objectCreate(null);
          else
            delete events[type];
        }
        return this;
      }

      // emit removeListener for all listeners on all events
      if (arguments.length === 0) {
        var keys = objectKeys(events);
        var key;
        for (i = 0; i < keys.length; ++i) {
          key = keys[i];
          // 'removeListener' itself goes last so it observes all removals.
          if (key === 'removeListener') continue;
          this.removeAllListeners(key);
        }
        this.removeAllListeners('removeListener');
        this._events = objectCreate(null);
        this._eventsCount = 0;
        return this;
      }

      listeners = events[type];

      if (typeof listeners === 'function') {
        this.removeListener(type, listeners);
      } else if (listeners) {
        // LIFO order
        for (i = listeners.length - 1; i >= 0; i--) {
          this.removeListener(type, listeners[i]);
        }
      }

      return this;
    };
432
// Return the listeners registered for `type`. When `unwrap` is true, once()
// wrappers are replaced by the original listener functions.
function _listeners(target, type, unwrap) {
  var events = target._events;

  if (!events)
    return [];

  var evlistener = events[type];
  if (!evlistener)
    return [];

  if (typeof evlistener === 'function')
    return unwrap ? [evlistener.listener || evlistener] : [evlistener];

  return unwrap ? unwrapListeners(evlistener) : arrayClone(evlistener, evlistener.length);
}

EventEmitter.prototype.listeners = function listeners(type) {
  return _listeners(this, type, true);
};

EventEmitter.prototype.rawListeners = function rawListeners(type) {
  return _listeners(this, type, false);
};
456
// Static helper kept for backwards compatibility with older Node APIs;
// delegates to the emitter's own listenerCount when available.
EventEmitter.listenerCount = function(emitter, type) {
  if (typeof emitter.listenerCount === 'function') {
    return emitter.listenerCount(type);
  } else {
    return listenerCount.call(emitter, type);
  }
};

EventEmitter.prototype.listenerCount = listenerCount;
// Number of listeners registered for `type` on this emitter (0, 1, or the
// length of the listener array).
function listenerCount(type) {
  var events = this._events;

  if (events) {
    var evlistener = events[type];

    if (typeof evlistener === 'function') {
      return 1;
    } else if (evlistener) {
      return evlistener.length;
    }
  }

  return 0;
}
481
// List the event names (string and symbol keys) that currently have listeners.
EventEmitter.prototype.eventNames = function eventNames() {
  return this._eventsCount > 0 ? Reflect.ownKeys(this._events) : [];
};
485
// About 1.5x faster than the two-arg version of Array#splice().
// Removes the element at `index` by shifting the tail left one slot.
function spliceOne(list, index) {
  for (var i = index, k = i + 1, n = list.length; k < n; i += 1, k += 1)
    list[i] = list[k];
  list.pop();
}

// Shallow-copy the first `n` elements of `arr` into a fresh array.
function arrayClone(arr, n) {
  var copy = new Array(n);
  for (var i = 0; i < n; ++i)
    copy[i] = arr[i];
  return copy;
}

// Map once() wrappers back to the original listeners they wrap.
function unwrapListeners(arr) {
  var ret = new Array(arr.length);
  for (var i = 0; i < ret.length; ++i) {
    ret[i] = arr[i].listener || arr[i];
  }
  return ret;
}
507
// Object.create fallback: prototype chain only, no property descriptors.
function objectCreatePolyfill(proto) {
  var F = function() {};
  F.prototype = proto;
  return new F;
}
// Object.keys fallback: own enumerable string keys.
// BUG FIX: the original returned `k` (the last key visited, or undefined for
// an empty object) instead of the accumulated `keys` array.
function objectKeysPolyfill(obj) {
  var keys = [];
  for (var k in obj) if (Object.prototype.hasOwnProperty.call(obj, k)) {
    keys.push(k);
  }
  return keys;
}
// Function.prototype.bind fallback: binds `this` only (sufficient for this
// module's single call site in _onceWrap).
function functionBindPolyfill(context) {
  var fn = this;
  return function () {
    return fn.apply(context, arguments);
  };
}
526
527},{}],2:[function(_dereq_,module,exports){
528(function (factory) {
529 if (typeof exports === 'object') {
530 // Node/CommonJS
531 module.exports = factory();
532 } else if (typeof define === 'function' && define.amd) {
533 // AMD
534 define(factory);
535 } else {
536 // Browser globals (with support for web workers)
537 var glob;
538
539 try {
540 glob = window;
541 } catch (e) {
542 glob = self;
543 }
544
545 glob.SparkMD5 = factory();
546 }
547}(function (undefined) {
548
549 'use strict';
550
551 /*
552 * Fastest md5 implementation around (JKM md5).
553 * Credits: Joseph Myers
554 *
555 * @see http://www.myersdaily.org/joseph/javascript/md5-text.html
556 * @see http://jsperf.com/md5-shootout/7
557 */
558
559 /* this function is much faster,
560 so if possible we use it. Some IEs
561 are the only ones I know of that
562 need the idiotic second function,
563 generated by an if clause. */
564 var add32 = function (a, b) {
565 return (a + b) & 0xFFFFFFFF;
566 },
567 hex_chr = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'];
568
569
570 function cmn(q, a, b, x, s, t) {
571 a = add32(add32(a, q), add32(x, t));
572 return add32((a << s) | (a >>> (32 - s)), b);
573 }
574
575 function md5cycle(x, k) {
576 var a = x[0],
577 b = x[1],
578 c = x[2],
579 d = x[3];
580
581 a += (b & c | ~b & d) + k[0] - 680876936 | 0;
582 a = (a << 7 | a >>> 25) + b | 0;
583 d += (a & b | ~a & c) + k[1] - 389564586 | 0;
584 d = (d << 12 | d >>> 20) + a | 0;
585 c += (d & a | ~d & b) + k[2] + 606105819 | 0;
586 c = (c << 17 | c >>> 15) + d | 0;
587 b += (c & d | ~c & a) + k[3] - 1044525330 | 0;
588 b = (b << 22 | b >>> 10) + c | 0;
589 a += (b & c | ~b & d) + k[4] - 176418897 | 0;
590 a = (a << 7 | a >>> 25) + b | 0;
591 d += (a & b | ~a & c) + k[5] + 1200080426 | 0;
592 d = (d << 12 | d >>> 20) + a | 0;
593 c += (d & a | ~d & b) + k[6] - 1473231341 | 0;
594 c = (c << 17 | c >>> 15) + d | 0;
595 b += (c & d | ~c & a) + k[7] - 45705983 | 0;
596 b = (b << 22 | b >>> 10) + c | 0;
597 a += (b & c | ~b & d) + k[8] + 1770035416 | 0;
598 a = (a << 7 | a >>> 25) + b | 0;
599 d += (a & b | ~a & c) + k[9] - 1958414417 | 0;
600 d = (d << 12 | d >>> 20) + a | 0;
601 c += (d & a | ~d & b) + k[10] - 42063 | 0;
602 c = (c << 17 | c >>> 15) + d | 0;
603 b += (c & d | ~c & a) + k[11] - 1990404162 | 0;
604 b = (b << 22 | b >>> 10) + c | 0;
605 a += (b & c | ~b & d) + k[12] + 1804603682 | 0;
606 a = (a << 7 | a >>> 25) + b | 0;
607 d += (a & b | ~a & c) + k[13] - 40341101 | 0;
608 d = (d << 12 | d >>> 20) + a | 0;
609 c += (d & a | ~d & b) + k[14] - 1502002290 | 0;
610 c = (c << 17 | c >>> 15) + d | 0;
611 b += (c & d | ~c & a) + k[15] + 1236535329 | 0;
612 b = (b << 22 | b >>> 10) + c | 0;
613
614 a += (b & d | c & ~d) + k[1] - 165796510 | 0;
615 a = (a << 5 | a >>> 27) + b | 0;
616 d += (a & c | b & ~c) + k[6] - 1069501632 | 0;
617 d = (d << 9 | d >>> 23) + a | 0;
618 c += (d & b | a & ~b) + k[11] + 643717713 | 0;
619 c = (c << 14 | c >>> 18) + d | 0;
620 b += (c & a | d & ~a) + k[0] - 373897302 | 0;
621 b = (b << 20 | b >>> 12) + c | 0;
622 a += (b & d | c & ~d) + k[5] - 701558691 | 0;
623 a = (a << 5 | a >>> 27) + b | 0;
624 d += (a & c | b & ~c) + k[10] + 38016083 | 0;
625 d = (d << 9 | d >>> 23) + a | 0;
626 c += (d & b | a & ~b) + k[15] - 660478335 | 0;
627 c = (c << 14 | c >>> 18) + d | 0;
628 b += (c & a | d & ~a) + k[4] - 405537848 | 0;
629 b = (b << 20 | b >>> 12) + c | 0;
630 a += (b & d | c & ~d) + k[9] + 568446438 | 0;
631 a = (a << 5 | a >>> 27) + b | 0;
632 d += (a & c | b & ~c) + k[14] - 1019803690 | 0;
633 d = (d << 9 | d >>> 23) + a | 0;
634 c += (d & b | a & ~b) + k[3] - 187363961 | 0;
635 c = (c << 14 | c >>> 18) + d | 0;
636 b += (c & a | d & ~a) + k[8] + 1163531501 | 0;
637 b = (b << 20 | b >>> 12) + c | 0;
638 a += (b & d | c & ~d) + k[13] - 1444681467 | 0;
639 a = (a << 5 | a >>> 27) + b | 0;
640 d += (a & c | b & ~c) + k[2] - 51403784 | 0;
641 d = (d << 9 | d >>> 23) + a | 0;
642 c += (d & b | a & ~b) + k[7] + 1735328473 | 0;
643 c = (c << 14 | c >>> 18) + d | 0;
644 b += (c & a | d & ~a) + k[12] - 1926607734 | 0;
645 b = (b << 20 | b >>> 12) + c | 0;
646
647 a += (b ^ c ^ d) + k[5] - 378558 | 0;
648 a = (a << 4 | a >>> 28) + b | 0;
649 d += (a ^ b ^ c) + k[8] - 2022574463 | 0;
650 d = (d << 11 | d >>> 21) + a | 0;
651 c += (d ^ a ^ b) + k[11] + 1839030562 | 0;
652 c = (c << 16 | c >>> 16) + d | 0;
653 b += (c ^ d ^ a) + k[14] - 35309556 | 0;
654 b = (b << 23 | b >>> 9) + c | 0;
655 a += (b ^ c ^ d) + k[1] - 1530992060 | 0;
656 a = (a << 4 | a >>> 28) + b | 0;
657 d += (a ^ b ^ c) + k[4] + 1272893353 | 0;
658 d = (d << 11 | d >>> 21) + a | 0;
659 c += (d ^ a ^ b) + k[7] - 155497632 | 0;
660 c = (c << 16 | c >>> 16) + d | 0;
661 b += (c ^ d ^ a) + k[10] - 1094730640 | 0;
662 b = (b << 23 | b >>> 9) + c | 0;
663 a += (b ^ c ^ d) + k[13] + 681279174 | 0;
664 a = (a << 4 | a >>> 28) + b | 0;
665 d += (a ^ b ^ c) + k[0] - 358537222 | 0;
666 d = (d << 11 | d >>> 21) + a | 0;
667 c += (d ^ a ^ b) + k[3] - 722521979 | 0;
668 c = (c << 16 | c >>> 16) + d | 0;
669 b += (c ^ d ^ a) + k[6] + 76029189 | 0;
670 b = (b << 23 | b >>> 9) + c | 0;
671 a += (b ^ c ^ d) + k[9] - 640364487 | 0;
672 a = (a << 4 | a >>> 28) + b | 0;
673 d += (a ^ b ^ c) + k[12] - 421815835 | 0;
674 d = (d << 11 | d >>> 21) + a | 0;
675 c += (d ^ a ^ b) + k[15] + 530742520 | 0;
676 c = (c << 16 | c >>> 16) + d | 0;
677 b += (c ^ d ^ a) + k[2] - 995338651 | 0;
678 b = (b << 23 | b >>> 9) + c | 0;
679
680 a += (c ^ (b | ~d)) + k[0] - 198630844 | 0;
681 a = (a << 6 | a >>> 26) + b | 0;
682 d += (b ^ (a | ~c)) + k[7] + 1126891415 | 0;
683 d = (d << 10 | d >>> 22) + a | 0;
684 c += (a ^ (d | ~b)) + k[14] - 1416354905 | 0;
685 c = (c << 15 | c >>> 17) + d | 0;
686 b += (d ^ (c | ~a)) + k[5] - 57434055 | 0;
687 b = (b << 21 |b >>> 11) + c | 0;
688 a += (c ^ (b | ~d)) + k[12] + 1700485571 | 0;
689 a = (a << 6 | a >>> 26) + b | 0;
690 d += (b ^ (a | ~c)) + k[3] - 1894986606 | 0;
691 d = (d << 10 | d >>> 22) + a | 0;
692 c += (a ^ (d | ~b)) + k[10] - 1051523 | 0;
693 c = (c << 15 | c >>> 17) + d | 0;
694 b += (d ^ (c | ~a)) + k[1] - 2054922799 | 0;
695 b = (b << 21 |b >>> 11) + c | 0;
696 a += (c ^ (b | ~d)) + k[8] + 1873313359 | 0;
697 a = (a << 6 | a >>> 26) + b | 0;
698 d += (b ^ (a | ~c)) + k[15] - 30611744 | 0;
699 d = (d << 10 | d >>> 22) + a | 0;
700 c += (a ^ (d | ~b)) + k[6] - 1560198380 | 0;
701 c = (c << 15 | c >>> 17) + d | 0;
702 b += (d ^ (c | ~a)) + k[13] + 1309151649 | 0;
703 b = (b << 21 |b >>> 11) + c | 0;
704 a += (c ^ (b | ~d)) + k[4] - 145523070 | 0;
705 a = (a << 6 | a >>> 26) + b | 0;
706 d += (b ^ (a | ~c)) + k[11] - 1120210379 | 0;
707 d = (d << 10 | d >>> 22) + a | 0;
708 c += (a ^ (d | ~b)) + k[2] + 718787259 | 0;
709 c = (c << 15 | c >>> 17) + d | 0;
710 b += (d ^ (c | ~a)) + k[9] - 343485551 | 0;
711 b = (b << 21 | b >>> 11) + c | 0;
712
713 x[0] = a + x[0] | 0;
714 x[1] = b + x[1] | 0;
715 x[2] = c + x[2] | 0;
716 x[3] = d + x[3] | 0;
717 }
718
719 function md5blk(s) {
720 var md5blks = [],
721 i; /* Andy King said do it this way. */
722
723 for (i = 0; i < 64; i += 4) {
724 md5blks[i >> 2] = s.charCodeAt(i) + (s.charCodeAt(i + 1) << 8) + (s.charCodeAt(i + 2) << 16) + (s.charCodeAt(i + 3) << 24);
725 }
726 return md5blks;
727 }
728
729 function md5blk_array(a) {
730 var md5blks = [],
731 i; /* Andy King said do it this way. */
732
733 for (i = 0; i < 64; i += 4) {
734 md5blks[i >> 2] = a[i] + (a[i + 1] << 8) + (a[i + 2] << 16) + (a[i + 3] << 24);
735 }
736 return md5blks;
737 }
738
739 function md51(s) {
740 var n = s.length,
741 state = [1732584193, -271733879, -1732584194, 271733878],
742 i,
743 length,
744 tail,
745 tmp,
746 lo,
747 hi;
748
749 for (i = 64; i <= n; i += 64) {
750 md5cycle(state, md5blk(s.substring(i - 64, i)));
751 }
752 s = s.substring(i - 64);
753 length = s.length;
754 tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
755 for (i = 0; i < length; i += 1) {
756 tail[i >> 2] |= s.charCodeAt(i) << ((i % 4) << 3);
757 }
758 tail[i >> 2] |= 0x80 << ((i % 4) << 3);
759 if (i > 55) {
760 md5cycle(state, tail);
761 for (i = 0; i < 16; i += 1) {
762 tail[i] = 0;
763 }
764 }
765
766 // Beware that the final length might not fit in 32 bits so we take care of that
767 tmp = n * 8;
768 tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
769 lo = parseInt(tmp[2], 16);
770 hi = parseInt(tmp[1], 16) || 0;
771
772 tail[14] = lo;
773 tail[15] = hi;
774
775 md5cycle(state, tail);
776 return state;
777 }
778
779 function md51_array(a) {
780 var n = a.length,
781 state = [1732584193, -271733879, -1732584194, 271733878],
782 i,
783 length,
784 tail,
785 tmp,
786 lo,
787 hi;
788
789 for (i = 64; i <= n; i += 64) {
790 md5cycle(state, md5blk_array(a.subarray(i - 64, i)));
791 }
792
793 // Not sure if it is a bug, however IE10 will always produce a sub array of length 1
794 // containing the last element of the parent array if the sub array specified starts
795 // beyond the length of the parent array - weird.
796 // https://connect.microsoft.com/IE/feedback/details/771452/typed-array-subarray-issue
797 a = (i - 64) < n ? a.subarray(i - 64) : new Uint8Array(0);
798
799 length = a.length;
800 tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
801 for (i = 0; i < length; i += 1) {
802 tail[i >> 2] |= a[i] << ((i % 4) << 3);
803 }
804
805 tail[i >> 2] |= 0x80 << ((i % 4) << 3);
806 if (i > 55) {
807 md5cycle(state, tail);
808 for (i = 0; i < 16; i += 1) {
809 tail[i] = 0;
810 }
811 }
812
813 // Beware that the final length might not fit in 32 bits so we take care of that
814 tmp = n * 8;
815 tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
816 lo = parseInt(tmp[2], 16);
817 hi = parseInt(tmp[1], 16) || 0;
818
819 tail[14] = lo;
820 tail[15] = hi;
821
822 md5cycle(state, tail);
823
824 return state;
825 }
826
827 function rhex(n) {
828 var s = '',
829 j;
830 for (j = 0; j < 4; j += 1) {
831 s += hex_chr[(n >> (j * 8 + 4)) & 0x0F] + hex_chr[(n >> (j * 8)) & 0x0F];
832 }
833 return s;
834 }
835
836 function hex(x) {
837 var i;
838 for (i = 0; i < x.length; i += 1) {
839 x[i] = rhex(x[i]);
840 }
841 return x.join('');
842 }
843
844 // In some cases the fast add32 function cannot be used..
845 if (hex(md51('hello')) !== '5d41402abc4b2a76b9719d911017c592') {
846 add32 = function (x, y) {
847 var lsw = (x & 0xFFFF) + (y & 0xFFFF),
848 msw = (x >> 16) + (y >> 16) + (lsw >> 16);
849 return (msw << 16) | (lsw & 0xFFFF);
850 };
851 }
852
853 // ---------------------------------------------------
854
855 /**
856 * ArrayBuffer slice polyfill.
857 *
858 * @see https://github.com/ttaubert/node-arraybuffer-slice
859 */
860
861 if (typeof ArrayBuffer !== 'undefined' && !ArrayBuffer.prototype.slice) {
862 (function () {
863 function clamp(val, length) {
864 val = (val | 0) || 0;
865
866 if (val < 0) {
867 return Math.max(val + length, 0);
868 }
869
870 return Math.min(val, length);
871 }
872
873 ArrayBuffer.prototype.slice = function (from, to) {
874 var length = this.byteLength,
875 begin = clamp(from, length),
876 end = length,
877 num,
878 target,
879 targetArray,
880 sourceArray;
881
882 if (to !== undefined) {
883 end = clamp(to, length);
884 }
885
886 if (begin > end) {
887 return new ArrayBuffer(0);
888 }
889
890 num = end - begin;
891 target = new ArrayBuffer(num);
892 targetArray = new Uint8Array(target);
893
894 sourceArray = new Uint8Array(this, begin, num);
895 targetArray.set(sourceArray);
896
897 return target;
898 };
899 })();
900 }
901
902 // ---------------------------------------------------
903
904 /**
905 * Helpers.
906 */
907
908 function toUtf8(str) {
909 if (/[\u0080-\uFFFF]/.test(str)) {
910 str = unescape(encodeURIComponent(str));
911 }
912
913 return str;
914 }
915
916 function utf8Str2ArrayBuffer(str, returnUInt8Array) {
917 var length = str.length,
918 buff = new ArrayBuffer(length),
919 arr = new Uint8Array(buff),
920 i;
921
922 for (i = 0; i < length; i += 1) {
923 arr[i] = str.charCodeAt(i);
924 }
925
926 return returnUInt8Array ? arr : buff;
927 }
928
929 function arrayBuffer2Utf8Str(buff) {
930 return String.fromCharCode.apply(null, new Uint8Array(buff));
931 }
932
933 function concatenateArrayBuffers(first, second, returnUInt8Array) {
934 var result = new Uint8Array(first.byteLength + second.byteLength);
935
936 result.set(new Uint8Array(first));
937 result.set(new Uint8Array(second), first.byteLength);
938
939 return returnUInt8Array ? result : result.buffer;
940 }
941
942 function hexToBinaryString(hex) {
943 var bytes = [],
944 length = hex.length,
945 x;
946
947 for (x = 0; x < length - 1; x += 2) {
948 bytes.push(parseInt(hex.substr(x, 2), 16));
949 }
950
951 return String.fromCharCode.apply(String, bytes);
952 }
953
954 // ---------------------------------------------------
955
956 /**
957 * SparkMD5 OOP implementation.
958 *
959 * Use this class to perform an incremental md5, otherwise use the
960 * static methods instead.
961 */
962
963 function SparkMD5() {
964 // call reset to init the instance
965 this.reset();
966 }
967
968 /**
969 * Appends a string.
970 * A conversion will be applied if an utf8 string is detected.
971 *
972 * @param {String} str The string to be appended
973 *
974 * @return {SparkMD5} The instance itself
975 */
976 SparkMD5.prototype.append = function (str) {
977 // Converts the string to utf8 bytes if necessary
978 // Then append as binary
979 this.appendBinary(toUtf8(str));
980
981 return this;
982 };
983
984 /**
985 * Appends a binary string.
986 *
987 * @param {String} contents The binary string to be appended
988 *
989 * @return {SparkMD5} The instance itself
990 */
991 SparkMD5.prototype.appendBinary = function (contents) {
992 this._buff += contents;
993 this._length += contents.length;
994
995 var length = this._buff.length,
996 i;
997
998 for (i = 64; i <= length; i += 64) {
999 md5cycle(this._hash, md5blk(this._buff.substring(i - 64, i)));
1000 }
1001
1002 this._buff = this._buff.substring(i - 64);
1003
1004 return this;
1005 };
1006
1007 /**
1008 * Finishes the incremental computation, reseting the internal state and
1009 * returning the result.
1010 *
1011 * @param {Boolean} raw True to get the raw string, false to get the hex string
1012 *
1013 * @return {String} The result
1014 */
1015 SparkMD5.prototype.end = function (raw) {
1016 var buff = this._buff,
1017 length = buff.length,
1018 i,
1019 tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
1020 ret;
1021
1022 for (i = 0; i < length; i += 1) {
1023 tail[i >> 2] |= buff.charCodeAt(i) << ((i % 4) << 3);
1024 }
1025
1026 this._finish(tail, length);
1027 ret = hex(this._hash);
1028
1029 if (raw) {
1030 ret = hexToBinaryString(ret);
1031 }
1032
1033 this.reset();
1034
1035 return ret;
1036 };
1037
1038 /**
1039 * Resets the internal state of the computation.
1040 *
1041 * @return {SparkMD5} The instance itself
1042 */
1043 SparkMD5.prototype.reset = function () {
1044 this._buff = '';
1045 this._length = 0;
1046 this._hash = [1732584193, -271733879, -1732584194, 271733878];
1047
1048 return this;
1049 };
1050
1051 /**
1052 * Gets the internal state of the computation.
1053 *
1054 * @return {Object} The state
1055 */
1056 SparkMD5.prototype.getState = function () {
1057 return {
1058 buff: this._buff,
1059 length: this._length,
1060 hash: this._hash.slice()
1061 };
1062 };
1063
1064 /**
1065 * Gets the internal state of the computation.
1066 *
1067 * @param {Object} state The state
1068 *
1069 * @return {SparkMD5} The instance itself
1070 */
1071 SparkMD5.prototype.setState = function (state) {
1072 this._buff = state.buff;
1073 this._length = state.length;
1074 this._hash = state.hash;
1075
1076 return this;
1077 };
1078
1079 /**
1080 * Releases memory used by the incremental buffer and other additional
1081 * resources. If you plan to use the instance again, use reset instead.
1082 */
1083 SparkMD5.prototype.destroy = function () {
1084 delete this._hash;
1085 delete this._buff;
1086 delete this._length;
1087 };
1088
/**
 * Finish the final calculation based on the tail.
 *
 * Applies MD5 padding (a 0x80 marker byte plus the 64-bit message bit-length
 * stored in words 14/15) and runs the final md5cycle pass(es).
 *
 * @param {Array} tail The tail (will be modified)
 * @param {Number} length The length of the remaining buffer
 */
SparkMD5.prototype._finish = function (tail, length) {
    var i = length,
        tmp,
        lo,
        hi;

    // Append the mandatory 0x80 padding byte right after the data.
    tail[i >> 2] |= 0x80 << ((i % 4) << 3);
    // If the padding byte landed past word 13 there is no room left in this
    // block for the 64-bit length, so flush it and start a zeroed block.
    if (i > 55) {
        md5cycle(this._hash, tail);
        for (i = 0; i < 16; i += 1) {
            tail[i] = 0;
        }
    }

    // Do the final computation based on the tail and length
    // Beware that the final length may not fit in 32 bits so we take care of that
    // by splitting the hex representation into low (last 8 digits) and high parts.
    tmp = this._length * 8;
    tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
    lo = parseInt(tmp[2], 16);
    hi = parseInt(tmp[1], 16) || 0;

    tail[14] = lo;
    tail[15] = hi;
    md5cycle(this._hash, tail);
};
1120
/**
 * Performs the md5 hash on a string.
 * A conversion will be applied if utf8 string is detected.
 *
 * @param {String} str The string
 * @param {Boolean} [raw] True to get the raw string, false to get the hex string
 *
 * @return {String} The result
 */
SparkMD5.hash = function (str, raw) {
    // Convert the string to utf8 bytes if necessary, then hash the
    // resulting binary string.
    var binaryString = toUtf8(str);

    return SparkMD5.hashBinary(binaryString, raw);
};
1135
/**
 * Performs the md5 hash on a binary string.
 *
 * @param {String} content The binary string
 * @param {Boolean} [raw] True to get the raw string, false to get the hex string
 *
 * @return {String} The result
 */
SparkMD5.hashBinary = function (content, raw) {
    var hexResult = hex(md51(content));

    if (raw) {
        return hexToBinaryString(hexResult);
    }

    return hexResult;
};
1150
1151 // ---------------------------------------------------
1152
/**
 * SparkMD5 OOP implementation for array buffers.
 *
 * Use this class to perform an incremental md5 ONLY for array buffers.
 *
 * @constructor
 */
SparkMD5.ArrayBuffer = function () {
    // call reset to init the instance
    this.reset();
};
1162
/**
 * Appends an array buffer.
 *
 * @param {ArrayBuffer} arr The array to be appended
 *
 * @return {SparkMD5.ArrayBuffer} The instance itself
 */
SparkMD5.ArrayBuffer.prototype.append = function (arr) {
    // Join any leftover bytes from the previous append with the new data.
    var buff = concatenateArrayBuffers(this._buff.buffer, arr, true),
        length = buff.length,
        i;

    this._length += arr.byteLength;

    // Consume the data in full 64-byte MD5 blocks.
    for (i = 64; i <= length; i += 64) {
        md5cycle(this._hash, md5blk_array(buff.subarray(i - 64, i)));
    }

    // Keep the remaining (< 64 byte) tail for the next append/end call;
    // after the loop, i - 64 is the offset of the first unconsumed byte.
    this._buff = (i - 64) < length ? new Uint8Array(buff.buffer.slice(i - 64)) : new Uint8Array(0);

    return this;
};
1185
/**
 * Finishes the incremental computation, resetting the internal state and
 * returning the result.
 *
 * @param {Boolean} raw True to get the raw string, false to get the hex string
 *
 * @return {String} The result
 */
SparkMD5.ArrayBuffer.prototype.end = function (raw) {
    var remaining = this._buff,
        tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        idx,
        result;

    // Pack the leftover (< 64 byte) buffer into little-endian 32-bit words.
    for (idx = 0; idx < remaining.length; idx += 1) {
        tail[idx >> 2] |= remaining[idx] << ((idx % 4) << 3);
    }

    this._finish(tail, remaining.length);
    result = hex(this._hash);

    if (raw) {
        result = hexToBinaryString(result);
    }

    this.reset();

    return result;
};
1216
/**
 * Resets the internal state of the computation.
 *
 * @return {SparkMD5.ArrayBuffer} The instance itself
 */
SparkMD5.ArrayBuffer.prototype.reset = function () {
    // Standard MD5 initialization vector (A, B, C, D from RFC 1321).
    this._hash = [1732584193, -271733879, -1732584194, 271733878];
    this._buff = new Uint8Array(0);
    this._length = 0;

    return this;
};
1229
/**
 * Gets the internal state of the computation.
 *
 * @return {Object} The state (the pending byte buffer is serialized to a
 *                  string so the state survives structured/JSON round-trips)
 */
SparkMD5.ArrayBuffer.prototype.getState = function () {
    var state = SparkMD5.prototype.getState.call(this);

    // Convert buffer to a string
    state.buff = arrayBuffer2Utf8Str(state.buff);

    return state;
};
1243
/**
 * Sets the internal state of the computation, as previously returned by
 * getState, so an incremental hash can be resumed.
 *
 * @param {Object} state The state
 *
 * @return {SparkMD5.ArrayBuffer} The instance itself
 */
SparkMD5.ArrayBuffer.prototype.setState = function (state) {
    // Convert string to buffer
    state.buff = utf8Str2ArrayBuffer(state.buff, true);

    return SparkMD5.prototype.setState.call(this, state);
};
1257
// Finalization and teardown contain no string-specific logic, so the
// ArrayBuffer variant reuses the string implementation's methods directly.
SparkMD5.ArrayBuffer.prototype.destroy = SparkMD5.prototype.destroy;

SparkMD5.ArrayBuffer.prototype._finish = SparkMD5.prototype._finish;
1261
/**
 * Performs the md5 hash on an array buffer.
 *
 * @param {ArrayBuffer} arr The array buffer
 * @param {Boolean} [raw] True to get the raw string, false to get the hex one
 *
 * @return {String} The result
 */
SparkMD5.ArrayBuffer.hash = function (arr, raw) {
    var hexResult = hex(md51_array(new Uint8Array(arr)));

    if (raw) {
        return hexToBinaryString(hexResult);
    }

    return hexResult;
};
1276
1277 return SparkMD5;
1278}));
1279
1280},{}],3:[function(_dereq_,module,exports){
1281"use strict";
1282
// Re-export the public uuid API surface. Accessor properties are used so
// each named export always reflects the (interop-wrapped) default export of
// the corresponding submodule required below.
Object.defineProperty(exports, "__esModule", {
  value: true
});
Object.defineProperty(exports, "v1", {
  enumerable: true,
  get: function () {
    return _v.default;
  }
});
Object.defineProperty(exports, "v3", {
  enumerable: true,
  get: function () {
    return _v2.default;
  }
});
Object.defineProperty(exports, "v4", {
  enumerable: true,
  get: function () {
    return _v3.default;
  }
});
Object.defineProperty(exports, "v5", {
  enumerable: true,
  get: function () {
    return _v4.default;
  }
});
Object.defineProperty(exports, "NIL", {
  enumerable: true,
  get: function () {
    return _nil.default;
  }
});
Object.defineProperty(exports, "version", {
  enumerable: true,
  get: function () {
    return _version.default;
  }
});
Object.defineProperty(exports, "validate", {
  enumerable: true,
  get: function () {
    return _validate.default;
  }
});
Object.defineProperty(exports, "stringify", {
  enumerable: true,
  get: function () {
    return _stringify.default;
  }
});
Object.defineProperty(exports, "parse", {
  enumerable: true,
  get: function () {
    return _parse.default;
  }
});
1340
1341var _v = _interopRequireDefault(_dereq_("./v1.js"));
1342
1343var _v2 = _interopRequireDefault(_dereq_("./v3.js"));
1344
1345var _v3 = _interopRequireDefault(_dereq_("./v4.js"));
1346
1347var _v4 = _interopRequireDefault(_dereq_("./v5.js"));
1348
1349var _nil = _interopRequireDefault(_dereq_("./nil.js"));
1350
1351var _version = _interopRequireDefault(_dereq_("./version.js"));
1352
1353var _validate = _interopRequireDefault(_dereq_("./validate.js"));
1354
1355var _stringify = _interopRequireDefault(_dereq_("./stringify.js"));
1356
1357var _parse = _interopRequireDefault(_dereq_("./parse.js"));
1358
/**
 * Babel interop helper: returns ES-module namespaces unchanged and wraps
 * plain CommonJS exports in an object exposing them as `default`.
 */
function _interopRequireDefault(obj) {
  if (obj && obj.__esModule) {
    return obj;
  }

  return { default: obj };
}
1360},{"./nil.js":5,"./parse.js":6,"./stringify.js":10,"./v1.js":11,"./v3.js":12,"./v4.js":14,"./v5.js":15,"./validate.js":16,"./version.js":17}],4:[function(_dereq_,module,exports){
1361"use strict";
1362
1363Object.defineProperty(exports, "__esModule", {
1364 value: true
1365});
1366exports.default = void 0;
1367
1368/*
1369 * Browser-compatible JavaScript MD5
1370 *
1371 * Modification of JavaScript MD5
1372 * https://github.com/blueimp/JavaScript-MD5
1373 *
1374 * Copyright 2011, Sebastian Tschan
1375 * https://blueimp.net
1376 *
1377 * Licensed under the MIT license:
1378 * https://opensource.org/licenses/MIT
1379 *
1380 * Based on
1381 * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message
1382 * Digest Algorithm, as defined in RFC 1321.
1383 * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009
1384 * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet
1385 * Distributed under the BSD License
1386 * See http://pajhome.org.uk/crypt/md5 for more info.
1387 */
/**
 * Computes the MD5 digest of the given input.
 *
 * @param {String|Array|Uint8Array} bytes Input data; strings are first
 *   UTF-8 encoded into a byte array.
 * @returns {Array} Digest as an array of 16 byte values.
 */
function md5(bytes) {
  if (typeof bytes === 'string') {
    // UTF8 escape: one character per encoded byte
    const encoded = unescape(encodeURIComponent(bytes));
    const buf = new Uint8Array(encoded.length);

    for (let i = 0; i < encoded.length; ++i) {
      buf[i] = encoded.charCodeAt(i);
    }

    bytes = buf;
  }

  const words = bytesToWords(bytes);
  const bitLength = bytes.length * 8;

  return md5ToHexEncodedArray(wordsToMd5(words, bitLength));
}
/*
 * Convert an array of little-endian words to an array of bytes
 */


/**
 * @param {Array|Uint32Array} input Little-endian 32-bit words
 * @returns {Array} Byte values (0-255), four per input word,
 *   least-significant byte first
 */
function md5ToHexEncodedArray(input) {
  const hexTab = '0123456789abcdef';
  const totalBits = input.length * 32;
  const bytes = [];

  for (let bit = 0; bit < totalBits; bit += 8) {
    // Extract one byte of the current word, least-significant first.
    const value = input[bit >> 5] >>> bit % 32 & 0xff;
    const pair = hexTab.charAt(value >>> 4 & 0x0f) + hexTab.charAt(value & 0x0f);

    bytes.push(parseInt(pair, 16));
  }

  return bytes;
}
/**
 * Calculate output length with padding and bit length.
 *
 * @param {Number} inputLength8 Message length in bits
 * @returns {Number} Number of 32-bit words needed to hold the padded
 *   message plus the trailing 64-bit length
 */
function getOutputLength(inputLength8) {
  const blockCount = inputLength8 + 64 >>> 9;

  return (blockCount << 4) + 14 + 1;
}
/*
 * Calculate the MD5 of an array of little-endian words, and a bit length.
 * The four groups of 16 operations below are rounds 1-4 of RFC 1321
 * (F, G, H, I), with the standard per-step sine-derived constants.
 */


function wordsToMd5(x, len) {
  /* append padding */
  x[len >> 5] |= 0x80 << len % 32;
  x[getOutputLength(len) - 1] = len;
  // MD5 initialization vector (A, B, C, D from RFC 1321).
  let a = 1732584193;
  let b = -271733879;
  let c = -1732584194;
  let d = 271733878;

  // Process the message in 16-word (512-bit) blocks.
  for (let i = 0; i < x.length; i += 16) {
    const olda = a;
    const oldb = b;
    const oldc = c;
    const oldd = d;
    // Round 1 (F)
    a = md5ff(a, b, c, d, x[i], 7, -680876936);
    d = md5ff(d, a, b, c, x[i + 1], 12, -389564586);
    c = md5ff(c, d, a, b, x[i + 2], 17, 606105819);
    b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330);
    a = md5ff(a, b, c, d, x[i + 4], 7, -176418897);
    d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426);
    c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341);
    b = md5ff(b, c, d, a, x[i + 7], 22, -45705983);
    a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416);
    d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417);
    c = md5ff(c, d, a, b, x[i + 10], 17, -42063);
    b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162);
    a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682);
    d = md5ff(d, a, b, c, x[i + 13], 12, -40341101);
    c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290);
    b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329);
    // Round 2 (G)
    a = md5gg(a, b, c, d, x[i + 1], 5, -165796510);
    d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632);
    c = md5gg(c, d, a, b, x[i + 11], 14, 643717713);
    b = md5gg(b, c, d, a, x[i], 20, -373897302);
    a = md5gg(a, b, c, d, x[i + 5], 5, -701558691);
    d = md5gg(d, a, b, c, x[i + 10], 9, 38016083);
    c = md5gg(c, d, a, b, x[i + 15], 14, -660478335);
    b = md5gg(b, c, d, a, x[i + 4], 20, -405537848);
    a = md5gg(a, b, c, d, x[i + 9], 5, 568446438);
    d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690);
    c = md5gg(c, d, a, b, x[i + 3], 14, -187363961);
    b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501);
    a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467);
    d = md5gg(d, a, b, c, x[i + 2], 9, -51403784);
    c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473);
    b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734);
    // Round 3 (H)
    a = md5hh(a, b, c, d, x[i + 5], 4, -378558);
    d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463);
    c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562);
    b = md5hh(b, c, d, a, x[i + 14], 23, -35309556);
    a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060);
    d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353);
    c = md5hh(c, d, a, b, x[i + 7], 16, -155497632);
    b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640);
    a = md5hh(a, b, c, d, x[i + 13], 4, 681279174);
    d = md5hh(d, a, b, c, x[i], 11, -358537222);
    c = md5hh(c, d, a, b, x[i + 3], 16, -722521979);
    b = md5hh(b, c, d, a, x[i + 6], 23, 76029189);
    a = md5hh(a, b, c, d, x[i + 9], 4, -640364487);
    d = md5hh(d, a, b, c, x[i + 12], 11, -421815835);
    c = md5hh(c, d, a, b, x[i + 15], 16, 530742520);
    b = md5hh(b, c, d, a, x[i + 2], 23, -995338651);
    // Round 4 (I)
    a = md5ii(a, b, c, d, x[i], 6, -198630844);
    d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415);
    c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905);
    b = md5ii(b, c, d, a, x[i + 5], 21, -57434055);
    a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571);
    d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606);
    c = md5ii(c, d, a, b, x[i + 10], 15, -1051523);
    b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799);
    a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359);
    d = md5ii(d, a, b, c, x[i + 15], 10, -30611744);
    c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380);
    b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649);
    a = md5ii(a, b, c, d, x[i + 4], 6, -145523070);
    d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379);
    c = md5ii(c, d, a, b, x[i + 2], 15, 718787259);
    b = md5ii(b, c, d, a, x[i + 9], 21, -343485551);
    // Add this block's result into the running state (mod 2^32).
    a = safeAdd(a, olda);
    b = safeAdd(b, oldb);
    c = safeAdd(c, oldc);
    d = safeAdd(d, oldd);
  }

  return [a, b, c, d];
}
/*
 * Convert an array bytes to an array of little-endian words
 * Characters >255 have their high-byte silently ignored.
 */


function bytesToWords(input) {
  if (input.length === 0) {
    return [];
  }

  const bitLength = input.length * 8;
  // Size the word array so MD5 padding and the bit-length fit.
  const words = new Uint32Array(getOutputLength(bitLength));

  for (let byteIndex = 0; byteIndex < input.length; byteIndex += 1) {
    const bit = byteIndex * 8;

    words[bit >> 5] |= (input[byteIndex] & 0xff) << bit % 32;
  }

  return words;
}
/*
 * Add integers, wrapping at 2^32. This uses 16-bit operations internally
 * to work around bugs in some JS interpreters.
 */


function safeAdd(x, y) {
  const low = (x & 0xffff) + (y & 0xffff);
  const high = (x >> 16) + (y >> 16) + (low >> 16);

  return high << 16 | low & 0xffff;
}
/*
 * Bitwise rotate a 32-bit number to the left.
 */


function bitRotateLeft(num, cnt) {
  const shifted = num << cnt;
  const wrapped = num >>> 32 - cnt;

  return shifted | wrapped;
}
/*
 * Common transform shared by the four basic MD5 round operations:
 * add, rotate left by s, then add b.
 */


function md5cmn(q, a, b, x, s, t) {
  const sum = safeAdd(safeAdd(a, q), safeAdd(x, t));

  return safeAdd(bitRotateLeft(sum, s), b);
}
1565
/* The four MD5 auxiliary round functions F, G, H and I from RFC 1321,
 * each folded through the common md5cmn transform. */
function md5ff(a, b, c, d, x, s, t) {
  return md5cmn(b & c | ~b & d, a, b, x, s, t);
}

function md5gg(a, b, c, d, x, s, t) {
  return md5cmn(b & d | c & ~d, a, b, x, s, t);
}

function md5hh(a, b, c, d, x, s, t) {
  return md5cmn(b ^ c ^ d, a, b, x, s, t);
}

function md5ii(a, b, c, d, x, s, t) {
  return md5cmn(c ^ (b | ~d), a, b, x, s, t);
}
1581
1582var _default = md5;
1583exports.default = _default;
1584},{}],5:[function(_dereq_,module,exports){
1585"use strict";
1586
1587Object.defineProperty(exports, "__esModule", {
1588 value: true
1589});
1590exports.default = void 0;
// The RFC 4122 "nil" UUID: all 128 bits zero.
var _default = '00000000-0000-0000-0000-000000000000';
exports.default = _default;
1593},{}],6:[function(_dereq_,module,exports){
1594"use strict";
1595
1596Object.defineProperty(exports, "__esModule", {
1597 value: true
1598});
1599exports.default = void 0;
1600
1601var _validate = _interopRequireDefault(_dereq_("./validate.js"));
1602
1603function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
1604
/**
 * Parses a UUID string into its 16 constituent bytes.
 *
 * @param {String} uuid UUID string to parse
 * @returns {Uint8Array} 16 bytes, most significant first
 * @throws {TypeError} If `uuid` is not a valid UUID string
 */
function parse(uuid) {
  if (!(0, _validate.default)(uuid)) {
    throw TypeError('Invalid UUID');
  }

  let v;
  const arr = new Uint8Array(16); // Parse ########-....-....-....-............

  arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;
  arr[1] = v >>> 16 & 0xff;
  arr[2] = v >>> 8 & 0xff;
  arr[3] = v & 0xff; // Parse ........-####-....-....-............

  arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;
  arr[5] = v & 0xff; // Parse ........-....-####-....-............

  arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;
  arr[7] = v & 0xff; // Parse ........-....-....-####-............

  arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;
  arr[9] = v & 0xff; // Parse ........-....-....-....-############
  // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes)

  arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;
  arr[11] = v / 0x100000000 & 0xff;
  arr[12] = v >>> 24 & 0xff;
  arr[13] = v >>> 16 & 0xff;
  arr[14] = v >>> 8 & 0xff;
  arr[15] = v & 0xff;
  return arr;
}
1636
1637var _default = parse;
1638exports.default = _default;
1639},{"./validate.js":16}],7:[function(_dereq_,module,exports){
1640"use strict";
1641
1642Object.defineProperty(exports, "__esModule", {
1643 value: true
1644});
1645exports.default = void 0;
// Matches RFC 4122 UUIDs: versions 1-5 with variant digit [89ab], plus the
// special all-zero NIL UUID; case-insensitive.
var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;
exports.default = _default;
1648},{}],8:[function(_dereq_,module,exports){
1649"use strict";
1650
1651Object.defineProperty(exports, "__esModule", {
1652 value: true
1653});
1654exports.default = rng;
1655// Unique ID creation requires a high quality random # generator. In the browser we therefore
1656// require the crypto API and do not support built-in fallback to lower quality random number
1657// generators (like Math.random()).
1658let getRandomValues;
1659const rnds8 = new Uint8Array(16);
1660
1661function rng() {
1662 // lazy load so that environments that need to polyfill have a chance to do so
1663 if (!getRandomValues) {
1664 // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. Also,
1665 // find the complete implementation of crypto (msCrypto) on IE11.
1666 getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto) || typeof msCrypto !== 'undefined' && typeof msCrypto.getRandomValues === 'function' && msCrypto.getRandomValues.bind(msCrypto);
1667
1668 if (!getRandomValues) {
1669 throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported');
1670 }
1671 }
1672
1673 return getRandomValues(rnds8);
1674}
1675},{}],9:[function(_dereq_,module,exports){
1676"use strict";
1677
1678Object.defineProperty(exports, "__esModule", {
1679 value: true
1680});
1681exports.default = void 0;
1682
// Adapted from Chris Veness' SHA1 code at
// http://www.movable-type.co.uk/scripts/sha1.html

/**
 * SHA-1 logical function for round group s (0-3), per FIPS 180 section 4.1.1.
 */
function f(s, x, y, z) {
  switch (s) {
    case 0:
      // Ch(x, y, z)
      return x & y ^ ~x & z;

    case 1:
    case 3:
      // Parity(x, y, z) — used by both round groups 1 and 3
      return x ^ y ^ z;

    case 2:
      // Maj(x, y, z)
      return x & y ^ x & z ^ y & z;
  }
}
1700
/**
 * Rotates a 32-bit value left by n bits.
 */
function ROTL(x, n) {
  const left = x << n;
  const right = x >>> 32 - n;

  return left | right;
}
1704
/**
 * Computes the SHA-1 digest of the given input (FIPS 180 style).
 *
 * @param {String|Array|TypedArray} bytes Input data; strings are first
 *   UTF-8 encoded into an array of byte values.
 * @returns {Array} Digest as an array of 20 byte values, big-endian.
 */
function sha1(bytes) {
  // Round constants and initial hash state from the SHA-1 specification.
  const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6];
  const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0];

  if (typeof bytes === 'string') {
    const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape

    bytes = [];

    for (let i = 0; i < msg.length; ++i) {
      bytes.push(msg.charCodeAt(i));
    }
  } else if (!Array.isArray(bytes)) {
    // Convert Array-like to Array
    bytes = Array.prototype.slice.call(bytes);
  }

  // Append the 0x80 padding marker; the rest of the final block is
  // implicitly zero (missing array entries coerce to 0 in the shifts below).
  bytes.push(0x80);
  const l = bytes.length / 4 + 2;
  const N = Math.ceil(l / 16);
  const M = new Array(N);

  // Split the message into 16-word (512-bit) big-endian blocks.
  for (let i = 0; i < N; ++i) {
    const arr = new Uint32Array(16);

    for (let j = 0; j < 16; ++j) {
      arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3];
    }

    M[i] = arr;
  }

  // Store the 64-bit message bit-length in the last two words of the final
  // block; the high word is computed via division since JS bit ops are 32-bit.
  M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32);
  M[N - 1][14] = Math.floor(M[N - 1][14]);
  M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff;

  for (let i = 0; i < N; ++i) {
    // Message schedule: 16 block words expanded to 80.
    const W = new Uint32Array(80);

    for (let t = 0; t < 16; ++t) {
      W[t] = M[i][t];
    }

    for (let t = 16; t < 80; ++t) {
      W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1);
    }

    let a = H[0];
    let b = H[1];
    let c = H[2];
    let d = H[3];
    let e = H[4];

    // 80 compression rounds; s selects the round group (f function and K).
    for (let t = 0; t < 80; ++t) {
      const s = Math.floor(t / 20);
      const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0;
      e = d;
      d = c;
      c = ROTL(b, 30) >>> 0;
      b = a;
      a = T;
    }

    H[0] = H[0] + a >>> 0;
    H[1] = H[1] + b >>> 0;
    H[2] = H[2] + c >>> 0;
    H[3] = H[3] + d >>> 0;
    H[4] = H[4] + e >>> 0;
  }

  // Serialize the five 32-bit state words to 20 big-endian bytes.
  return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff];
}
1777
1778var _default = sha1;
1779exports.default = _default;
1780},{}],10:[function(_dereq_,module,exports){
1781"use strict";
1782
1783Object.defineProperty(exports, "__esModule", {
1784 value: true
1785});
1786exports.default = void 0;
1787
1788var _validate = _interopRequireDefault(_dereq_("./validate.js"));
1789
1790function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
1791
/**
 * Lookup table mapping a byte value (0-255) to its two-character lowercase
 * hex representation, used to convert an array of 16 byte values to UUID
 * string format of the form:
 * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
 */
const byteToHex = [];

for (let i = 0; i < 256; ++i) {
  // (i + 0x100) guarantees a 3-digit hex result, so slicing off the leading
  // "1" always yields a zero-padded 2-digit string. `slice` replaces the
  // deprecated Annex-B `String.prototype.substr`.
  byteToHex.push((i + 0x100).toString(16).slice(1));
}
1801
/**
 * Converts 16 byte values to the canonical UUID string form
 * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX.
 *
 * @param {Array|Uint8Array} arr Byte values
 * @param {Number} [offset=0] Index of the first UUID byte within `arr`
 * @returns {String} Lowercase UUID string
 * @throws {TypeError} If the assembled string is not a valid UUID
 */
function stringify(arr, offset = 0) {
  // Note: Be careful editing this code! It's been tuned for performance
  // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434
  const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one
  // of the following:
  // - One or more input array values don't map to a hex octet (leading to
  // "undefined" in the uuid)
  // - Invalid input values for the RFC `version` or `variant` fields

  if (!(0, _validate.default)(uuid)) {
    throw TypeError('Stringified UUID is invalid');
  }

  return uuid;
}
1817
1818var _default = stringify;
1819exports.default = _default;
1820},{"./validate.js":16}],11:[function(_dereq_,module,exports){
1821"use strict";
1822
1823Object.defineProperty(exports, "__esModule", {
1824 value: true
1825});
1826exports.default = void 0;
1827
1828var _rng = _interopRequireDefault(_dereq_("./rng.js"));
1829
1830var _stringify = _interopRequireDefault(_dereq_("./stringify.js"));
1831
1832function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
1833
// **`v1()` - Generate time-based UUID**
//
// Inspired by https://github.com/LiosK/UUID.js
// and http://docs.python.org/library/uuid.html
// Cached 48-bit node id (randomized with the multicast bit set unless the
// caller supplies one).
let _nodeId;

// Cached 14-bit clock sequence.
let _clockseq; // Previous uuid creation time is tracked below as msecs + nsecs


let _lastMSecs = 0;
let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details
1845
/**
 * Generates an RFC 4122 version 1 (time-based) UUID.
 *
 * @param {Object} [options] Optional overrides: `node` (6 bytes), `clockseq`,
 *   `msecs`, `nsecs`, `random` (16 seed bytes) or `rng`
 * @param {Array|Uint8Array} [buf] If provided, UUID bytes are written here
 * @param {Number} [offset=0] Starting index within `buf`
 * @returns {String|Array|Uint8Array} `buf` when given, else the UUID string
 * @throws {Error} If more than 10M uuids/sec are requested
 */
function v1(options, buf, offset) {
  let i = buf && offset || 0;
  const b = buf || new Array(16);
  options = options || {};
  let node = options.node || _nodeId;
  let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not
  // specified. We do this lazily to minimize issues related to insufficient
  // system entropy. See #189

  if (node == null || clockseq == null) {
    const seedBytes = options.random || (options.rng || _rng.default)();

    if (node == null) {
      // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)
      node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];
    }

    if (clockseq == null) {
      // Per 4.2.2, randomize (14 bit) clockseq
      clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;
    }
  } // UUID timestamps are 100 nano-second units since the Gregorian epoch,
  // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so
  // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
  // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.


  let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock
  // cycle to simulate higher resolution clock

  let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)

  const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression

  if (dt < 0 && options.clockseq === undefined) {
    clockseq = clockseq + 1 & 0x3fff;
  } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
  // time interval


  if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {
    nsecs = 0;
  } // Per 4.2.1.2 Throw error if too many uuids are requested


  if (nsecs >= 10000) {
    throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");
  }

  _lastMSecs = msecs;
  _lastNSecs = nsecs;
  _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch

  msecs += 12219292800000; // `time_low`

  const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;
  b[i++] = tl >>> 24 & 0xff;
  b[i++] = tl >>> 16 & 0xff;
  b[i++] = tl >>> 8 & 0xff;
  b[i++] = tl & 0xff; // `time_mid`

  const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;
  b[i++] = tmh >>> 8 & 0xff;
  b[i++] = tmh & 0xff; // `time_high_and_version`

  b[i++] = tmh >>> 24 & 0xf | 0x10; // include version

  b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)

  b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`

  b[i++] = clockseq & 0xff; // `node`

  for (let n = 0; n < 6; ++n) {
    b[i + n] = node[n];
  }

  return buf || (0, _stringify.default)(b);
}
1925
1926var _default = v1;
1927exports.default = _default;
1928},{"./rng.js":8,"./stringify.js":10}],12:[function(_dereq_,module,exports){
1929"use strict";
1930
1931Object.defineProperty(exports, "__esModule", {
1932 value: true
1933});
1934exports.default = void 0;
1935
1936var _v = _interopRequireDefault(_dereq_("./v35.js"));
1937
1938var _md = _interopRequireDefault(_dereq_("./md5.js"));
1939
1940function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
1941
// v3 = name-based UUID hashed with MD5 (RFC 4122 section 4.3);
// 0x30 is the version nibble OR-ed into byte 6 by the v35 factory.
const v3 = (0, _v.default)('v3', 0x30, _md.default);
var _default = v3;
exports.default = _default;
1945},{"./md5.js":4,"./v35.js":13}],13:[function(_dereq_,module,exports){
1946"use strict";
1947
1948Object.defineProperty(exports, "__esModule", {
1949 value: true
1950});
1951exports.default = _default;
1952exports.URL = exports.DNS = void 0;
1953
1954var _stringify = _interopRequireDefault(_dereq_("./stringify.js"));
1955
1956var _parse = _interopRequireDefault(_dereq_("./parse.js"));
1957
1958function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
1959
/**
 * UTF-8 encodes a string into an array of byte values.
 *
 * @param {String} str Input string
 * @returns {Array} UTF-8 bytes (each 0-255)
 */
function stringToBytes(str) {
  const encoded = unescape(encodeURIComponent(str)); // UTF8 escape
  const bytes = [];

  for (let i = 0; i < encoded.length; ++i) {
    bytes.push(encoded.charCodeAt(i));
  }

  return bytes;
}
1971
// Predefined namespace IDs from RFC 4122 Appendix C, for hashing DNS names
// and URLs with the name-based (v3/v5) generators.
const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';
exports.DNS = DNS;
const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';
exports.URL = URL;
1976
/**
 * Factory for the name-based UUID generators (v3/v5).
 *
 * @param {String} name Desired function name ("v3" or "v5")
 * @param {Number} version Version nibble to set in byte 6 (0x30 or 0x50)
 * @param {Function} hashfunc Hash taking bytes and returning >= 16 bytes
 * @returns {Function} generateUUID(value, namespace, [buf], [offset])
 */
function _default(name, version, hashfunc) {
  function generateUUID(value, namespace, buf, offset) {
    if (typeof value === 'string') {
      value = stringToBytes(value);
    }

    if (typeof namespace === 'string') {
      namespace = (0, _parse.default)(namespace);
    }

    if (namespace.length !== 16) {
      throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');
    } // Compute hash of namespace and value, Per 4.3
    // Future: Use spread syntax when supported on all platforms, e.g. `bytes =
    // hashfunc([...namespace, ... value])`


    let bytes = new Uint8Array(16 + value.length);
    bytes.set(namespace);
    bytes.set(value, namespace.length);
    bytes = hashfunc(bytes);
    // Stamp the RFC 4122 version and variant bits onto the hash output.
    bytes[6] = bytes[6] & 0x0f | version;
    bytes[8] = bytes[8] & 0x3f | 0x80;

    if (buf) {
      offset = offset || 0;

      for (let i = 0; i < 16; ++i) {
        buf[offset + i] = bytes[i];
      }

      return buf;
    }

    return (0, _stringify.default)(bytes);
  } // Function#name is not settable on some platforms (#270)


  try {
    generateUUID.name = name; // eslint-disable-next-line no-empty
  } catch (err) {} // For CommonJS default export support


  generateUUID.DNS = DNS;
  generateUUID.URL = URL;
  return generateUUID;
}
2024},{"./parse.js":6,"./stringify.js":10}],14:[function(_dereq_,module,exports){
2025"use strict";
2026
2027Object.defineProperty(exports, "__esModule", {
2028 value: true
2029});
2030exports.default = void 0;
2031
2032var _rng = _interopRequireDefault(_dereq_("./rng.js"));
2033
2034var _stringify = _interopRequireDefault(_dereq_("./stringify.js"));
2035
2036function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
2037
/**
 * Generates an RFC 4122 version 4 (random) UUID.
 *
 * @param {Object} [options] Optional: `random` (16 pre-generated bytes) or
 *   `rng` (alternative random source)
 * @param {Array|Uint8Array} [buf] If provided, UUID bytes are written here
 * @param {Number} [offset=0] Starting index within `buf`
 * @returns {String|Array|Uint8Array} `buf` when given, else the UUID string
 */
function v4(options, buf, offset) {
  options = options || {};

  const rnds = options.random || (options.rng || _rng.default)();

  // Per RFC 4122 section 4.4: stamp the version and variant bits.
  rnds[6] = rnds[6] & 0x0f | 0x40;
  rnds[8] = rnds[8] & 0x3f | 0x80;

  // Copy bytes to buffer, if provided
  if (!buf) {
    return (0, _stringify.default)(rnds);
  }

  offset = offset || 0;

  for (let i = 0; i < 16; ++i) {
    buf[offset + i] = rnds[i];
  }

  return buf;
}
2059
2060var _default = v4;
2061exports.default = _default;
2062},{"./rng.js":8,"./stringify.js":10}],15:[function(_dereq_,module,exports){
2063"use strict";
2064
2065Object.defineProperty(exports, "__esModule", {
2066 value: true
2067});
2068exports.default = void 0;
2069
2070var _v = _interopRequireDefault(_dereq_("./v35.js"));
2071
2072var _sha = _interopRequireDefault(_dereq_("./sha1.js"));
2073
2074function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
2075
// v5 = name-based UUID hashed with SHA-1 (RFC 4122 section 4.3);
// 0x50 is the version nibble OR-ed into byte 6 by the v35 factory.
const v5 = (0, _v.default)('v5', 0x50, _sha.default);
var _default = v5;
exports.default = _default;
2079},{"./sha1.js":9,"./v35.js":13}],16:[function(_dereq_,module,exports){
2080"use strict";
2081
2082Object.defineProperty(exports, "__esModule", {
2083 value: true
2084});
2085exports.default = void 0;
2086
2087var _regex = _interopRequireDefault(_dereq_("./regex.js"));
2088
2089function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
2090
// A value is a valid UUID only if it is a string matching the RFC 4122 regex.
function validate(uuid) {
  if (typeof uuid !== 'string') {
    return false;
  }
  return _regex.default.test(uuid);
}
2094
2095var _default = validate;
2096exports.default = _default;
2097},{"./regex.js":7}],17:[function(_dereq_,module,exports){
2098"use strict";
2099
2100Object.defineProperty(exports, "__esModule", {
2101 value: true
2102});
2103exports.default = void 0;
2104
2105var _validate = _interopRequireDefault(_dereq_("./validate.js"));
2106
// Normalize a CommonJS/ES-module export so `.default` access always works.
function _interopRequireDefault(obj) {
  if (obj && obj.__esModule) {
    return obj;
  }
  return { default: obj };
}
2108
// Extract the version nibble of a canonically formatted UUID string.
// Throws a TypeError for anything that fails RFC 4122 validation.
function version(uuid) {
  if (!(0, _validate.default)(uuid)) {
    throw TypeError('Invalid UUID');
  }
  // The version is the first hex digit of the third dash-separated group.
  var versionChar = uuid.charAt(14);
  return parseInt(versionChar, 16);
}
2116
2117var _default = version;
2118exports.default = _default;
2119},{"./validate.js":16}],18:[function(_dereq_,module,exports){
2120'use strict';
2121
// Unwrap a transpiled ES-module default export; pass other values through.
function _interopDefault(ex) {
  var hasDefault = ex && typeof ex === 'object' && 'default' in ex;
  return hasDefault ? ex.default : ex;
}
2123
2124var EventEmitter = _interopDefault(_dereq_('events'));
2125var uuid = _dereq_('uuid');
2126var Md5 = _interopDefault(_dereq_('spark-md5'));
2127
// True when `object` is one of the binary types this adapter stores natively.
function isBinaryObject(object) {
  if (typeof ArrayBuffer !== 'undefined' && object instanceof ArrayBuffer) {
    return true;
  }
  return typeof Blob !== 'undefined' && object instanceof Blob;
}
2132
2133/**
2134 * @template {ArrayBuffer | Blob} T
2135 * @param {T} object
2136 * @returns {T}
2137 */
2138function cloneBinaryObject(object) {
2139 return object instanceof ArrayBuffer
2140 ? object.slice(0)
2141 : object.slice(0, object.size, object.type);
2142}
2143
2144// most of this is borrowed from lodash.isPlainObject:
2145// https://github.com/fis-components/lodash.isplainobject/
2146// blob/29c358140a74f252aeb08c9eb28bef86f2217d4a/index.js
2147
var funcToString = Function.prototype.toString;
var objectCtorString = funcToString.call(Object);

// True for objects created via `{}` / `new Object()` — i.e. whose prototype
// is Object.prototype (or null, as with Object.create(null)).
function isPlainObject(value) {
  var proto = Object.getPrototypeOf(value);
  /* istanbul ignore if */
  if (proto === null) {
    return true;
  }
  var Ctor = proto.constructor;
  if (typeof Ctor !== 'function') {
    return false;
  }
  return Ctor instanceof Ctor && funcToString.call(Ctor) == objectCtorString;
}
2161
// Deep-clone a JSON-ish value. Dates become ISO strings, binary values are
// copied, non-plain objects (e.g. Workers) are returned unchanged, and
// object properties whose cloned value is `undefined` are dropped.
function clone(object) {
  // primitives, null and undefined pass straight through
  if (!object || typeof object !== 'object') {
    return object;
  }

  if (Array.isArray(object)) {
    var arrayCopy = new Array(object.length);
    for (var idx = 0; idx < object.length; idx++) {
      arrayCopy[idx] = clone(object[idx]);
    }
    return arrayCopy;
  }

  // special case: to avoid inconsistencies between IndexedDB
  // and other backends, we automatically stringify Dates
  if (object instanceof Date && isFinite(object)) {
    return object.toISOString();
  }

  if (isBinaryObject(object)) {
    return cloneBinaryObject(object);
  }

  if (!isPlainObject(object)) {
    return object; // don't clone objects like Workers
  }

  var objectCopy = {};
  for (var key in object) {
    /* istanbul ignore else */
    if (Object.prototype.hasOwnProperty.call(object, key)) {
      var clonedValue = clone(object[key]);
      if (typeof clonedValue !== 'undefined') {
        objectCopy[key] = clonedValue;
      }
    }
  }
  return objectCopy;
}
2205
// like underscore/lodash _.pick(): copy only the listed properties
// (inherited ones count too, mirroring the `in` operator).
function pick(obj, arr) {
  var subset = {};
  arr.forEach(function (prop) {
    if (prop in obj) {
      subset[prop] = obj[prop];
    }
  });
  return subset;
}
2217
// Feature-detect a usable localStorage once at load time; accessing it can
// throw (e.g. private browsing modes, sandboxed frames), hence the try/catch.
var hasLocal = (function () {
  try {
    localStorage.setItem('_pouch_check_localstorage', 1);
    return !!localStorage.getItem('_pouch_check_localstorage');
  } catch (e) {
    return false;
  }
})();

function hasLocalStorage() {
  return hasLocal;
}
2230
// Schedule a callback as a microtask: prefer native queueMicrotask and fall
// back to a resolved-promise hop on older environments.
var nextTick;
if (typeof queueMicrotask === "function") {
  nextTick = queueMicrotask;
} else {
  nextTick = function nextTick(fn) {
    Promise.resolve().then(fn);
  };
}
2236
// Per-database change notification hub. Local listeners are plain
// EventEmitter subscriptions keyed by database name; cross-tab notification
// is piggybacked on localStorage "storage" events (see notifyLocalWindows).
class Changes extends EventEmitter {
  constructor() {
    super();

    // id -> listener function, so removeListener can detach the exact handler
    this._listeners = {};

    if (hasLocalStorage()) {
      // Another window toggling localStorage[dbName] fires a "storage" event
      // whose key is the db name; re-emit it so local listeners run too.
      addEventListener("storage", (e) => {
        this.emit(e.key);
      });
    }
  }

  // Subscribe `opts.onChange` to changes of `db`, identified by `id`.
  // Each notification triggers a single non-overlapping changes request;
  // notifications arriving mid-request are coalesced via the 'waiting' state.
  addListener(dbName, id, db, opts) {
    if (this._listeners[id]) {
      return;
    }
    // false: idle; true: request in flight; 'waiting': re-run when done
    var inprogress = false;
    var self = this;
    function eventFunction() {
      // listener may have been removed while a notification was queued
      if (!self._listeners[id]) {
        return;
      }
      if (inprogress) {
        inprogress = 'waiting';
        return;
      }
      inprogress = true;
      var changesOpts = pick(opts, [
        'style', 'include_docs', 'attachments', 'conflicts', 'filter',
        'doc_ids', 'view', 'since', 'query_params', 'binary', 'return_docs'
      ]);

      function onError() {
        inprogress = false;
      }

      db.changes(changesOpts).on('change', function (c) {
        // advance the since-checkpoint and forward only newer changes
        if (c.seq > opts.since && !opts.cancelled) {
          opts.since = c.seq;
          opts.onChange(c);
        }
      }).on('complete', function () {
        // a notification arrived while we were busy: go around again
        if (inprogress === 'waiting') {
          nextTick(eventFunction);
        }
        inprogress = false;
      }).on('error', onError);
    }
    this._listeners[id] = eventFunction;
    this.on(dbName, eventFunction);
  }

  removeListener(dbName, id) {
    if (!(id in this._listeners)) {
      return;
    }
    super.removeListener(dbName, this._listeners[id]);
    delete this._listeners[id];
  }

  notifyLocalWindows(dbName) {
    //do a useless change on a storage thing
    //in order to get other windows's listeners to activate
    if (hasLocalStorage()) {
      localStorage[dbName] = (localStorage[dbName] === "a") ? "b" : "a";
    }
  }

  // Fire listeners in this window and (via localStorage) in sibling windows.
  notify(dbName) {
    this.emit(dbName);
    this.notifyLocalWindows(dbName);
  }
}
2311
// Invoke console[method](...args) only when a console with that method
// exists (older embedded WebViews may lack console or specific methods).
// Modernized: rest parameters + spread instead of the legacy
// `arguments`/`Array.prototype.slice`/`apply` pattern; behavior unchanged.
function guardedConsole(method, ...args) {
  /* istanbul ignore else */
  if (typeof console !== 'undefined' && typeof console[method] === 'function') {
    console[method](...args);
  }
}
2319
// CouchDB-style error object carried throughout PouchDB.
class PouchError extends Error {
  /**
   * @param {number} status - HTTP-style status code
   * @param {string} error - short CouchDB error name (stored as `name`)
   * @param {string} reason - human-readable text (stored as `message`)
   */
  constructor(status, error, reason) {
    super();
    this.status = status;
    this.name = error;
    this.message = reason;
    this.error = true;
  }

  toString() {
    // NOTE(review): `reason` is only set on instances produced via
    // createError(); plain PouchError instances serialize without it.
    var serialized = {
      status: this.status,
      name: this.name,
      message: this.message,
      reason: this.reason
    };
    return JSON.stringify(serialized);
  }
}
2338
// Canonical error templates. These are used as prototypes-by-copy via
// createError(); callers compare on `status`/`name`, so the strings below
// are part of the public API and must not change.
var UNAUTHORIZED = new PouchError(401, 'unauthorized', "Name or password is incorrect.");
var MISSING_BULK_DOCS = new PouchError(400, 'bad_request', "Missing JSON list of 'docs'");
var MISSING_DOC = new PouchError(404, 'not_found', 'missing');
var REV_CONFLICT = new PouchError(409, 'conflict', 'Document update conflict');
var INVALID_ID = new PouchError(400, 'bad_request', '_id field must contain a string');
var MISSING_ID = new PouchError(412, 'missing_id', '_id is required for puts');
var RESERVED_ID = new PouchError(400, 'bad_request', 'Only reserved document ids may start with underscore.');
var NOT_OPEN = new PouchError(412, 'precondition_failed', 'Database not open');
var UNKNOWN_ERROR = new PouchError(500, 'unknown_error', 'Database encountered an unknown error');
var BAD_ARG = new PouchError(500, 'badarg', 'Some query argument is invalid');
var INVALID_REQUEST = new PouchError(400, 'invalid_request', 'Request was invalid');
var QUERY_PARSE_ERROR = new PouchError(400, 'query_parse_error', 'Some query parameter is invalid');
var DOC_VALIDATION = new PouchError(500, 'doc_validation', 'Bad special document member');
var BAD_REQUEST = new PouchError(400, 'bad_request', 'Something wrong with the request');
var NOT_AN_OBJECT = new PouchError(400, 'bad_request', 'Document must be a JSON object');
var DB_MISSING = new PouchError(404, 'not_found', 'Database not found');
var IDB_ERROR = new PouchError(500, 'indexed_db_went_bad', 'unknown');
var WSQ_ERROR = new PouchError(500, 'web_sql_went_bad', 'unknown');
// 'went_went' is a long-standing upstream typo; kept for wire compatibility.
var LDB_ERROR = new PouchError(500, 'levelDB_went_went_bad', 'unknown');
var FORBIDDEN = new PouchError(403, 'forbidden', 'Forbidden by design doc validate_doc_update function');
var INVALID_REV = new PouchError(400, 'bad_request', 'Invalid rev format');
var FILE_EXISTS = new PouchError(412, 'file_exists', 'The database could not be created, the file already exists.');
var MISSING_STUB = new PouchError(412, 'missing_stub', 'A pre-existing attachment stub wasn\'t found');
var INVALID_URL = new PouchError(413, 'invalid_url', 'Provided URL is invalid');
2363
// Build a throwable error from one of the templates above: copies the
// template's own data properties onto a new object that shares
// PouchError.prototype, so it JSON-serializes cleanly while remaining a
// distinct instance with its own optional `reason`.
function createError(error, reason) {
  function CustomPouchError(reason) {
    // inherit error properties from our parent error manually
    // so as to allow proper JSON parsing.
    var names = Object.getOwnPropertyNames(error);
    for (var i = 0; i < names.length; i++) {
      var name = names[i];
      if (typeof error[name] !== 'function') {
        this[name] = error[name];
      }
    }

    // templates created at module load carry no useful stack; capture one here
    if (this.stack === undefined) {
      this.stack = (new Error()).stack;
    }

    if (reason !== undefined) {
      this.reason = reason;
    }
  }
  CustomPouchError.prototype = PouchError.prototype;
  return new CustomPouchError(reason);
}
2386
// Run a user-supplied filter; returns the *negated* result (truthy means
// "skip this doc"). If the filter throws, a bad_request error object is
// returned instead of propagating the exception.
function tryFilter(filter, doc, req) {
  try {
    return !filter(doc, req);
  } catch (err) {
    return createError(BAD_REQUEST, 'Filter function threw: ' + err.toString());
  }
}
2395
// Build a predicate that decides whether a change event should be emitted,
// applying the user filter and trimming doc/attachment bodies per `opts`.
// Returns true (emit), false (skip), or an error object from a throwing filter.
function filterChange(opts) {
  var req = { query: opts.query_params };
  var hasFilter = opts.filter && typeof opts.filter === 'function';

  return function filter(change) {
    if (!change.doc) {
      // CSG sends events on the changes feed that don't have documents,
      // this hack makes a whole lot of existing code robust.
      change.doc = {};
    }

    var filterReturn = hasFilter && tryFilter(opts.filter, change.doc, req);

    // an object here is an error produced by tryFilter — pass it through
    if (typeof filterReturn === 'object') {
      return filterReturn;
    }

    // truthy means the user filter rejected this change
    if (filterReturn) {
      return false;
    }

    if (!opts.include_docs) {
      delete change.doc;
    } else if (!opts.attachments) {
      // keep attachment metadata but replace bodies with stubs
      for (var att in change.doc._attachments) {
        /* istanbul ignore else */
        if (Object.prototype.hasOwnProperty.call(change.doc._attachments, att)) {
          change.doc._attachments[att].stub = true;
        }
      }
    }
    return true;
  };
}
2431
// shim for Function.prototype.name (implementation elided from this bundle)
2433
// Determine if an ID is valid
// - invalid IDs begin with an underscore that does not begin '_design' or
// '_local'
// - any other string value is a valid id
// Throws the specific error object for each failure case; returns nothing
// when the id is acceptable.
function invalidIdError(id) {
  if (!id) {
    throw createError(MISSING_ID);
  }
  if (typeof id !== 'string') {
    throw createError(INVALID_ID);
  }
  if (/^_/.test(id) && !(/^_(design|local)/).test(id)) {
    throw createError(RESERVED_ID);
  }
}
2452
// Checks if a PouchDB object is "remote" or not (implementation elided
// from this bundle).

2455// originally parseUri 1.2.2, now patched by us
2456
2457// Based on https://github.com/alexdavid/scope-eval v0.0.3
2458
// Decode base64 to a binary string via the platform's native atob.
var thisAtob = (str) => atob(str);
2462
// Encode a binary string as base64 via the platform's native btoa.
var thisBtoa = (str) => btoa(str);
2466
// Abstracts constructing a Blob object, so it also works in older
// browsers that don't support the native Blob constructor (e.g.
// old QtWebKit versions, Android < 4.4).
function createBlob(parts, properties) {
  /* global BlobBuilder,MSBlobBuilder,MozBlobBuilder,WebKitBlobBuilder */
  parts = parts || [];
  properties = properties || {};
  try {
    return new Blob(parts, properties);
  } catch (e) {
    if (e.name !== "TypeError") {
      throw e;
    }
    // Legacy fallback: feed each part through a vendor-prefixed BlobBuilder.
    var Builder =
      typeof BlobBuilder !== 'undefined' ? BlobBuilder :
      typeof MSBlobBuilder !== 'undefined' ? MSBlobBuilder :
      typeof MozBlobBuilder !== 'undefined' ? MozBlobBuilder :
      WebKitBlobBuilder;
    var builder = new Builder();
    parts.forEach(function (part) {
      builder.append(part);
    });
    return builder.getBlob(properties.type);
  }
}
2491
2492// From http://stackoverflow.com/questions/14967647/ (continues on next line)
2493// encode-decode-image-with-base64-breaks-image (2013-04-21)
// Convert a binary string (one char per byte) into an ArrayBuffer.
function binaryStringToArrayBuffer(bin) {
  var bytes = new Uint8Array(bin.length);
  for (var i = 0; i < bin.length; i++) {
    bytes[i] = bin.charCodeAt(i);
  }
  return bytes.buffer;
}
2503
// Wrap a binary string in a Blob of the given MIME type.
function binStringToBluffer(binString, type) {
  var buffer = binaryStringToArrayBuffer(binString);
  return createBlob([buffer], {type});
}
2507
// Decode a base64 string into a Blob of the given MIME type.
function b64ToBluffer(b64, type) {
  return binStringToBluffer(thisAtob(b64), type);
}
2511
2512//Can't find original post, but this is close
2513//http://stackoverflow.com/questions/6965107/ (continues on next line)
2514//converting-between-strings-and-arraybuffers
// Convert an ArrayBuffer into a binary string (one char per byte).
function arrayBufferToBinaryString(buffer) {
  var bytes = new Uint8Array(buffer);
  var chars = [];
  for (var i = 0; i < bytes.byteLength; i++) {
    chars.push(String.fromCharCode(bytes[i]));
  }
  return chars.join('');
}
2524
// Read a Blob as a binary string; shims environments whose FileReader lacks
// readAsBinaryString by reading an ArrayBuffer and converting it.
function readAsBinaryString(blob, callback) {
  var reader = new FileReader();
  var hasBinaryString = typeof reader.readAsBinaryString === 'function';
  reader.onloadend = function (e) {
    var result = e.target.result || '';
    if (!hasBinaryString) {
      result = arrayBufferToBinaryString(result);
    }
    callback(result);
  };
  if (hasBinaryString) {
    reader.readAsBinaryString(blob);
  } else {
    reader.readAsArrayBuffer(blob);
  }
}
2542
// Read a Blob (or buffer) as a binary string; thin forwarding wrapper.
function blobToBinaryString(blobOrBuffer, callback) {
  // readAsBinaryString already delivers the binary string as the sole
  // callback argument, so forward the callback directly.
  readAsBinaryString(blobOrBuffer, callback);
}
2548
// Read a Blob (or buffer) and deliver its contents base64-encoded.
function blobToBase64(blobOrBuffer, callback) {
  blobToBinaryString(blobOrBuffer, function (binString) {
    // the intermediate value is a *binary* string; btoa produces the base64
    callback(thisBtoa(binString));
  });
}
2554
// simplified API. universal browser support is assumed
// Read a Blob as an ArrayBuffer; an empty buffer is passed when the read
// yields no result.
function readAsArrayBuffer(blob, callback) {
  var reader = new FileReader();
  reader.onloadend = function (e) {
    callback(e.target.result || new ArrayBuffer(0));
  };
  reader.readAsArrayBuffer(blob);
}
2564
2565// this is not used in the browser
2566
// Prefer setImmediate when the platform provides it; `self` is the global
// scope in both window and worker contexts.
var setImmediateShim = self.setImmediate || self.setTimeout;
// The incremental MD5 below consumes input in 32 KiB slices.
var MD5_CHUNK_SIZE = 32768;
2569
// Base64-encode a raw binary string (thin alias over thisBtoa).
function rawToBase64(raw) {
  return thisBtoa(raw);
}
2573
// Append bytes [start, end) of `blob` to the incremental MD5 accumulator
// `buffer`, then invoke `callback` (asynchronously, via FileReader).
function appendBlob(buffer, blob, start, end, callback) {
  // only slice the blob if we really need to
  var needsSlice = start > 0 || end < blob.size;
  var chunk = needsSlice ? blob.slice(start, end) : blob;
  readAsArrayBuffer(chunk, function (arrayBuffer) {
    buffer.append(arrayBuffer);
    callback();
  });
}
2584
// Append characters [start, end) of `string` to the incremental MD5
// accumulator `buffer`, then invoke `callback` synchronously.
function appendString(buffer, string, start, end, callback) {
  // only create a substring if we really need to
  var needsSlice = start > 0 || end < string.length;
  var chunk = needsSlice ? string.substring(start, end) : string;
  buffer.appendBinary(chunk);
  callback();
}
2593
// Compute the MD5 of a string or Blob incrementally in MD5_CHUNK_SIZE
// slices, delivering the base64-encoded digest to `callback`. String input
// is hashed synchronously chunk-by-chunk; Blob input goes through FileReader.
function binaryMd5(data, callback) {
  var inputIsString = typeof data === 'string';
  var len = inputIsString ? data.length : data.size;
  var chunkSize = Math.min(MD5_CHUNK_SIZE, len);
  var chunks = Math.ceil(len / chunkSize);
  var currentChunk = 0;
  // Md5 hashes binary strings; Md5.ArrayBuffer hashes raw buffers.
  var buffer = inputIsString ? new Md5() : new Md5.ArrayBuffer();

  var append = inputIsString ? appendString : appendBlob;

  function next() {
    // yield between chunks so huge inputs don't block the event loop
    setImmediateShim(loadNextChunk);
  }

  function done() {
    // end(true) returns the raw (binary string) digest
    var raw = buffer.end(true);
    var base64 = rawToBase64(raw);
    callback(base64);
    buffer.destroy();
  }

  function loadNextChunk() {
    var start = currentChunk * chunkSize;
    var end = start + chunkSize;
    currentChunk++;
    if (currentChunk < chunks) {
      append(buffer, data, start, end, next);
    } else {
      append(buffer, data, start, end, done);
    }
  }
  loadNextChunk();
}
2627
// Synchronous hex-encoded MD5 of a string (delegates to spark-md5).
function stringMd5(string) {
  return Md5.hash(string);
}
2631
2632/**
2633 * Creates a new revision string that does NOT include the revision height
2634 * For example '56649f1b0506c6ca9fda0746eb0cacdf'
2635 */
function rev(doc, deterministic_revs) {
  if (!deterministic_revs) {
    // random rev ids: a dash-free, lowercase UUID
    return uuid.v4().replace(/-/g, '').toLowerCase();
  }

  // deterministic rev ids: hash the doc minus its rev tree, so the same
  // edit always produces the same rev id
  var hashableDoc = Object.assign({}, doc);
  delete hashableDoc._rev_tree;
  return stringMd5(JSON.stringify(hashableDoc));
}
2645
2646var uuid$1 = uuid.v4; // mimic old import, only v4 is ever used elsewhere
2647
2648//
2649// Blobs are not supported in all versions of IndexedDB, notably
2650// Chrome <37, Android <5 and (some?) webkit-based browsers.
2651// In those versions, storing a blob will throw.
2652//
2653// Example Webkit error:
2654// > DataCloneError: Failed to store record in an IDBObjectStore: BlobURLs are not yet supported.
2655//
2656// Various other blob bugs exist in Chrome v37-42 (inclusive).
2657// Detecting them is expensive and confusing to users, and Chrome 37-42
2658// is at very low usage worldwide, so we do a hacky userAgent check instead.
2659//
2660// content-type bug: https://code.google.com/p/chromium/issues/detail?id=408120
2661// 404 bug: https://code.google.com/p/chromium/issues/detail?id=447916
2662// FileReader bug: https://code.google.com/p/chromium/issues/detail?id=447836
2663//
// Probe whether this IndexedDB implementation can store Blobs by writing a
// tiny empty Blob inside the supplied transaction. Resolves to a boolean;
// never rejects (all failure paths resolve false).
function checkBlobSupport(txn, store, docIdOrCreateDoc) {
  return new Promise(function (resolve) {
    var blob$$1 = createBlob(['']);

    let req;
    if (typeof docIdOrCreateDoc === 'function') {
      // Store may require a specific key path, in which case we can't store the
      // blob directly in the store.
      const createDoc = docIdOrCreateDoc;
      const doc = createDoc(blob$$1);
      req = txn.objectStore(store).put(doc);
    } else {
      const docId = docIdOrCreateDoc;
      req = txn.objectStore(store).put(blob$$1, docId);
    }

    req.onsuccess = function () {
      var matchedChrome = navigator.userAgent.match(/Chrome\/(\d+)/);
      var matchedEdge = navigator.userAgent.match(/Edge\//);
      // MS Edge pretends to be Chrome 42:
      // https://msdn.microsoft.com/en-us/library/hh869301%28v=vs.85%29.aspx
      // Chrome 37-42 have other blob bugs, so treat only Chrome >= 43 (or
      // non-Chrome / Edge) as supported even when the write succeeded.
      resolve(matchedEdge || !matchedChrome ||
        parseInt(matchedChrome[1], 10) >= 43);
    };

    req.onerror = txn.onabort = function (e) {
      // If the transaction aborts now its due to not being able to
      // write to the database, likely due to the disk being full
      e.preventDefault();
      e.stopPropagation();
      resolve(false);
    };
  }).catch(function () {
    return false; // error, so assume unsupported
  });
}
2700
// Convert an array of keys into a lookup map of key -> true.
function toObject(array) {
  var lookup = {};
  for (var i = 0; i < array.length; i++) {
    lookup[array[i]] = true;
  }
  return lookup;
}
2707// List of top level reserved words for doc
// Underscore-prefixed keys accepted by parseDoc(); anything else starting
// with '_' triggers a DOC_VALIDATION error there.
var reservedWords = toObject([
  '_id',
  '_rev',
  '_access',
  '_attachments',
  '_deleted',
  '_revisions',
  '_revs_info',
  '_conflicts',
  '_deleted_conflicts',
  '_local_seq',
  '_rev_tree',
  // replication documents
  '_replication_id',
  '_replication_state',
  '_replication_state_time',
  '_replication_state_reason',
  '_replication_stats',
  // Specific to Couchbase Sync Gateway
  '_removed'
]);

// List of reserved words that should end up in the document
// (parseDoc keeps these in `data`; other reserved keys move to `metadata`).
var dataWords = toObject([
  '_access',
  '_attachments',
  // replication documents
  '_replication_id',
  '_replication_state',
  '_replication_state_time',
  '_replication_state_reason',
  '_replication_stats'
]);
2741
// Parse a "N-hash" revision string into {prefix: N, id: hash}. Malformed
// input yields an INVALID_REV error object (returned, not thrown).
function parseRevisionInfo(rev$$1) {
  if (!/^\d+-/.test(rev$$1)) {
    return createError(INVALID_REV);
  }
  var dashIdx = rev$$1.indexOf('-');
  return {
    prefix: parseInt(rev$$1.substring(0, dashIdx), 10),
    id: rev$$1.substring(dashIdx + 1)
  };
}
2754
// Rebuild a linear rev tree from a CouchDB `_revisions` object (ids are
// newest-first). Only the newest rev carries `opts`; every ancestor is
// marked {status: 'missing'}.
function makeRevTreeFromRevisions(revisions, opts) {
  var pos = revisions.start - revisions.ids.length + 1;
  var revisionIds = revisions.ids;

  // start at the leaf and wrap each older rev around it
  var ids = [revisionIds[0], opts, []];
  for (var i = 1; i < revisionIds.length; i++) {
    ids = [revisionIds[i], {status: 'missing'}, [ids]];
  }

  return [{pos, ids}];
}
2770
2771// Preprocess documents, parse their revisions, assign an id and a
2772// revision for new writes that are missing them, etc
// Preprocess a document before writing: parse/assign _id and _rev, build or
// extend _rev_tree, and split the doc into {metadata, data}. Returns an
// error object (not thrown) for an unparsable _rev; throws for invalid ids
// or unknown underscore-prefixed keys.
function parseDoc(doc, newEdits, dbOpts) {
  if (!dbOpts) {
    dbOpts = {
      deterministic_revs: true
    };
  }

  var nRevNum;
  var newRevId;
  var revInfo;
  // opts becomes the leaf node's metadata in the rev tree
  var opts = {status: 'available'};
  if (doc._deleted) {
    opts.deleted = true;
  }

  if (newEdits) {
    // normal write path: generate the next rev ourselves
    if (!doc._id) {
      doc._id = uuid$1();
    }
    newRevId = rev(doc, dbOpts.deterministic_revs);
    if (doc._rev) {
      revInfo = parseRevisionInfo(doc._rev);
      if (revInfo.error) {
        return revInfo;
      }
      // graft the new rev as a child of the (missing) parent rev
      doc._rev_tree = [{
        pos: revInfo.prefix,
        ids: [revInfo.id, {status: 'missing'}, [[newRevId, opts, []]]]
      }];
      nRevNum = revInfo.prefix + 1;
    } else {
      // brand-new document: single-node tree at pos 1
      doc._rev_tree = [{
        pos: 1,
        ids : [newRevId, opts, []]
      }];
      nRevNum = 1;
    }
  } else {
    // replication path (new_edits=false): trust the incoming revision info
    if (doc._revisions) {
      doc._rev_tree = makeRevTreeFromRevisions(doc._revisions, opts);
      nRevNum = doc._revisions.start;
      newRevId = doc._revisions.ids[0];
    }
    if (!doc._rev_tree) {
      revInfo = parseRevisionInfo(doc._rev);
      if (revInfo.error) {
        return revInfo;
      }
      nRevNum = revInfo.prefix;
      newRevId = revInfo.id;
      doc._rev_tree = [{
        pos: nRevNum,
        ids: [newRevId, opts, []]
      }];
    }
  }

  // throws MISSING_ID / INVALID_ID / RESERVED_ID as appropriate
  invalidIdError(doc._id);

  doc._rev = nRevNum + '-' + newRevId;

  // split keys: unknown '_' keys throw; reserved-but-not-data keys go to
  // metadata (without the underscore); everything else stays in data
  var result = {metadata : {}, data : {}};
  for (var key in doc) {
    /* istanbul ignore else */
    if (Object.prototype.hasOwnProperty.call(doc, key)) {
      var specialKey = key[0] === '_';
      if (specialKey && !reservedWords[key]) {
        var error = createError(DOC_VALIDATION, key);
        error.message = DOC_VALIDATION.message + ': ' + key;
        throw error;
      } else if (specialKey && !dataWords[key]) {
        result.metadata[key.slice(1)] = doc[key];
      } else {
        result.data[key] = doc[key];
      }
    }
  }
  return result;
}
2852
2853// We fetch all leafs of the revision tree, and sort them based on tree length
2854// and whether they were deleted, undeleted documents with the longest revision
2855// tree (most edits) win
2856// The final sort algorithm is slightly documented in a sidebar here:
2857// http://guide.couchdb.org/draft/conflicts.html
function winningRev(metadata) {
  var winningId;
  var winningPos;
  var winningDeleted;
  var stack = metadata.rev_tree.slice();
  var node;
  while ((node = stack.pop())) {
    var tree = node.ids;
    var children = tree[2];
    var pos = node.pos;

    if (children.length > 0) {
      // internal node: keep descending towards the leaves
      children.forEach(function (child) {
        stack.push({pos: pos + 1, ids: child});
      });
      continue;
    }

    var deleted = !!tree[1].deleted;
    var id = tree[0];
    // A leaf beats the current winner if it is "less deleted", or deeper,
    // or (as a final tiebreak) has the lexicographically larger id.
    var isBetter = !winningId ||
      (winningDeleted !== deleted ? winningDeleted :
        winningPos !== pos ? winningPos < pos : winningId < id);
    if (isBetter) {
      winningId = id;
      winningPos = pos;
      winningDeleted = deleted;
    }
  }

  return winningPos + '-' + winningId;
}
2887
2888// Pretty much all below can be combined into a higher order function to
2889// traverse revisions
2890// The return value from the callback will be passed as context to all
2891// children of that node
function traverseRevTree(revs, callback) {
  var stack = revs.slice();
  var node;
  while ((node = stack.pop())) {
    var pos = node.pos;
    var tree = node.ids;
    var children = tree[2];
    var isLeaf = children.length === 0;
    // callback(isLeaf, pos, revHash, parentCtx, opts); its return value
    // becomes the ctx handed to every child of this node
    var newCtx = callback(isLeaf, pos, tree[0], node.ctx, tree[1]);
    for (var i = 0; i < children.length; i++) {
      stack.push({pos: pos + 1, ids: children[i], ctx: newCtx});
    }
  }
}
2907
// Comparator: ascending by tree depth (`pos`).
function sortByPos(a, b) {
  return a.pos - b.pos;
}
2911
// Gather every leaf of the rev tree as {rev, opts}, deepest first.
function collectLeaves(revs) {
  var leaves = [];
  traverseRevTree(revs, function (isLeaf, pos, id, acc, opts) {
    if (isLeaf) {
      leaves.push({rev: pos + "-" + id, pos, opts});
    }
  });
  // order by depth descending, then drop the temporary pos field
  leaves.sort(sortByPos).reverse();
  leaves.forEach(function (leaf) {
    delete leaf.pos;
  });
  return leaves;
}
2925
2926// returns revs of all conflicts that is leaves such that
2927// 1. are not deleted and
2928// 2. are different than winning revision
function collectConflicts(metadata) {
  var win = winningRev(metadata);
  // conflicts are non-deleted leaves other than the winning revision
  return collectLeaves(metadata.rev_tree)
    .filter(function (leaf) {
      return leaf.rev !== win && !leaf.opts.deleted;
    })
    .map(function (leaf) {
      return leaf.rev;
    });
}
2941
2942// compact a tree by marking its non-leafs as missing,
2943// and return a list of revs to delete
function compactTree(metadata) {
  var revs = [];
  traverseRevTree(metadata.rev_tree, function (isLeaf, pos, revHash, ctx, opts) {
    // every stored ("available") internal node loses its body on compaction
    if (!isLeaf && opts.status === 'available') {
      revs.push(pos + '-' + revHash);
      opts.status = 'missing';
    }
  });
  return revs;
}
2955
// `findPathToLeaf()` returns an array of revs from a specified rev down to
// its leaf (implementation elided from this bundle).

2958// build up a list of all the paths to the leafs in this revision tree
function rootToLeaf(revs) {
  var paths = [];
  var stack = revs.slice();
  var node;
  while ((node = stack.pop())) {
    var pos = node.pos;
    var tree = node.ids;
    var id = tree[0];
    var opts = tree[1];
    var branches = tree[2];

    // extend this node's inherited path with itself
    var history = node.history ? node.history.slice() : [];
    history.push({id, opts});

    if (branches.length === 0) {
      // leaf: record the complete root->leaf path plus its root's pos
      paths.push({pos: (pos + 1 - history.length), ids: history});
    }
    for (var i = 0; i < branches.length; i++) {
      stack.push({pos: pos + 1, ids: branches[i], history});
    }
  }
  return paths.reverse();
}
2982
// for a better overview of what this merge logic is doing, see the CouchDB
// guide's chapter on conflict management and revision trees.

// Comparator: ascending by tree depth (`pos`). Duplicate of sortByPos above;
// the bundler generated both from separate source modules.
function sortByPos$1(a, b) {
  return a.pos - b.pos;
}
2988
// classic binary search: returns the lowest index at which `item` could be
// inserted into sorted `arr` while keeping it sorted
function binarySearch(arr, item, comparator) {
  var low = 0;
  var high = arr.length;
  while (low < high) {
    var mid = (low + high) >>> 1;
    if (comparator(arr[mid], item) < 0) {
      low = mid + 1;
    } else {
      high = mid;
    }
  }
  return low;
}
3004
// assuming the arr is sorted, splice the item in at its proper place
function insertSorted(arr, item, comparator) {
  arr.splice(binarySearch(arr, item, comparator), 0, item);
}
3010
3011// Turn a path as a flat array into a tree with a single branch.
3012// If any should be stemmed from the beginning of the array, that's passed
3013// in as the second argument
function pathToTree(path, numStemmed) {
  var root;
  var leaf;
  // walk the flat path, skipping the first `numStemmed` entries, and chain
  // each node as the sole child of the previous one
  for (var i = numStemmed; i < path.length; i++) {
    var node = path[i];
    var nextLeaf = [node.id, node.opts, []];
    if (!leaf) {
      root = nextLeaf;
    } else {
      leaf[2].push(nextLeaf);
    }
    leaf = nextLeaf;
  }
  return root;
}
3029
// compare the IDs (element 0) of two tree nodes; used to keep sibling
// branches sorted. Never returns 0 — equal ids are merged elsewhere.
function compareTree(a, b) {
  return a[0] < b[0] ? -1 : 1;
}
3034
3035// Merge two trees together
3036// The roots of tree1 and tree2 must be the same revision
// Merge tree2 into tree1 *in place* (both share the same root revision).
// Returns {conflicts, tree} where conflicts is false, 'new_leaf' or
// 'new_branch'. Children are matched up by rev id; unmatched children of
// tree2 are spliced in as new sibling branches.
function mergeTree(in_tree1, in_tree2) {
  var queue = [{tree1: in_tree1, tree2: in_tree2}];
  var conflicts = false;
  while (queue.length > 0) {
    var item = queue.pop();
    var tree1 = item.tree1;
    var tree2 = item.tree2;

    // a rev's body is "available" if either side has it stored
    if (tree1[1].status || tree2[1].status) {
      tree1[1].status =
        (tree1[1].status === 'available' ||
        tree2[1].status === 'available') ? 'available' : 'missing';
    }

    for (var i = 0; i < tree2[2].length; i++) {
      if (!tree1[2][0]) {
        // tree1 has no children here: adopt tree2's child wholesale
        conflicts = 'new_leaf';
        tree1[2][0] = tree2[2][i];
        continue;
      }

      var merged = false;
      for (var j = 0; j < tree1[2].length; j++) {
        if (tree1[2][j][0] === tree2[2][i][0]) {
          // same child rev id on both sides: recurse via the queue
          queue.push({tree1: tree1[2][j], tree2: tree2[2][i]});
          merged = true;
        }
      }
      if (!merged) {
        // unmatched child: insert as a new, sorted sibling branch
        conflicts = 'new_branch';
        insertSorted(tree1[2], tree2[2][i], compareTree);
      }
    }
  }
  return {conflicts, tree: in_tree1};
}
3073
// Merge a single root-to-leaf `path` into the forest `tree`. When
// `dontExpand` is true (used during stemming) paths starting at different
// depths are not reconciled. Returns {tree, conflicts} where conflicts is
// 'new_leaf', 'new_branch' or 'internal_node'.
function doMerge(tree, path, dontExpand) {
  var restree = [];
  var conflicts = false;
  var merged = false;
  var res;

  if (!tree.length) {
    return {tree: [path], conflicts: 'new_leaf'};
  }

  for (var i = 0, len = tree.length; i < len; i++) {
    var branch = tree[i];
    if (branch.pos === path.pos && branch.ids[0] === path.ids[0]) {
      // Paths start at the same position and have the same root, so they need
      // merged
      res = mergeTree(branch.ids, path.ids);
      restree.push({pos: branch.pos, ids: res.tree});
      conflicts = conflicts || res.conflicts;
      merged = true;
    } else if (dontExpand !== true) {
      // The paths start at a different position, take the earliest path and
      // traverse up until it as at the same point from root as the path we
      // want to merge. If the keys match we return the longer path with the
      // other merged After stemming we don't want to expand the trees

      var t1 = branch.pos < path.pos ? branch : path;
      var t2 = branch.pos < path.pos ? path : branch;
      // how many levels deeper t2's root sits relative to t1's root
      var diff = t2.pos - t1.pos;

      var candidateParents = [];

      // depth-first walk of t1, collecting nodes exactly `diff` levels down
      // whose rev id matches t2's root
      var trees = [];
      trees.push({ids: t1.ids, diff, parent: null, parentIdx: null});
      while (trees.length > 0) {
        var item = trees.pop();
        if (item.diff === 0) {
          if (item.ids[0] === t2.ids[0]) {
            candidateParents.push(item);
          }
          continue;
        }
        var elements = item.ids[2];
        for (var j = 0, elementsLen = elements.length; j < elementsLen; j++) {
          trees.push({
            ids: elements[j],
            diff: item.diff - 1,
            parent: item.ids,
            parentIdx: j
          });
        }
      }

      var el = candidateParents[0];

      if (!el) {
        // no anchor point found: keep the branch untouched
        restree.push(branch);
      } else {
        // merge t2 into the matching subtree and splice the result back in
        res = mergeTree(el.ids, t2.ids);
        el.parent[2][el.parentIdx] = res.tree;
        restree.push({pos: t1.pos, ids: t1.ids});
        conflicts = conflicts || res.conflicts;
        merged = true;
      }
    } else {
      restree.push(branch);
    }
  }

  // We didnt find anything to merge with, so the path becomes a new root
  if (!merged) {
    restree.push(path);
  }

  restree.sort(sortByPos$1);

  return {
    tree: restree,
    conflicts: conflicts || 'internal_node'
  };
}
3154
3155// To ensure we don't grow the revision tree infinitely, we stem old revisions
// Limit the rev tree's history to `depth` revisions per path, returning the
// stemmed tree plus the list of revs ("pos-id") that were cut off.
function stem(tree, depth) {
  // First we break out the tree into a complete list of root to leaf paths
  var paths = rootToLeaf(tree);
  var stemmedRevs;

  var result;
  for (var i = 0, len = paths.length; i < len; i++) {
    // Then for each path, we cut off the start of the path based on the
    // `depth` to stem to, and generate a new set of flat trees
    var path = paths[i];
    var stemmed = path.ids;
    var node;
    if (stemmed.length > depth) {
      // only do the stemming work if we actually need to stem
      if (!stemmedRevs) {
        stemmedRevs = {}; // avoid allocating this object unnecessarily
      }
      var numStemmed = stemmed.length - depth;
      node = {
        pos: path.pos + numStemmed,
        ids: pathToTree(stemmed, numStemmed)
      };

      // remember every rev we chopped off the front of this path
      for (var s = 0; s < numStemmed; s++) {
        var rev = (path.pos + s) + '-' + stemmed[s].id;
        stemmedRevs[rev] = true;
      }
    } else { // no need to actually stem
      node = {
        pos: path.pos,
        ids: pathToTree(stemmed, 0)
      };
    }

    // Then we remerge all those flat trees together, ensuring that we don't
    // connect trees that would go beyond the depth limit
    if (result) {
      result = doMerge(result, node, true).tree;
    } else {
      result = [node];
    }
  }

  // this is memory-heavy per Chrome profiler, avoid unless we actually stemmed
  if (stemmedRevs) {
    traverseRevTree(result, function (isLeaf, pos, revHash) {
      // some revisions may have been removed in a branch but not in another
      delete stemmedRevs[pos + '-' + revHash];
    });
  }

  return {
    tree: result,
    revs: stemmedRevs ? Object.keys(stemmedRevs) : []
  };
}
3212
// Merge `path` into `tree`, stem the result down to `depth`, and report the
// revs removed by stemming along with the merge's conflict classification.
function merge(tree, path, depth) {
  const mergeResult = doMerge(tree, path);
  const stemResult = stem(mergeResult.tree, depth);
  return {
    tree: stemResult.tree,
    stemmedRevs: stemResult.revs,
    conflicts: mergeResult.conflicts
  };
}
3222
// Remove a leaf from a rev tree, independent of its status. Note that by
// removing an available leaf this can leave its predecessor as a missing
// leaf, corrupting the tree. Paths that become empty are dropped entirely.
function removeLeafFromRevTree(tree, leafRev) {
  const remaining = [];
  for (const path of tree) {
    const pruned = removeLeafFromPath(path, leafRev);
    if (pruned) {
      remaining.push(pruned);
    }
  }
  return remaining;
}
3232
// Remove the leaf identified by `leafRev` ("pos-hash") from a single path
// (flat tree). Works on a deep copy; returns the copy, or null when the
// leaf to remove is the root itself.
function removeLeafFromPath(path, leafRev) {
  const root = clone(path);
  const stack = [root];
  let current;

  while ((current = stack.pop())) {
    const revId = current.ids[0];
    const children = current.ids[2];
    const hash = current.pos + '-' + revId;

    if (children.length === 0 && hash === leafRev) {
      if (!current.parent) {
        // FIXME: we're facing the root, and probably shouldn't just return an empty array (object? null?).
        return null;
      }

      // Detach this leaf from its parent's branch list.
      current.parent.ids[2] = current.parent.ids[2].filter(function (branchNode) {
        return branchNode[0] !== revId;
      });
      return root;
    }

    for (const childIds of children) {
      stack.push({ pos: current.pos + 1, ids: childIds, parent: current });
    }
  }
  return root;
}
3261
// NOTE: the rev-existence helper this comment once described is not present
// in this bundle; the next function identifies _local/ document ids.
3263
// True when `id` is the id of a _local/ (non-replicated) document.
function isLocalId(id) {
  if (typeof id !== 'string') {
    return false;
  }
  return id.startsWith('_local/');
}
3267
// Return the leaf revision ("pos-hash") of the branch that contains `rev`
// anywhere along its root-to-leaf path. Throws if `rev` cannot be found in
// the metadata's rev tree.
function latest(rev, metadata) {
  const stack = metadata.rev_tree.slice();
  let node;

  while ((node = stack.pop())) {
    const pos = node.pos;
    const [id, opts, branches] = node.ids;
    const isLeaf = branches.length === 0;

    // Each node carries a copy of its ancestry so that, at a leaf, we can
    // check whether `rev` lies on this particular branch.
    const lineage = node.history ? node.history.slice() : [];
    lineage.push({ id, pos, opts });

    if (isLeaf) {
      for (const ancestor of lineage) {
        if (ancestor.pos + '-' + ancestor.id === rev) {
          // `rev` is on this branch — this leaf is its latest revision.
          return pos + '-' + id;
        }
      }
    }

    for (const branchIds of branches) {
      stack.push({ pos: pos + 1, ids: branchIds, history: lineage });
    }
  }

  /* istanbul ignore next */
  throw new Error('Unable to resolve latest revision for id ' + metadata.id + ', rev ' + rev);
}
3303
// Sentinel numbers used to store JSON values that IndexedDB cannot index
// directly (null and booleans). They sit at the very bottom of IDB's
// numeric ordering, so they sort before any real numeric value.
const IDB_NULL = Number.MIN_SAFE_INTEGER;
const IDB_FALSE = Number.MIN_SAFE_INTEGER + 1;
const IDB_TRUE = Number.MIN_SAFE_INTEGER + 2;

// These are the same as below but without the global flag
// we want to use RegExp.test because it's really fast, but the global flag
// makes the regex const stateful (seriously) as it walked through all instances
const TEST_KEY_INVALID = /^[^a-zA-Z$]|[^a-zA-Z0-9$]+/;
const TEST_PATH_INVALID = /\\.|(^|\.)[^a-zA-Z$]|[^a-zA-Z0-9$.]+/;

// True if `name` contains characters that cannot appear in an IndexedDB
// index key path (see sanitise() below for the encoding applied).
function needsSanitise(name, isPath) {
  if (isPath) {
    return TEST_PATH_INVALID.test(name);
  } else {
    return TEST_KEY_INVALID.test(name);
  }
}

//
// IndexedDB only allows valid JS names in its index paths, whereas JSON allows
// for any string at all. This converts invalid JS names to valid ones, to allow
// for them to be indexed.
//
// For example, "foo-bar" is a valid JSON key, but cannot be a valid JS name
// (because that would be read as foo minus bar).
//
// Very high level rules for valid JS names are:
//  - First character cannot start with a number
//  - Otherwise all characters must be be a-z, A-Z, 0-9, or $.
//  - Underscores (_) are encoded even though legal, to avoid collisions with
//    encoded illegal characters
//  - We allow . unless the name represents a single field, as that represents
//    a deep index path.
// See: https://www.w3.org/TR/IndexedDB/#key-path-construct
//
// This is more aggressive than it needs to be, but also simpler.
//
const KEY_INVALID = new RegExp(TEST_KEY_INVALID.source, 'g');
const PATH_INVALID = new RegExp(TEST_PATH_INVALID.source, 'g');
const SLASH = '\\'.charCodeAt(0);
const IS_DOT = '.'.charCodeAt(0);

// Replace every run of illegal characters in `name` with its `_c<code>_`
// encoding. For paths, backslash escapes are dropped and a dot that starts
// a matched run is kept as the path separator.
function sanitise(name, isPath) {
  const correctCharacters = function (match) {
    let good = '';
    for (let i = 0; i < match.length; i++) {
      const code = match.charCodeAt(i);
      // If you're sanitising a path, a slash character is there to be interpreted
      // by whatever parses the path later as "escape the next thing".
      //
      // e.g., if you want to index THIS string:
      //   {"foo": {"bar.baz": "THIS"}}
      // Your index path would be "foo.bar\.baz".

      if (code === IS_DOT && isPath && i === 0) {
        good += '.';
      } else if (code === SLASH && isPath) {
        continue;
      } else {
        good += '_c' + code + '_';
      }
    }
    return good;
  };

  if (isPath) {
    return name.replace(PATH_INVALID, correctCharacters);
  } else {
    return name.replace(KEY_INVALID, correctCharacters);
  }
}

// True if `data` (or any nested object/array within it) has a key that needs
// sanitising, or a null/boolean value that needs encoding to its IDB_*
// sentinel, before it can be stored for indexing.
//
// BUGFIX: this used to `return needsRewrite(data[key])` for nested objects,
// which ended the scan at the FIRST nested object even when that object
// needed no rewrite — silently skipping later keys (e.g. {a: {}, b: null}
// went undetected). It now only short-circuits on a positive result, and
// explicitly returns false otherwise.
function needsRewrite(data) {
  for (const key of Object.keys(data)) {
    if (needsSanitise(key)) {
      return true;
    } else if (data[key] === null || typeof data[key] === 'boolean') {
      return true;
    } else if (typeof data[key] === 'object') {
      if (needsRewrite(data[key])) {
        return true;
      }
    }
  }
  return false;
}

// Deep-copy `data` with keys sanitised and null/boolean values replaced by
// their IDB_* sentinels. Returns false when no rewrite is needed at all, so
// callers can keep using the original object.
function rewrite(data) {
  if (!needsRewrite(data)) {
    return false;
  }

  const isArray = Array.isArray(data);
  const clone = isArray
    ? []
    : {};

  Object.keys(data).forEach(function (key) {
    const safeKey = isArray ? key : sanitise(key);

    if (data[key] === null) {
      clone[safeKey] = IDB_NULL;
    } else if (typeof data[key] === 'boolean') {
      clone[safeKey] = data[key] ? IDB_TRUE : IDB_FALSE;
    } else if (typeof data[key] === 'object') {
      // BUGFIX: rewrite() returns false for nested values that need no
      // changes; previously that literal `false` was stored in place of the
      // value, losing data. Keep the original nested value in that case.
      clone[safeKey] = rewrite(data[key]) || data[key];
    } else {
      clone[safeKey] = data[key];
    }
  });

  return clone;
}
3413
// Object store names: 'docs' holds replicated documents (and carries the
// native indexes); 'meta' holds the db metadata record and _local/ docs.
const DOC_STORE = 'docs';
const META_LOCAL_STORE = 'meta';
3416
// Wrap `callback` in an IndexedDB event handler that converts the error
// event into a PouchDB IDB_ERROR before forwarding it.
function idbError(callback) {
  return function (evt) {
    const err = evt.target && evt.target.error;
    const message = err ? (err.name || err.message) : 'unknown_error';
    callback(createError(IDB_ERROR, message, evt.type));
  };
}
3426
// Inflate the attachment stub `name` on `doc` using the attachment bodies
// stored on `src`. `attachmentFormat` is how this database stores bodies
// ('base64' strings or binary blobs) and `isBinary` is how the caller wants
// them back. Mutates doc._attachments[name]; resolves when conversion is done.
function processAttachment(name, src, doc, isBinary, attachmentFormat) {

  const target = doc._attachments[name];
  delete target.stub;

  const stored = src.attachments[target.digest];

  if (attachmentFormat === 'base64') {
    // Stored as a base64 string; convert to a blob only if the caller wants binary.
    if (isBinary) {
      target.data = b64ToBluffer(stored.data, stored.content_type);
    } else {
      target.data = stored.data;
    }
    delete target.length;
    return Promise.resolve();
  }

  if (isBinary) {
    // Stored as binary, wanted as binary — hand it over as-is.
    target.data = stored.data;
    return Promise.resolve();
  }

  // Stored as binary, wanted as base64 — requires an async read.
  return new Promise(function (resolve) {
    readAsBinaryString(stored.data, function (binString) {
      target.data = thisBtoa(binString);
      delete target.length;
      resolve();
    });
  });
}
3458
// Extract the declared index fields for a view. Each entry is either the
// plain field name ('foo') or a single-key object ({foo: 'desc'}); either
// way we return just the field names.
function rawIndexFields(ddoc, viewName) {
  const view = ddoc.views[viewName];
  const def = view.options && view.options.def;
  const fields = (def && def.fields) || [];

  return fields.map(function (field) {
    return typeof field === 'string' ? field : Object.keys(field)[0];
  });
}
3474
/**
 * Truthy (the selector itself) if the named view declares a
 * "partial_filter_selector"; falsy otherwise.
 */
function isPartialFilterView(ddoc, viewName) {
  if (!(viewName in ddoc.views)) {
    return false;
  }
  const options = ddoc.views[viewName].options;
  return options && options.def && options.def.partial_filter_selector;
}
3484
// Derive the deterministic IndexedDB index name for an index over `fields`.
function naturalIndexName(fields) {
  return `_find_idx/${fields.join('/')}`;
}
3488
/**
 * Convert the fields the user gave us in the view into key paths that work
 * for indexeddb.
 *
 * fields is an array of field strings. A field string could be one field:
 *   'foo'
 * Or it could be a json path:
 *   'foo.bar'
 */
function correctIndexFields(fields) {
  const RESERVED = ['_id', '_rev', '_deleted', '_attachments'];

  const mapped = fields.map(function (field) {
    if (RESERVED.includes(field)) {
      // Reserved properties are stored at the top level without the underscore.
      return field.slice(1);
    }
    // Custom document fields live (sanitised) under the `data` property.
    return 'data.' + sanitise(field, true);
  });

  // 'deleted' always leads the key path so queries can filter out deleted docs.
  return ['deleted'].concat(mapped);
}
3513
//
// Core PouchDB schema version. Increment this if we, as a library, want to make
// schema changes in indexeddb. See upgradePouchDbSchema()
//
const POUCHDB_IDB_VERSION = 2;

//
// Functions that manage a combined indexeddb version number: the current time
// in millis (representing user migrations, i.e. design docs adding/removing
// native indexes) plus a large multiplier that encodes PouchDB's own
// library-level schema version.
//
// Given that Number.MAX_SAFE_INTEGER = 9007199254740991, reserving the top
// 2-3 digits allows either 900 system migrations up to 2198/02/18, or 89 up
// to 5050/02/14. This impl does the former. If this code still exists after
// 2198 someone send my descendants a Spacebook message congratulating them
// on their impressive genes.
//
//   9007199254740991 <- MAX_SAFE_INTEGER
//     10000000000000 <- 10^13
//      7199254740991 <- 2198-02-18T16:59:00.991Z
//
const versionMultiplier = Math.pow(10, 13);

// Combined idb version: library schema version in the high digits, current
// millis in the low digits.
function createIdbVersion() {
  return (versionMultiplier * POUCHDB_IDB_VERSION) + Date.now();
}

// Recover the library schema version from a combined idb version.
function getPouchDbVersion(version) {
  return Math.floor(version / versionMultiplier);
}
3549
// Reconcile the native indexes on DOC_STORE with whatever the _design
// documents currently declare. Must run during the `versionchange`
// transaction of `openReq` — the only time IndexedDB permits index
// creation/deletion. Failures from createIndex are routed to `reject`.
function maintainNativeIndexes(openReq, reject) {
  const docStore = openReq.transaction.objectStore(DOC_STORE);
  // All design doc ids sort inside the ['_design/', '_design/\uffff'] range.
  const ddocsReq = docStore.getAll(IDBKeyRange.bound('_design/', '_design/\uffff'));

  ddocsReq.onsuccess = function (e) {
    const results = e.target.result;
    const existingIndexNames = Array.from(docStore.indexNames);

    // NB: the only thing we're supporting here is the declared indexing
    // fields nothing more.
    //
    // Build { indexName: keyPathFields } from every view of every live
    // (non-deleted) design doc whose winning revision declares views.
    const expectedIndexes = results.filter(function (row) {
      return row.deleted === 0 && row.revs[row.rev].data.views;
    }).map(function (row) {
      return row.revs[row.rev].data;
    }).reduce(function (indexes, ddoc) {
      return Object.keys(ddoc.views).reduce(function (acc, viewName) {
        const fields = rawIndexFields(ddoc, viewName);

        if (fields && fields.length > 0) {
          acc[naturalIndexName(fields)] = correctIndexFields(fields);
        }

        return acc;
      }, indexes);
    }, {});

    const expectedIndexNames = Object.keys(expectedIndexes);

    // Delete any indexes that aren't system indexes or expected
    const systemIndexNames = ['seq', 'deleted,id'];
    existingIndexNames.forEach(function (index) {
      if (systemIndexNames.indexOf(index) === -1 && expectedIndexNames.indexOf(index) === -1) {
        docStore.deleteIndex(index);
      }
    });

    // Work out which indexes are missing and create them
    const newIndexNames = expectedIndexNames.filter(function (ei) {
      return existingIndexNames.indexOf(ei) === -1;
    });

    try {
      newIndexNames.forEach(function (indexName) {
        docStore.createIndex(indexName, expectedIndexes[indexName]);
      });
    } catch (err) {
      // createIndex throws synchronously (e.g. invalid key path); surface it
      // to the caller rather than letting the open request hang.
      reject(err);
    }
  };
}
3600
// Apply PouchDB library-level schema migrations, stepping the store layout
// up from `pouchdbVersion` to the current POUCHDB_IDB_VERSION. Called from
// onupgradeneeded, so `db`/`tx` belong to a `versionchange` transaction.
function upgradePouchDbSchema(dbName, db, tx, pouchdbVersion) {
  // v1: initial layout — a docs store keyed by id with a unique `seq`
  // index, plus a meta store for db metadata and _local/ documents.
  if (pouchdbVersion < 1) {
    const docStore = db.createObjectStore(DOC_STORE, {keyPath : 'id'});
    docStore.createIndex('seq', 'seq', {unique: true});

    db.createObjectStore(META_LOCAL_STORE, {keyPath: 'id'});
  }

  // v2: add the (deleted, id) index; drop the `seq` index for map/reduce
  // view databases (presumably unneeded there — their names contain
  // '-mrview-').
  if (pouchdbVersion < 2) {
    const docStore = tx.objectStore(DOC_STORE);
    docStore.createIndex('deleted,id', [ 'deleted', 'id' ], {unique: true});
    if (dbName.includes('-mrview-')) {
      docStore.deleteIndex('seq');
    }
  }

  // Declare more PouchDB schema changes here
  // if (pouchdbVersion < 3) { .. }
}
3620
// Open (and if necessary create/migrate) the IndexedDB database for
// `opts.name`, resolving with {idb, metadata} once the metadata record has
// been loaded/initialised. Re-opens at the current version (no upgrade) when
// another tab already bumped the version (opts.versionChangedWhileOpen).
function openDatabase(openDatabases, api, opts, resolve, reject) {
  const openReq = opts.versionChangedWhileOpen ?
    indexedDB.open(opts.name) :
    indexedDB.open(opts.name, createIdbVersion());

  openReq.onupgradeneeded = function (e) {
    // Versions below versionMultiplier cannot have been produced by
    // createIdbVersion(), so they come from a different adapter.
    if (e.oldVersion > 0 && e.oldVersion < versionMultiplier) {
      // This DB was created with the "idb" adapter, **not** this one.
      // For now we're going to just error out here: users must manually
      // migrate between the two. In the future, dependent on performance tests,
      // we might silently migrate
      throw new Error('Incorrect adapter: you should specify the "idb" adapter to open this DB');
    } else if (e.oldVersion === 0 && e.newVersion < versionMultiplier) {
      // Firefox still creates the database with version=1 even if we throw,
      // so we need to be sure to destroy the empty database before throwing
      indexedDB.deleteDatabase(opts.name);
      throw new Error('Database was deleted while open');
    }

    const tx = e.target.transaction;
    const db = e.target.result;

    const pouchdbVersion = getPouchDbVersion(e.oldVersion);
    upgradePouchDbSchema(opts.name, db, tx, pouchdbVersion);
    maintainNativeIndexes(openReq, reject);

    // Data migration for the v2 schema: _local/ docs used to live in
    // DOC_STORE and must be moved into META_LOCAL_STORE.
    if (pouchdbVersion < 2) {
      const docStore = openReq.transaction.objectStore(DOC_STORE);
      const metaStore = openReq.transaction.objectStore(META_LOCAL_STORE);

      const allDocsReq = docStore.openCursor();
      allDocsReq.onsuccess = event => {
        const cursor = event.target.result;
        if (!cursor) {
          return;
        }

        const doc = cursor.value;

        if (!isLocalId(doc.id)) {
          return cursor.continue();
        }

        // Move _local/ docs to the META_LOCAL_STORE
        metaStore.put(doc).onsuccess = () => {
          cursor.delete(doc).onsuccess = () => {
            cursor.continue();
          };
        };
      };
    }
  };

  openReq.onblocked = function (e) {
      // AFAICT this only occurs if, after sending `onversionchange` events to
      // all other open DBs (ie in different tabs), there are still open
      // connections to the DB. In this code we should never see this because we
      // close our DBs on these events, and all DB interactions are wrapped in
      // safely re-opening the DB.
      console.error('onblocked, this should never happen', e);
  };

  openReq.onsuccess = function (e) {
    const idb = e.target.result;

    // A global abort means the handle is unusable; drop our cache entry so
    // the next operation re-opens the database from scratch.
    idb.onabort = function (e) {
      console.error('Database has a global failure', e.target.error);
      delete openDatabases[opts.name];
      idb.close();
    };

    // In IndexedDB you can only change the version, and thus the schema, when you are opening the database.
    // versionChangedWhileOpen means that something else outside of our control has likely updated the version.
    // One way this could happen is if you open multiple tabs, as the version number changes each time the database is opened.
    // If we suspect this we close the db and tag it, so that next time it's accessed it reopens the DB with the current version
    // as opposed to upping the version again
    // This avoids infinite loops of version updates if you have multiple tabs open
    idb.onversionchange = function () {
      console.log('Database was made stale, closing handle');
      openDatabases[opts.name].versionChangedWhileOpen = true;
      idb.close();
    };

    idb.onclose = function () {
      console.log('Database was made stale, closing handle');
      if (opts.name in openDatabases) {
        openDatabases[opts.name].versionChangedWhileOpen = true;
      }
    };

    // Load (or initialise) the metadata record; resolve only after the
    // readwrite transaction completes so all defaults are persisted.
    let metadata = {id: META_LOCAL_STORE};
    const txn = idb.transaction([META_LOCAL_STORE], 'readwrite');

    txn.oncomplete = function () {
      resolve({idb, metadata});
    };

    const metaStore = txn.objectStore(META_LOCAL_STORE);
    metaStore.get(META_LOCAL_STORE).onsuccess = function (e) {
      metadata = e.target.result || metadata;
      let changed = false;

      if (!('doc_count' in metadata)) {
        changed = true;
        metadata.doc_count = 0;
      }

      if (!('seq' in metadata)) {
        changed = true;
        metadata.seq = 0;
      }

      if (!('db_uuid' in metadata)) {
        changed = true;
        metadata.db_uuid = uuid$1();
      }

      if (!('idb_attachment_format' in metadata)) {
        // There will be trouble if any browser _stops_ supporting blobs.

        const createBlobDoc = blob => ({ id:'blob-support', blob });

        checkBlobSupport(txn, META_LOCAL_STORE, createBlobDoc).then(blobSupport => {
          // Unfortunate that we have to track this in both the metadata and on
          // api, but sometimes we have access to one, sometimes the other (and
          // sometimes both). We could change function signatures in index.js
          // to make this consistent.
          api.blobSupport = metadata.idb_attachment_format = blobSupport ? 'binary' : 'base64';
          metaStore.put(metadata);
        });
      } else if (changed) {
        // NOTE(review): when idb_attachment_format already exists and nothing
        // changed, api.blobSupport is NOT assigned in this function —
        // presumably it is populated elsewhere before attachments are read;
        // verify against the adapter's index.js.
        api.blobSupport = metadata.idb_attachment_format;
        metaStore.put(metadata);
      }
    };
  };

  openReq.onerror = function (e) {
    reject(e.target.error);
  };
}
3762
// Return a Promise for {idb, metadata} for `opts.name`, opening the
// database only when it isn't already open — or when its handle was made
// stale by a version change from another tab.
function setup (openDatabases, api, opts) {
  const cached = openDatabases[opts.name];
  if (cached && !cached.versionChangedWhileOpen) {
    return cached;
  }

  // Tell openDatabase whether to reuse the current version (stale handle)
  // or to bump the version as part of opening.
  opts.versionChangedWhileOpen = cached && cached.versionChangedWhileOpen;

  const opening = new Promise(function (resolve, reject) {
    openDatabase(openDatabases, api, opts, resolve, reject);
  });
  openDatabases[opts.name] = opening;
  return opening;
}
3775
// Report document count and update sequence from the cached metadata record.
function info (metadata, callback) {
  const stats = {
    doc_count: metadata.doc_count,
    update_seq: metadata.seq
  };
  callback(null, stats);
}
3782
// Fetch a document by id from DOC_STORE inside `txn`, resolving the
// requested revision (opts.rev, optionally mapped to its leaf via
// opts.latest) and responding via `callback` with {doc, metadata, ctx}.
function get (txn, id, opts, callback) {
  if (txn.error) {
    return callback(txn.error);
  }

  txn.txn.objectStore(DOC_STORE).get(id).onsuccess = function (e) {
    const doc = e.target.result;

    // BUGFIX: check for a missing doc before resolving the revision.
    // Previously `latest(opts.rev, doc)` ran first, so a missing doc
    // combined with {rev, latest} threw a TypeError instead of producing
    // the MISSING_DOC error callers expect.
    if (!doc) {
      callback(createError(MISSING_DOC, 'missing'));
      return;
    }

    let rev;
    if (!opts.rev) {
      rev = doc.rev;
    } else {
      rev = opts.latest ? latest(opts.rev, doc) : opts.rev;
    }

    // A winning deleted rev only counts as missing when no explicit rev was
    // requested; an unknown rev is always missing.
    if ((doc.deleted && !opts.rev) || !(rev in doc.revs)) {
      callback(createError(MISSING_DOC, 'missing'));
      return;
    }

    const result = doc.revs[rev].data;
    result._id = doc.id;
    result._rev = rev;

    // WARNING: expecting possible old format
    // TODO: why are we passing the transaction in the context?
    //       It's not clear we ever thread these txns usefully
    callback(null, {
      doc: result,
      metadata: doc,
      ctx: txn
    });
  };
}
3816
// _getLocal() doesn't know if opts.binary is set or not, so assume it's not.
const BINARY_ATTACHMENTS = false;

// Fetch a _local/ document from META_LOCAL_STORE, inflating any attachment
// stubs before responding via `callback`.
function getLocal (txn, id, api, callback) {
  if (txn.error) {
    return callback(txn.error);
  }

  txn.txn.objectStore(META_LOCAL_STORE).get(id).onsuccess = function (e) {
    const doc = e.target.result;

    if (!doc) {
      return callback(createError(MISSING_DOC, 'missing'));
    }

    const result = doc.revs[doc.rev].data;
    result._id = doc.id;
    result._rev = doc.rev;

    if (!result._attachments) {
      return callback(null, result);
    }

    const inflating = Object.keys(result._attachments).map(function (name) {
      return processAttachment(name, doc, result, BINARY_ATTACHMENTS, api.blobSupport);
    });
    Promise.all(inflating)
      .then(() => callback(null, result))
      .catch(callback);
  };
}
3850
// Resolve one attachment body from the doc metadata in `opts.metadata`,
// responding via `cb` with a blob/buffer (opts.binary) or a base64 string.
function getAttachment(docId, attachId, attachment, opts, cb) {
  if (isLocalId(docId)) {
    return cb(createError(MISSING_DOC, 'missing'));
  }

  const stored = opts.metadata.attachments[attachment.digest].data;

  // 'base64'-format databases store attachment bodies as strings.
  if (typeof stored === 'string') {
    if (opts.binary) {
      return cb(null, b64ToBluffer(stored, attachment.content_type));
    }
    return cb(null, stored);
  }

  // Otherwise the body is binary (a blob/buffer).
  if (opts.binary) {
    return cb(null, stored);
  }
  readAsBinaryString(stored, function (binString) {
    cb(null, thisBtoa(binString));
  });
}
3877
3878function bulkDocs (api, req, opts, metadata, dbOpts, idbChanges, callback) {
3879
3880 let txn;
3881
3882 // TODO: I would prefer to get rid of these globals
3883 let error;
3884 const results = [];
3885 const docs = [];
3886 let lastWriteIndex;
3887
3888 const revsLimit = dbOpts.revs_limit || 1000;
3889 const rewriteEnabled = dbOpts.name.indexOf("-mrview-") === -1;
3890 const autoCompaction = dbOpts.auto_compaction;
3891
3892 // We only need to track 1 revision for local documents
3893 function docsRevsLimit(doc) {
3894 return isLocalId(doc.id) ? 1 : revsLimit;
3895 }
3896
3897 function rootIsMissing(doc) {
3898 return doc.rev_tree[0].ids[1].status === 'missing';
3899 }
3900
3901 // Reads the original doc from the store if available
3902 // As in allDocs with keys option using multiple get calls is the fastest way
3903 function fetchExistingDocs(txn, docs) {
3904 let fetched = 0;
3905 const oldDocs = {};
3906
3907 function readDone(e) {
3908 if (e.target.result) {
3909 oldDocs[e.target.result.id] = e.target.result;
3910 }
3911 if (++fetched === docs.length) {
3912 processDocs$$1(txn, docs, oldDocs);
3913 }
3914 }
3915
3916 docs.forEach(function (doc) {
3917 const docStore = isLocalId(doc.id) ? META_LOCAL_STORE : DOC_STORE;
3918 txn.objectStore(docStore).get(doc.id).onsuccess = readDone;
3919 });
3920 }
3921
3922 function revHasAttachment(doc, rev, digest) {
3923 return doc.revs[rev] &&
3924 doc.revs[rev].data._attachments &&
3925 Object.values(doc.revs[rev].data._attachments).find(function (att) {
3926 return att.digest === digest;
3927 });
3928 }
3929
3930 function processDocs$$1(txn, docs, oldDocs) {
3931
3932 docs.forEach(function (doc, i) {
3933 let newDoc;
3934
3935 // The first document write cannot be a deletion
3936 if ('was_delete' in opts && !(Object.prototype.hasOwnProperty.call(oldDocs, doc.id))) {
3937 newDoc = createError(MISSING_DOC, 'deleted');
3938
3939 // The first write of a document cannot specify a revision
3940 } else if (opts.new_edits &&
3941 !Object.prototype.hasOwnProperty.call(oldDocs, doc.id) &&
3942 rootIsMissing(doc)) {
3943 newDoc = createError(REV_CONFLICT);
3944
3945 // Update the existing document
3946 } else if (Object.prototype.hasOwnProperty.call(oldDocs, doc.id)) {
3947 newDoc = update(txn, doc, oldDocs[doc.id]);
3948 // The update can be rejected if it is an update to an existing
3949 // revision, if so skip it
3950 if (newDoc == false) {
3951 return;
3952 }
3953
3954 // New document
3955 } else {
3956 // Ensure new documents are also stemmed
3957 const merged = merge([], doc.rev_tree[0], docsRevsLimit(doc));
3958 doc.rev_tree = merged.tree;
3959 doc.stemmedRevs = merged.stemmedRevs;
3960 newDoc = doc;
3961 newDoc.isNewDoc = true;
3962 newDoc.wasDeleted = doc.revs[doc.rev].deleted ? 1 : 0;
3963 }
3964
3965 if (newDoc.error) {
3966 results[i] = newDoc;
3967 } else {
3968 oldDocs[newDoc.id] = newDoc;
3969 lastWriteIndex = i;
3970 write(txn, newDoc, i);
3971 }
3972 });
3973 }
3974
3975 // Converts from the format returned by parseDoc into the new format
3976 // we use to store
3977 function convertDocFormat(doc) {
3978
3979 const newDoc = {
3980 id: doc.metadata.id,
3981 rev: doc.metadata.rev,
3982 rev_tree: doc.metadata.rev_tree,
3983 revs: doc.metadata.revs || {}
3984 };
3985
3986 newDoc.revs[newDoc.rev] = {
3987 data: doc.data,
3988 deleted: doc.metadata.deleted
3989 };
3990
3991 return newDoc;
3992 }
3993
3994 function update(txn, doc, oldDoc) {
3995
3996 // Ignore updates to existing revisions
3997 if ((doc.rev in oldDoc.revs) && !opts.new_edits) {
3998 return false;
3999 }
4000
4001 const isRoot = /^1-/.test(doc.rev);
4002
4003 // Reattach first writes after a deletion to last deleted tree
4004 if (oldDoc.deleted && !doc.deleted && opts.new_edits && isRoot) {
4005 const tmp = doc.revs[doc.rev].data;
4006 tmp._rev = oldDoc.rev;
4007 tmp._id = oldDoc.id;
4008 doc = convertDocFormat(parseDoc(tmp, opts.new_edits, dbOpts));
4009 }
4010
4011 const merged = merge(oldDoc.rev_tree, doc.rev_tree[0], docsRevsLimit(doc));
4012 doc.stemmedRevs = merged.stemmedRevs;
4013 doc.rev_tree = merged.tree;
4014
4015 // Merge the old and new rev data
4016 const revs = oldDoc.revs;
4017 revs[doc.rev] = doc.revs[doc.rev];
4018 doc.revs = revs;
4019
4020 doc.attachments = oldDoc.attachments;
4021
4022 const inConflict = opts.new_edits && (((oldDoc.deleted && doc.deleted) ||
4023 (!oldDoc.deleted && merged.conflicts !== 'new_leaf') ||
4024 (oldDoc.deleted && !doc.deleted && merged.conflicts === 'new_branch') ||
4025 (oldDoc.rev === doc.rev)));
4026
4027 if (inConflict) {
4028 return createError(REV_CONFLICT);
4029 }
4030
4031 doc.wasDeleted = oldDoc.deleted;
4032
4033 return doc;
4034 }
4035
4036 function write(txn, doc, i) {
4037
4038 // We copy the data from the winning revision into the root
4039 // of the document so that it can be indexed
4040 const winningRev$$1 = winningRev(doc);
4041 // rev of new doc for attachments and to return it
4042 const writtenRev = doc.rev;
4043 const isLocal = isLocalId(doc.id);
4044
4045 const theDoc = doc.revs[winningRev$$1].data;
4046
4047 const isNewDoc = doc.isNewDoc;
4048
4049 if (rewriteEnabled) {
4050 // doc.data is what we index, so we need to clone and rewrite it, and clean
4051 // it up for indexability
4052 const result = rewrite(theDoc);
4053 if (result) {
4054 doc.data = result;
4055 delete doc.data._attachments;
4056 } else {
4057 doc.data = theDoc;
4058 }
4059 } else {
4060 doc.data = theDoc;
4061 }
4062
4063 doc.rev = winningRev$$1;
4064 // .deleted needs to be an int for indexing
4065 doc.deleted = doc.revs[winningRev$$1].deleted ? 1 : 0;
4066
4067 // Bump the seq for every new (non local) revision written
4068 if (!isLocal) {
4069 doc.seq = ++metadata.seq;
4070
4071 let delta = 0;
4072 // If its a new document, we wont decrement if deleted
4073 if (doc.isNewDoc) {
4074 delta = doc.deleted ? 0 : 1;
4075 } else if (doc.wasDeleted !== doc.deleted) {
4076 delta = doc.deleted ? -1 : 1;
4077 }
4078 metadata.doc_count += delta;
4079 }
4080 delete doc.isNewDoc;
4081 delete doc.wasDeleted;
4082
4083 // If there have been revisions stemmed when merging trees,
4084 // delete their data
4085 let revsToDelete = doc.stemmedRevs || [];
4086
4087 if (autoCompaction && !isNewDoc) {
4088 const result = compactTree(doc);
4089 if (result.length) {
4090 revsToDelete = revsToDelete.concat(result);
4091 }
4092 }
4093
4094 if (revsToDelete.length) {
4095 revsToDelete.forEach(function (rev) { delete doc.revs[rev]; });
4096 }
4097
4098 delete doc.stemmedRevs;
4099
4100 if (!('attachments' in doc)) {
4101 doc.attachments = {};
4102 }
4103
4104 if (theDoc._attachments) {
4105 for (const k in theDoc._attachments) {
4106 const attachment = theDoc._attachments[k];
4107 if (attachment.stub) {
4108 if (!(attachment.digest in doc.attachments)) {
4109 error = createError(MISSING_STUB);
4110 // TODO: Not sure how safe this manual abort is, seeing
4111 // console issues
4112 txn.abort();
4113 return;
4114 }
4115
4116 if (revHasAttachment(doc, writtenRev, attachment.digest)) {
4117 doc.attachments[attachment.digest].revs[writtenRev] = true;
4118 }
4119
4120 } else {
4121
4122 doc.attachments[attachment.digest] = attachment;
4123 doc.attachments[attachment.digest].revs = {};
4124 doc.attachments[attachment.digest].revs[writtenRev] = true;
4125
4126 theDoc._attachments[k] = {
4127 stub: true,
4128 digest: attachment.digest,
4129 content_type: attachment.content_type,
4130 length: attachment.length,
4131 revpos: parseInt(writtenRev, 10)
4132 };
4133 }
4134 }
4135 }
4136
4137 // Local documents have different revision handling
4138 if (isLocal && doc.deleted) {
4139 txn.objectStore(META_LOCAL_STORE).delete(doc.id).onsuccess = function () {
4140 results[i] = {
4141 ok: true,
4142 id: doc.id,
4143 rev: '0-0'
4144 };
4145 };
4146 updateSeq(i);
4147 return;
4148 }
4149
4150 const docStore = isLocal ? META_LOCAL_STORE : DOC_STORE;
4151 txn.objectStore(docStore).put(doc).onsuccess = function () {
4152 results[i] = {
4153 ok: true,
4154 id: doc.id,
4155 rev: writtenRev
4156 };
4157 updateSeq(i);
4158 };
4159 }
4160
4161 function updateSeq(i) {
4162 if (i === lastWriteIndex) {
4163 txn.objectStore(META_LOCAL_STORE).put(metadata);
4164 }
4165 }
4166
  // Normalise a single attachment before it is written: validate base64
  // input, convert between base64 and Blob/Buffer form to match the
  // database's idb_attachment_format, and compute the md5 digest and length.
  // Resolves with the (mutated) attachment; rejects only on invalid base64.
  function preProcessAttachment(attachment) {
    if (attachment.stub) {
      // Stubs reference already-stored attachment data; nothing to process.
      return Promise.resolve(attachment);
    }

    let binData;
    if (typeof attachment.data === 'string') {
      try {
        binData = thisAtob(attachment.data);
      } catch (e) {
        return Promise.reject(createError(BAD_ARG, 'Attachment is not a valid base64 string'));
      }
      if (metadata.idb_attachment_format === 'binary') {
        attachment.data = binStringToBluffer(binData, attachment.content_type);
      }
    } else {
      binData = attachment.data;
      if (metadata.idb_attachment_format === 'base64') {
        // TODO could run these in parallel, if we cared
        return new Promise(resolve => {
          blobToBase64(attachment.data, function (b64) {
            attachment.data = b64;
            binaryMd5(binData, function (result) {
              attachment.digest = 'md5-' + result;
              attachment.length = binData.size || binData.length || 0;
              resolve(attachment);
            });
          });
        });
      }
    }

    // Digest and length are always computed over the binary form, regardless
    // of how the attachment is stored.
    return new Promise(function (resolve) {
      binaryMd5(binData, function (result) {
        attachment.digest = 'md5-' + result;
        attachment.length = binData.size || binData.length || 0;
        resolve(attachment);
      });
    });
  }
4207
  // Run preProcessAttachment() over every attachment of every doc in the
  // batch; resolves once all digests/format conversions have completed.
  function preProcessAttachments() {
    const promises = docs.map(function (doc) {
      const data = doc.revs[doc.rev].data;
      if (!data._attachments) {
        return Promise.resolve(data);
      }
      const attachments = Object.keys(data._attachments).map(function (k) {
        // Stash the attachment name so the keyed map can be rebuilt below.
        data._attachments[k].name = k;
        return preProcessAttachment(data._attachments[k]);
      });

      return Promise.all(attachments).then(function (newAttachments) {
        const processed = {};
        newAttachments.forEach(function (attachment) {
          processed[attachment.name] = attachment;
          delete attachment.name;
        });
        data._attachments = processed;
        return data;
      });
    });
    return Promise.all(promises);
  }
4231
  // Parse and validate every incoming doc up-front; the first invalid doc
  // fails the whole batch before we touch IndexedDB.
  for (let i = 0, len = req.docs.length; i < len; i++) {
    let result;
    // TODO: We should get rid of throwing for invalid docs, also not sure
    // why this is needed in idb-next and not idb
    try {
      result = parseDoc(req.docs[i], opts.new_edits, dbOpts);
    } catch (err) {
      result = err;
    }
    if (result.error) {
      return callback(result);
    }

    // Ideally parseDoc would return data in this format, but it is currently
    // shared so we need to convert
    docs.push(convertDocFormat(result));
  }
4249
  // Process attachments first (digests / format conversion), then open one
  // readwrite transaction over both stores for the whole batch.
  preProcessAttachments().then(function () {
    // We _could_ check doc ids here, and skip opening DOC_STORE if all docs are local.
    // This may marginally slow things down for local docs. It seems pragmatic to keep
    // the code simple and optimise for calls to bulkDocs() which include non-local docs.
    api._openTransactionSafely([DOC_STORE, META_LOCAL_STORE], 'readwrite', function (err, _txn) {
      if (err) {
        return callback(err);
      }

      txn = _txn;

      // `error` is set by the write path (eg a missing attachment stub)
      // before it manually aborts the transaction.
      txn.onabort = function () {
        callback(error || createError(UNKNOWN_ERROR, 'transaction was aborted'));
      };
      txn.ontimeout = idbError(callback);

      txn.oncomplete = function () {
        idbChanges.notify(dbOpts.name);
        callback(null, results);
      };

      // We would like to use promises here, but idb sucks
      fetchExistingDocs(txn, docs);
    });
  }).catch(function (err) {
    callback(err);
  });
4277}
4278
// Point-lookup variant of allDocs: fetch each requested key individually.
// Responses can arrive in any order, so each result is slotted by its request
// index and the whole batch is flushed to `allDocsInner` (in request order)
// only once every lookup has answered. Missing keys yield
// {key, error: 'not_found'} entries.
function allDocsKeys(keys, docStore, allDocsInner) {
  const valuesBatch = new Array(keys.length);
  let remaining = keys.length;

  keys.forEach(function (key, index) {
    docStore.get(key).onsuccess = function (event) {
      const found = event.target.result;
      valuesBatch[index] = found ? found : {key, error: 'not_found'};
      remaining -= 1;
      if (remaining === 0) {
        for (const value of valuesBatch) {
          allDocsInner(value);
        }
      }
    };
  });
}
4299
// Build the IDBKeyRange for an allDocs scan. `key` wins over start/end and
// yields an exact-match range; otherwise a bound range is produced, with the
// bounds swapped for descending scans (IndexedDB ranges are always stored
// low-to-high). IDBKeyRange throws a DataError for invalid bounds, which is
// returned to the caller as {error} instead of propagating.
function createKeyRange(start, end, inclusiveStart, inclusiveEnd, key, descending) {
  try {
    if (key) {
      // The leading 0 restricts the match to non-deleted entries.
      return IDBKeyRange.only([0, key]);
    }
    if (descending) {
      return IDBKeyRange.bound(end, start, !inclusiveEnd, !inclusiveStart);
    }
    return IDBKeyRange.bound(start, end, !inclusiveStart, !inclusiveEnd);
  } catch (e) {
    return {error: e};
  }
}
4313
// Translate an IDBKeyRange construction failure into an allDocs response.
// A DataError with code 0 means startkey sorts after endkey, which CouchDB
// semantics treat as an empty result set rather than a failure; anything
// else is surfaced as an IDB_ERROR through the callback.
function handleKeyRangeError(opts, metadata, err, callback) {
  const isEmptyRange = err.name === "DataError" && err.code === 0;
  if (!isEmptyRange) {
    return callback(createError(IDB_ERROR, err.name, err.message));
  }

  const emptyResult = {
    total_rows: metadata.doc_count,
    offset: opts.skip,
    rows: []
  };
  /* istanbul ignore if */
  if (opts.update_seq) {
    emptyResult.update_seq = metadata.seq;
  }
  callback(null, emptyResult);
}
4330
// allDocs over the 'deleted,id' native index. Three shapes:
//   - opts.keys: individual point lookups via allDocsKeys()
//   - no skip/limit: batched getAll() over the full key range
//   - skip/limit: a cheap key-only cursor first narrows the key range, then
//     the batched getAll() path runs over the narrowed range.
// Results are delivered from the transaction's oncomplete handler.
function allDocs (txn, metadata, opts, callback) {
  if (txn.error) {
    return callback(txn.error);
  }

  // TODO: Weird hack, I don't like it
  // limit === 0 short-circuits to an empty result without touching the store.
  if (opts.limit === 0) {
    const returnVal = {
      total_rows: metadata.doc_count,
      offset: opts.skip,
      rows: []
    };

    /* istanbul ignore if */
    if (opts.update_seq) {
      returnVal.update_seq = metadata.seq;
    }
    return callback(null, returnVal);
  }

  const results = [];
  const processing = [];

  const key = 'key' in opts ? opts.key : false;
  const keys = 'keys' in opts ? opts.keys : false;
  let skip = opts.skip || 0;
  let limit = typeof opts.limit === 'number' ? opts.limit : undefined;
  const inclusiveEnd = opts.inclusive_end !== false;
  const descending = 'descending' in opts && opts.descending ? 'prev' : null;
  // Default bounds cover all doc ids ('' .. '\uffff'), flipped when descending.
  const start = 'startkey' in opts ? opts.startkey : (descending ? '\uffff' : '');
  const end = 'endkey' in opts ? opts.endkey : (descending ? '' : '\uffff');

  const docStore = txn.txn.objectStore(DOC_STORE);

  if (keys) {
    txn.txn.oncomplete = onTxnComplete;
    const allDocsInner = doc => {
      // not_found entries from allDocsKeys are passed through as-is.
      if (doc.error) {
        return results.push(doc);
      }

      const row = { id:doc.id, key:doc.id, value:{ rev:doc.rev } };

      if (doc.deleted) {
        row.value.deleted = true;
        row.doc = null;
      } else if (opts.include_docs) {
        include_doc(row, doc);
      }

      results.push(row);
    };
    return allDocsKeys(keys, docStore, allDocsInner);
  }

  let keyRange = createKeyRange([0, start], [0, end], true, inclusiveEnd, key, descending);
  if (keyRange.error) {
    return handleKeyRangeError(opts, metadata, keyRange.error, callback);
  }

  // txn.oncomplete must be set AFTER key-range-error is generated
  txn.txn.oncomplete = onTxnComplete;

  // Attach the doc body (and optionally conflicts / inlined attachments) to
  // an allDocs row.
  function include_doc(row, doc) {
    const docData = doc.revs[doc.rev].data;

    row.doc = docData;
    row.doc._id = doc.id;
    row.doc._rev = doc.rev;
    if (opts.conflicts) {
      const conflicts = collectConflicts(doc);
      if (conflicts.length) {
        row.doc._conflicts = conflicts;
      }
    }
    if (opts.attachments && docData._attachments) {
      for (const name in docData._attachments) {
        // Attachment inlining is async; completion is awaited in onTxnComplete.
        processing.push(processAttachment(name, doc, row.doc, opts.binary,
          metadata.idb_attachment_format));
      }
    }
  }

  function onTxnComplete() {
    const returnVal = {
      total_rows: metadata.doc_count,
      offset: 0,
      rows: results
    };
    /* istanbul ignore if */
    if (opts.update_seq) {
      returnVal.update_seq = metadata.seq;
    }

    if (processing.length) {
      Promise.all(processing).then(function () {
        callback(null, returnVal);
      });
    } else {
      callback(null, returnVal);
    }
  }

  const dbIndex = docStore.index('deleted,id');

  if (!skip && !limit) {
    fetchResults();
  } else {
    // With skip/limit, walk a key-only cursor first to find the real first
    // and last keys of the requested window, then narrow keyRange so the
    // getAll() batches below only load the rows we will actually return.
    let firstKey;
    let limitKey = limit > 0;

    dbIndex.openKeyCursor(keyRange, descending || 'next').onsuccess = (e) => {
      const cursor = e.target.result;

      if (skip) {
        if (!cursor) { return txn.txn.commit(); }
        cursor.advance(skip);
        skip = 0;
        return;
      }

      if (firstKey === undefined) {
        firstKey = cursor && cursor.key;
        if (!firstKey) { return txn.txn.commit(); }
      }

      if (limit) {
        if (limit > 1 && cursor) {
          cursor.advance(limit - 1);
          limit = undefined;
          return;
        }
        limit = undefined;
      }


      if (limitKey) {
        limitKey = cursor && cursor.key;
      }
      if (!limitKey) {
        // Cursor ran off the end of the range; fall back to the original bound.
        limitKey = descending ? keyRange.lower : keyRange.upper;
      }

      keyRange = createKeyRange(firstKey, limitKey, true, inclusiveEnd, key, descending);
      if (keyRange.error) {
        txn.txn.abort();
        return handleKeyRangeError(opts, metadata, keyRange.error, callback);
      }

      fetchResults();
    };
  }

  async function fetchResults() {
    // There is a risk here with getting all results into memory - if they have multiple
    // revs, then we risk loading loads of extra data which is then discarded. This is
    // reduced by batching. This also loads unused data when include_docs is false.
    //
    // Current batch size is quite arbitrary, but seems like (1) more than a typical
    // result size, and (2) not so big it's likely to cause issues.
    const batchSize = 100;

    let kr = keyRange;
    do {
      kr = await fetchNextBatch(kr);
    } while (kr);
    // Batches are collected in store (ascending) order; a single reverse at
    // the end produces descending output.
    if (descending) {
      results.reverse();
    }
    return txn.txn.commit();

    // Loads one getAll() batch; resolves with the key range for the next
    // batch, or undefined when the scan is finished.
    function fetchNextBatch(kr) {
      return new Promise((resolve) => {
        dbIndex.getAll(kr, batchSize).onsuccess = (e) => {
          const batch = e.target.result;
          for (let i=0; i<batch.length; ++i) {
            const doc = batch[i];
            const row = { id:doc.id, key:doc.id, value:{ rev:doc.rev } };
            if (opts.include_docs) {
              include_doc(row, doc);
            }
            results.push(row);
          }

          if (batch.length >= batchSize) {
            // A full batch may mean more rows remain: continue from just
            // past the last key we saw.
            const lastSeenKey = [ 0, batch[batch.length-1].id ];
            const startKey = descending ? kr.upper : lastSeenKey;
            const endKey = descending ? lastSeenKey : kr.upper;
            if (startKey[1] !== endKey[1]) {
              const incEnd = descending ? false : inclusiveEnd;
              const incStart = descending ? true : false;
              return resolve(createKeyRange(startKey, endKey, incStart, incEnd, key, descending));
            }
          }
          return resolve();
        };
      });
    }
  }
}
4531
// The changes feed. Continuous feeds are served by the shared in-memory
// notifier (idbChanges); one-shot feeds walk the 'seq' index with a cursor,
// filtering and emitting each change via opts.onChange, and completing via
// opts.complete once the transaction finishes.
function changes (txn, idbChanges, api, dbOpts, opts) {
  if (txn.error) {
    return opts.complete(txn.error);
  }

  if (opts.continuous) {
    const id = dbOpts.name + ':' + uuid$1();
    idbChanges.addListener(dbOpts.name, id, api, opts);
    // Kick off an immediate notification so existing changes are delivered.
    idbChanges.notify(dbOpts.name);
    return {
      cancel: function () {
        idbChanges.removeListener(dbOpts.name, id);
      }
    };
  }

  // limit: 0 is treated as 1, per CouchDB behaviour; -1 means unlimited.
  let limit = 'limit' in opts ? opts.limit : -1;
  if (limit === 0) {
    limit = 1;
  }

  const store = txn.txn.objectStore(DOC_STORE).index('seq');

  const filter = filterChange(opts);
  let received = 0;

  let lastSeq = opts.since || 0;
  const results = [];

  const processing = [];

  function onReqSuccess(e) {
    // A null result means the cursor is exhausted.
    if (!e.target.result) { return; }
    const cursor = e.target.result;
    const doc = cursor.value;
    // Overwrite doc.data, which may have been rewritten (see rewrite.js) with
    // the clean version for that rev
    doc.data = doc.revs[doc.rev].data;
    doc.data._id = doc.id;
    doc.data._rev = doc.rev;
    if (doc.deleted) {
      doc.data._deleted = true;
    }

    if (opts.doc_ids && opts.doc_ids.indexOf(doc.id) === -1) {
      return cursor.continue();
    }

    // WARNING: expecting possible old format
    const change = opts.processChange(doc.data, doc, opts);
    change.seq = doc.seq;
    lastSeq = doc.seq;
    const filtered = filter(change);

    // If its an error
    if (typeof filtered === 'object') {
      return opts.complete(filtered);
    }

    if (filtered) {
      received++;
      if (opts.return_docs) {
        results.push(change);
      }

      if (opts.include_docs && opts.attachments && doc.data._attachments) {
        const promises = [];
        for (const name in doc.data._attachments) {
          const p = processAttachment(name, doc, change.doc, opts.binary, api.blobSupport);
          // We add the processing promise to 2 arrays, one tracks all
          // the promises needed before we fire onChange, the other
          // ensure we process all attachments before onComplete
          promises.push(p);
          processing.push(p);
        }

        Promise.all(promises).then(function () {
          opts.onChange(change);
        });
      } else {
        opts.onChange(change);
      }
    }
    // Stop iterating once `limit` accepted changes have been emitted.
    if (received !== limit) {
      cursor.continue();
    }
  }

  function onTxnComplete() {
    // Wait for any in-flight attachment processing before completing.
    Promise.all(processing).then(function () {
      opts.complete(null, {
        results,
        last_seq: lastSeq
      });
    });
  }

  let req;
  if (opts.descending) {
    req = store.openCursor(null, 'prev');
  } else {
    // `since` is exclusive, hence the open lower bound.
    req = store.openCursor(IDBKeyRange.lowerBound(opts.since, true));
  }

  txn.txn.oncomplete = onTxnComplete;
  req.onsuccess = onReqSuccess;
}
4639
// Fetch a document record by id and hand back just its rev_tree, or a
// MISSING_DOC error when no such record exists.
function getRevisionTree (txn, id, callback) {
  if (txn.error) {
    return callback(txn.error);
  }

  const req = txn.txn.objectStore(DOC_STORE).get(id);
  req.onsuccess = function (e) {
    const record = e.target.result;
    if (record) {
      callback(null, record.rev_tree);
    } else {
      callback(createError(MISSING_DOC));
    }
  };
}
4654
// Compaction: drop the bodies of the given revs of one document, keeping the
// rev tree shape (nodes are marked 'missing' rather than removed), and
// garbage-collect any attachments no surviving rev still references.
function doCompaction (txn, id, revs, callback) {
  if (txn.error) {
    return callback(txn.error);
  }

  const docStore = txn.txn.objectStore(DOC_STORE);

  docStore.get(id).onsuccess = function (e) {
    const doc = e.target.result;

    // Mark compacted revisions as missing in the rev tree instead of
    // removing the tree nodes themselves.
    traverseRevTree(doc.rev_tree, function (isLeaf, pos, revHash, ctx, opts) {
      const rev = pos + '-' + revHash;
      if (revs.indexOf(rev) !== -1) {
        opts.status = 'missing';
      }
    });

    const attachments = [];

    revs.forEach(function (rev) {
      if (rev in doc.revs) {
        // Make a list of attachments that are used by the revisions being
        // deleted
        if (doc.revs[rev].data._attachments) {
          for (const k in doc.revs[rev].data._attachments) {
            attachments.push(doc.revs[rev].data._attachments[k].digest);
          }
        }
        delete doc.revs[rev];
      }
    });

    // Attachments have a list of revisions that are using them, when
    // that list becomes empty we can delete the attachment.
    attachments.forEach(function (digest) {
      revs.forEach(function (rev) {
        delete doc.attachments[digest].revs[rev];
      });
      if (!Object.keys(doc.attachments[digest].revs).length) {
        delete doc.attachments[digest];
      }
    });

    docStore.put(doc);
  };

  // Only signal completion once the write has actually committed.
  txn.txn.oncomplete = function () {
    callback();
  };
}
4705
// Delete the underlying IndexedDB database for `dbOpts.name`.
// Detaches all change listeners, closes any open handle first (an open
// connection would otherwise block the deleteDatabase request), then deletes
// the database and reports {ok: true} via `callback`.
//
// Fix: previously a failed deleteDatabase request had no onerror handler, so
// the error was silently dropped and `callback` never fired; the error is now
// propagated to the callback.
function destroy (dbOpts, openDatabases, idbChanges, callback) {

  idbChanges.removeAllListeners(dbOpts.name);

  function doDestroy() {
    const req = indexedDB.deleteDatabase(dbOpts.name);
    req.onsuccess = function () {
      delete openDatabases[dbOpts.name];
      callback(null, {ok: true});
    };
    // Surface deletion failures instead of leaving the caller hanging.
    req.onerror = function (e) {
      callback(e.target.error);
    };
  }

  // If the database is open we need to close it
  if (dbOpts.name in openDatabases) {
    openDatabases[dbOpts.name].then(function (res) {
      res.idb.close();
      doDestroy();
    });
  } else {
    doDestroy();
  }

}
4729
// Adapted from
// https://github.com/pouchdb/pouchdb/blob/master/packages/node_modules/pouchdb-find/src/adapters/local/find/query-planner.js#L20-L24
// This could change / improve in the future?
// CouchDB's collation-wise lowest and highest key sentinels, as produced by
// the find query planner.
const COUCH_COLLATE_LO = null;
const COUCH_COLLATE_HI = '\uffff'; // actually used as {"\uffff": {}}

// Adapted from: https://www.w3.org/TR/IndexedDB/#compare-two-keys
// Importantly, *there is no upper bound possible* in idb. The ideal data
// structure an infinitely deep array:
// const IDB_COLLATE_HI = []; IDB_COLLATE_HI.push(IDB_COLLATE_HI)
// But IDBKeyRange is not a fan of shenanigans, so I've just gone with 12 layers
// because it looks nice and surely that's enough!
const IDB_COLLATE_LO = Number.NEGATIVE_INFINITY;
const IDB_COLLATE_HI = [[[[[[[[[[[[]]]]]]]]]]]];
4744
4745//
4746// TODO: this should be made offical somewhere and used by AllDocs / get /
4747// changes etc as well.
4748//
//
// TODO: this should be made offical somewhere and used by AllDocs / get /
// changes etc as well.
//
// Convert an internal idb record into a plain PouchDB document: the winning
// rev's data with _id/_rev attached, plus _deleted when the record is deleted.
// Note: mutates (and returns) the stored data object rather than copying it.
function externaliseRecord(idbDoc) {
  const { id, rev, deleted, revs } = idbDoc;

  const doc = revs[rev].data;
  doc._id = id;
  doc._rev = rev;
  if (deleted) {
    doc._deleted = true;
  }

  return doc;
}
4759
4760/**
4761 * Generates a keyrange based on the opts passed to query
4762 *
4763 * The first key is always 0, as that's how we're filtering out deleted entries.
4764 */
4765function generateKeyRange(opts) {
4766 function defined(obj, k) {
4767 return obj[k] !== void 0;
4768 }
4769
4770 // Converts a valid CouchDB key into a valid IndexedDB one
4771 function convert(key, exact) {
4772 // The first item in every native index is doc.deleted, and we always want
4773 // to only search documents that are not deleted.
4774 // "foo" -> [0, "foo"]
4775 const filterDeleted = [0].concat(key);
4776
4777 return filterDeleted.map(function (k) {
4778 // null, true and false are not indexable by indexeddb. When we write
4779 // these values we convert them to these constants, and so when we
4780 // query for them we need to convert the query also.
4781 if (k === null && exact) {
4782 // for non-exact queries we treat null as a collate property
4783 // see `if (!exact)` block below
4784 return IDB_NULL;
4785 } else if (k === true) {
4786 return IDB_TRUE;
4787 } else if (k === false) {
4788 return IDB_FALSE;
4789 }
4790
4791 if (!exact) {
4792 // We get passed CouchDB's collate low and high values, so for non-exact
4793 // ranged queries we're going to convert them to our IDB equivalents
4794 if (k === COUCH_COLLATE_LO) {
4795 return IDB_COLLATE_LO;
4796 } else if (Object.prototype.hasOwnProperty.call(k, COUCH_COLLATE_HI)) {
4797 return IDB_COLLATE_HI;
4798 }
4799 }
4800
4801 return k;
4802 });
4803 }
4804
4805 // CouchDB and so PouchdB defaults to true. We need to make this explicit as
4806 // we invert these later for IndexedDB.
4807 if (!defined(opts, 'inclusive_end')) {
4808 opts.inclusive_end = true;
4809 }
4810 if (!defined(opts, 'inclusive_start')) {
4811 opts.inclusive_start = true;
4812 }
4813
4814 if (opts.descending) {
4815 // Flip before generating. We'll check descending again later when performing
4816 // an index request
4817 const realEndkey = opts.startkey,
4818 realInclusiveEnd = opts.inclusive_start;
4819
4820 opts.startkey = opts.endkey;
4821 opts.endkey = realEndkey;
4822 opts.inclusive_start = opts.inclusive_end;
4823 opts.inclusive_end = realInclusiveEnd;
4824 }
4825
4826 try {
4827 if (defined(opts, 'key')) {
4828 return IDBKeyRange.only(convert(opts.key, true));
4829 }
4830
4831 if (defined(opts, 'startkey') && !defined(opts, 'endkey')) {
4832 // lowerBound, but without the deleted docs.
4833 // [1] is the start of the deleted doc range, and we don't want to include then.
4834 return IDBKeyRange.bound(
4835 convert(opts.startkey), [1],
4836 !opts.inclusive_start, true
4837 );
4838 }
4839
4840 if (!defined(opts, 'startkey') && defined(opts, 'endkey')) {
4841 return IDBKeyRange.upperBound(convert(opts.endkey), !opts.inclusive_end);
4842 }
4843
4844 if (defined(opts, 'startkey') && defined(opts, 'endkey')) {
4845 return IDBKeyRange.bound(
4846 convert(opts.startkey), convert(opts.endkey),
4847 !opts.inclusive_start, !opts.inclusive_end
4848 );
4849 }
4850
4851 return IDBKeyRange.only([0]);
4852 } catch (err) {
4853 console.error('Could not generate keyRange', err, opts);
4854 throw Error('Could not generate key range with ' + JSON.stringify(opts));
4855 }
4856}
4857
// Resolve the native IndexedDB index handle for `fields`. If the index does
// not exist yet, the database is "freshened" (closed and re-opened, which
// regenerates native indexes) and the lookup retried. Failures are reported
// through `reject`; the returned promise only ever resolves.
function getIndexHandle(pdb, fields, reject) {
  const indexName = naturalIndexName(fields);

  return new Promise(function (resolve) {
    pdb._openTransactionSafely([DOC_STORE], 'readonly', function (err, txn) {
      if (err) {
        return idbError(reject)(err);
      }

      txn.onabort = idbError(reject);
      txn.ontimeout = idbError(reject);

      const existingIndexNames = Array.from(txn.objectStore(DOC_STORE).indexNames);

      if (existingIndexNames.indexOf(indexName) === -1) {
        // The index is missing, force a db restart and try again
        pdb._freshen()
          .then(function () { return getIndexHandle(pdb, fields, reject); })
          .then(resolve);
      } else {
        resolve(txn.objectStore(DOC_STORE).index(indexName));
      }
    });
  });
}
4883
// In theory we should return something like the doc example below, but find
// only needs rows: [{doc: {...}}], so I think we can just not bother for now
// {
//   "offset" : 0,
//   "rows": [{
//     "id": "doc3",
//     "key": "Lisa Says",
//     "value": null,
//     "doc": {
//       "_id": "doc3",
//       "_rev": "1-z",
//       "title": "Lisa Says"
//     }
//   }],
//   "total_rows" : 4
// }
// find's query implementation: runs a cursor over the native index described
// by the view's design doc, honouring key ranges, skip, limit and descending.
// Called with `this` bound to the PouchDB instance (via $p).
function query(idb, signature, opts, fallback) {
  // At this stage, in the current implementation, find has already gone through
  // and determined if the index already exists from PouchDB's perspective (eg
  // there is a design doc for it).
  //
  // If we find that the index doesn't exist this means we have to close and
  // re-open the DB to correct indexes before proceeding, at which point the
  // index should exist.

  const pdb = this;

  // Assumption, there will be only one /, between the design document name
  // and the view name.
  const parts = signature.split('/');

  return new Promise(function (resolve, reject) {
    pdb.get('_design/' + parts[0]).then(function (ddoc) {
      if (isPartialFilterView(ddoc, parts[1])) {
        // Fix for #8522
        // An IndexedDB index is always over all entries. And there is no way to filter them.
        // Therefore the normal findAbstractMapper will be used
        // for indexes with partial_filter_selector.
        return fallback(signature, opts).then(resolve, reject);
      }

      const fields = rawIndexFields(ddoc, parts[1]);
      if (!fields) {
        throw new Error('ddoc ' + ddoc._id +' with view ' + parts[1] +
          ' does not have map.options.def.fields defined.');
      }

      let skip = opts.skip;
      let limit = Number.isInteger(opts.limit) && opts.limit;

      return getIndexHandle(pdb, fields, reject)
        .then(function (indexHandle) {
          const keyRange = generateKeyRange(opts);
          const req = indexHandle.openCursor(keyRange, opts.descending ? 'prev' : 'next');

          const rows = [];
          req.onerror = idbError(reject);
          req.onsuccess = function (e) {
            const cursor = e.target.result;

            // Finished when the cursor is exhausted or the limit is reached.
            if (!cursor || limit === 0) {
              return resolve({
                rows
              });
            }

            // skip is applied once, via a single cursor.advance() call.
            if (skip) {
              cursor.advance(skip);
              skip = false;
              return;
            }

            if (limit) {
              limit = limit - 1;
            }

            rows.push({doc: externaliseRecord(cursor.value)});
            cursor.continue();
          };
        });
    })
    .catch(reject);
  });

}
4969
// View cleanup for the indexeddb adapter.
//
// Native indexes need no explicit cleanup here: they are reconciled whenever
// the DB is (re)opened, and when a find query hits an index that should exist
// but doesn't. Closing and re-opening the DB would achieve the same thing;
// the only reason to call this is to reclaim space immediately after
// deleting many indexes.
//
// Fix for #8522: views with a partial_filter_selector are served by the
// standard find-abstract-mapper, whose indexes do need cleaning up.
// `fallback` is the standard viewCleanup, so simply delegate to it.
function viewCleanup(idb, fallback) {
  return fallback();
}
4987
// Remove the purged revisions from every attachment's usage map on `doc`,
// dropping any attachment that is no longer referenced by a remaining rev.
// Returns the updated attachments map (an empty object when the doc has no
// attachments). Mutates doc.attachments in place.
function purgeAttachments(doc, revs) {
  if (!doc.attachments) {
    // If there are no attachments, doc.attachments is an empty object
    return {};
  }

  for (const [digest, attachment] of Object.entries(doc.attachments)) {
    for (const rev of revs) {
      if (attachment.revs[rev]) {
        delete attachment.revs[rev];
      }
    }

    if (Object.keys(attachment.revs).length === 0) {
      delete doc.attachments[digest];
    }
  }

  return doc.attachments;
}
5011
// `purge()` expects a path of revisions in its revs argument that:
// - starts with a leaf rev
// - continues sequentially with the remaining revs of that leaf’s branch
//
// eg. for this rev tree:
// 1-9692 ▶ 2-37aa ▶ 3-df22 ▶ 4-6e94 ▶ 5-df4a ▶ 6-6a3a ▶ 7-57e5
//          ┃                 ┗━━━━━━▶ 5-8d8c ▶ 6-65e0
//          ┗━━━━━━▶ 3-43f6 ▶ 4-a3b4
//
// …if you wanted to purge '7-57e5', you would provide ['7-57e5', '6-6a3a', '5-df4a']
//
// The purge adapter implementation in `pouchdb-core` uses the helper function `findPathToLeaf`
// from `pouchdb-merge` to construct this array correctly. Since this purge implementation is
// only ever called from there, we do no additional checks here as to whether `revs` actually
// fulfills the criteria above, since `findPathToLeaf` already does these.
function purge(txn, docId, revs, callback) {
  if (txn.error) {
    return callback(txn.error);
  }

  const docStore = txn.txn.objectStore(DOC_STORE);
  const deletedRevs = [];
  let documentWasRemovedCompletely = false;
  docStore.get(docId).onsuccess = (e) => {
    const doc = e.target.result;

    // we could do a dry run here to check if revs is a proper path towards a leaf in the rev tree

    for (const rev of revs) {
      // purge rev from tree
      doc.rev_tree = removeLeafFromRevTree(doc.rev_tree, rev);

      // assign new revs
      delete doc.revs[rev];
      deletedRevs.push(rev);
    }

    if (doc.rev_tree.length === 0) {
      // if the rev tree is empty, we can delete the entire document
      docStore.delete(doc.id);
      documentWasRemovedCompletely = true;
      return;
    }

    // find new winning rev
    doc.rev = winningRev(doc);
    doc.data = doc.revs[doc.rev].data;
    doc.attachments = purgeAttachments(doc, revs);

    // finally, write the purged doc
    docStore.put(doc);
  };

  // The result only fires once the IndexedDB transaction has committed.
  txn.txn.oncomplete = function () {
    callback(null, {
      ok: true,
      deletedRevs,
      documentWasRemovedCompletely
    });
  };
}
5073
const ADAPTER_NAME = 'indexeddb';

// TODO: Constructor should be capitalised
// Shared change notifier used by bulkDocs (notify) and the continuous
// changes feed (listeners) across all databases opened by this adapter.
const idbChanges = new Changes();

// A shared list of database handles
const openDatabases = {};
5081
// The adapter constructor. Wires every PouchDB adapter hook (_get, _bulkDocs,
// _allDocs, _changes, …) through one of three wrappers that lazily open the
// database via setup() before dispatching:
//   $  — callback style, passes an active idb handle as the first argument
//   $p — promise style equivalent of $
//   $t — opens a transaction over the given stores and passes {txn} (or
//        {error}) as the first argument
function IndexeddbPouch(dbOpts, callback) {

  if (dbOpts.view_adapter) {
    console.log('Please note that the indexeddb adapter manages _find indexes itself, therefore it is not using your specified view_adapter');
  }

  const api = this;
  let metadata = {};

  // Wrapper that gives you an active DB handle. You probably want $t.
  const $ = function (fun) {
    return function () {
      const args = Array.prototype.slice.call(arguments);
      setup(openDatabases, api, dbOpts).then(function (res) {
        metadata = res.metadata;
        args.unshift(res.idb);
        fun.apply(api, args);
      }).catch(function (err) {
        // Route setup failures to the trailing callback if there is one.
        const last = args.pop();
        if (typeof last === 'function') {
          last(err);
        } else {
          console.error(err);
        }
      });
    };
  };
  // the promise version of $
  const $p = function (fun) {
    return function () {
      const args = Array.prototype.slice.call(arguments);

      return setup(openDatabases, api, dbOpts).then(function (res) {
        metadata = res.metadata;
        args.unshift(res.idb);

        return fun.apply(api, args);
      });
    };
  };
  // Wrapper that gives you a safe transaction handle. It's important to use
  // this instead of opening your own transaction from a db handle got from $,
  // because in the time between getting the db handle and opening the
  // transaction it may have been invalidated by index changes.
  const $t = function (fun, stores, mode) {
    mode = mode || 'readonly';

    return function () {
      const args = Array.prototype.slice.call(arguments);
      const txn = {};
      setup(openDatabases, api, dbOpts).then(function (res) {
        metadata = res.metadata;
        txn.txn = res.idb.transaction(stores, mode);
      }).catch(function (err) {
        console.error('Failed to establish transaction safely');
        console.error(err);
        txn.error = err;
      }).then(function () {
        // `fun` receives {txn} or {error}; callees check txn.error first.
        args.unshift(txn);
        fun.apply(api, args);
      });
    };
  };

  api._openTransactionSafely = function (stores, mode, callback) {
    $t(function (txn, callback) {
      callback(txn.error, txn.txn);
    }, stores, mode)(callback);
  };

  api._remote = false;
  api.type = function () { return ADAPTER_NAME; };

  api._id = $(function (_, cb) {
    cb(null, metadata.db_uuid);
  });

  api._info = $(function (_, cb) {
    return info(metadata, cb);
  });

  api._get = $t(get, [DOC_STORE]);
  api._getLocal = $t(function (txn, id, callback) {
    return getLocal(txn, id, api, callback);
  }, [META_LOCAL_STORE]);

  api._bulkDocs = $(function (_, req, opts, callback) {
    bulkDocs(api, req, opts, metadata, dbOpts, idbChanges, callback);
  });

  api._allDocs = $t(function (txn, opts, cb) {
    allDocs(txn, metadata, opts, cb);
  }, [DOC_STORE]);

  api._getAttachment = getAttachment;

  api._changes = $t(function (txn, opts) {
    changes(txn, idbChanges, api, dbOpts, opts);
  }, [DOC_STORE]);

  api._getRevisionTree = $t(getRevisionTree, [DOC_STORE]);
  api._doCompaction = $t(doCompaction, [DOC_STORE], 'readwrite');

  api._customFindAbstractMapper = {
    query: $p(query),
    viewCleanup: $p(viewCleanup)
  };

  api._destroy = function (opts, callback) {
    return destroy(dbOpts, openDatabases, idbChanges, callback);
  };

  api._close = $(function (db, cb) {
    delete openDatabases[dbOpts.name];
    db.close();
    cb();
  });

  // Closing and re-opening the DB re-generates native indexes
  api._freshen = function () {
    return new Promise(function (resolve) {
      api._close(function () {
        $(resolve)();
      });
    });
  };

  api._purge = $t(purge, [DOC_STORE], 'readwrite');

  // TODO: this setTimeout seems nasty, if its needed lets
  // figure out / explain why
  setTimeout(function () {
    callback(null, api);
  });
}
5217
5218// TODO: this isnt really valid permanently, just being lazy to start
// TODO: this isnt really valid permanently, just being lazy to start
// (a real check would verify that window.indexedDB is usable).
IndexeddbPouch.valid = () => true;
5222
// PouchDB plugin entry point: registers this adapter under 'indexeddb'.
// NOTE(review): the trailing `true` flag is presumably "use as preferred /
// downloadable adapter" — confirm against PouchDB.adapter() docs.
function IndexeddbPouchPlugin (PouchDB) {
  PouchDB.adapter(ADAPTER_NAME, IndexeddbPouch, true);
}
5226
// this code only runs in the browser, as its own dist/ script
// Auto-register the plugin against the global PouchDB when loaded via a
// <script> tag alongside pouchdb.js.

if (typeof PouchDB === 'undefined') {
  guardedConsole('error', 'indexeddb adapter plugin error: ' +
    'Cannot find global "PouchDB" object! ' +
    'Did you remember to include pouchdb.js?');
} else {
  PouchDB.plugin(IndexeddbPouchPlugin);
}
5236
5237},{"events":1,"spark-md5":2,"uuid":3}]},{},[18]);