1// PouchDB indexeddb plugin 8.0.1
2(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i<t.length;i++)o(t[i]);return o}return r})()({1:[function(_dereq_,module,exports){
3
4},{}],2:[function(_dereq_,module,exports){
5// Copyright Joyent, Inc. and other Node contributors.
6//
7// Permission is hereby granted, free of charge, to any person obtaining a
8// copy of this software and associated documentation files (the
9// "Software"), to deal in the Software without restriction, including
10// without limitation the rights to use, copy, modify, merge, publish,
11// distribute, sublicense, and/or sell copies of the Software, and to permit
12// persons to whom the Software is furnished to do so, subject to the
13// following conditions:
14//
15// The above copyright notice and this permission notice shall be included
16// in all copies or substantial portions of the Software.
17//
18// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
21// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
22// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
23// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
24// USE OR OTHER DEALINGS IN THE SOFTWARE.
25
26var objectCreate = Object.create || objectCreatePolyfill
27var objectKeys = Object.keys || objectKeysPolyfill
28var bind = Function.prototype.bind || functionBindPolyfill
29
30function EventEmitter() {
31 if (!this._events || !Object.prototype.hasOwnProperty.call(this, '_events')) {
32 this._events = objectCreate(null);
33 this._eventsCount = 0;
34 }
35
36 this._maxListeners = this._maxListeners || undefined;
37}
38module.exports = EventEmitter;
39
40// Backwards-compat with node 0.10.x
41EventEmitter.EventEmitter = EventEmitter;
42
43EventEmitter.prototype._events = undefined;
44EventEmitter.prototype._maxListeners = undefined;
45
46// By default EventEmitters will print a warning if more than 10 listeners are
47// added to it. This is a useful default which helps find memory leaks.
48var defaultMaxListeners = 10;
49
50var hasDefineProperty;
51try {
52 var o = {};
53 if (Object.defineProperty) Object.defineProperty(o, 'x', { value: 0 });
54 hasDefineProperty = o.x === 0;
55} catch (err) { hasDefineProperty = false }
56if (hasDefineProperty) {
57 Object.defineProperty(EventEmitter, 'defaultMaxListeners', {
58 enumerable: true,
59 get: function() {
60 return defaultMaxListeners;
61 },
62 set: function(arg) {
63 // check whether the input is a positive number (whose value is zero or
64 // greater and not a NaN).
65 if (typeof arg !== 'number' || arg < 0 || arg !== arg)
66 throw new TypeError('"defaultMaxListeners" must be a positive number');
67 defaultMaxListeners = arg;
68 }
69 });
70} else {
71 EventEmitter.defaultMaxListeners = defaultMaxListeners;
72}
73
74// Obviously not all Emitters should be limited to 10. This function allows
75// that to be increased. Set to zero for unlimited.
76EventEmitter.prototype.setMaxListeners = function setMaxListeners(n) {
77 if (typeof n !== 'number' || n < 0 || isNaN(n))
78 throw new TypeError('"n" argument must be a positive number');
79 this._maxListeners = n;
80 return this;
81};
82
83function $getMaxListeners(that) {
84 if (that._maxListeners === undefined)
85 return EventEmitter.defaultMaxListeners;
86 return that._maxListeners;
87}
88
89EventEmitter.prototype.getMaxListeners = function getMaxListeners() {
90 return $getMaxListeners(this);
91};
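// Illustrative sketch of the listener-limit knobs defined above ("bus" is a
// hypothetical emitter created only for this example):
//
//   var bus = new EventEmitter();
//   EventEmitter.defaultMaxListeners = 20; // raise the shared default
//   bus.setMaxListeners(0);                // zero means unlimited; disables the leak warning
//   bus.getMaxListeners();                 // -> 0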
92
93// These standalone emit* functions are used to optimize calling of event
94// handlers for fast cases because emit() itself often has a variable number of
95// arguments and can be deoptimized because of that. These functions always have
96// the same number of arguments and thus do not get deoptimized, so the code
97// inside them can execute faster.
98function emitNone(handler, isFn, self) {
99 if (isFn)
100 handler.call(self);
101 else {
102 var len = handler.length;
103 var listeners = arrayClone(handler, len);
104 for (var i = 0; i < len; ++i)
105 listeners[i].call(self);
106 }
107}
108function emitOne(handler, isFn, self, arg1) {
109 if (isFn)
110 handler.call(self, arg1);
111 else {
112 var len = handler.length;
113 var listeners = arrayClone(handler, len);
114 for (var i = 0; i < len; ++i)
115 listeners[i].call(self, arg1);
116 }
117}
118function emitTwo(handler, isFn, self, arg1, arg2) {
119 if (isFn)
120 handler.call(self, arg1, arg2);
121 else {
122 var len = handler.length;
123 var listeners = arrayClone(handler, len);
124 for (var i = 0; i < len; ++i)
125 listeners[i].call(self, arg1, arg2);
126 }
127}
128function emitThree(handler, isFn, self, arg1, arg2, arg3) {
129 if (isFn)
130 handler.call(self, arg1, arg2, arg3);
131 else {
132 var len = handler.length;
133 var listeners = arrayClone(handler, len);
134 for (var i = 0; i < len; ++i)
135 listeners[i].call(self, arg1, arg2, arg3);
136 }
137}
138
139function emitMany(handler, isFn, self, args) {
140 if (isFn)
141 handler.apply(self, args);
142 else {
143 var len = handler.length;
144 var listeners = arrayClone(handler, len);
145 for (var i = 0; i < len; ++i)
146 listeners[i].apply(self, args);
147 }
148}
149
150EventEmitter.prototype.emit = function emit(type) {
151 var er, handler, len, args, i, events;
152 var doError = (type === 'error');
153
154 events = this._events;
155 if (events)
156 doError = (doError && events.error == null);
157 else if (!doError)
158 return false;
159
160 // If there is no 'error' event listener then throw.
161 if (doError) {
162 if (arguments.length > 1)
163 er = arguments[1];
164 if (er instanceof Error) {
165 throw er; // Unhandled 'error' event
166 } else {
167 // At least give some kind of context to the user
168 var err = new Error('Unhandled "error" event. (' + er + ')');
169 err.context = er;
170 throw err;
171 }
172 return false;
173 }
174
175 handler = events[type];
176
177 if (!handler)
178 return false;
179
180 var isFn = typeof handler === 'function';
181 len = arguments.length;
182 switch (len) {
183 // fast cases
184 case 1:
185 emitNone(handler, isFn, this);
186 break;
187 case 2:
188 emitOne(handler, isFn, this, arguments[1]);
189 break;
190 case 3:
191 emitTwo(handler, isFn, this, arguments[1], arguments[2]);
192 break;
193 case 4:
194 emitThree(handler, isFn, this, arguments[1], arguments[2], arguments[3]);
195 break;
196 // slower
197 default:
198 args = new Array(len - 1);
199 for (i = 1; i < len; i++)
200 args[i - 1] = arguments[i];
201 emitMany(handler, isFn, this, args);
202 }
203
204 return true;
205};
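// Illustrative behaviour of emit() as implemented above: it returns false when no
// listener is registered for the type, true otherwise, and an 'error' event with
// no listener throws.
//
//   var e = new EventEmitter();
//   e.emit('ping');                          // -> false (nothing registered)
//   e.on('ping', function () {});
//   e.emit('ping');                          // -> true
//   // e.emit('error', new Error('boom'));   // would throw: no 'error' listener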
206
207function _addListener(target, type, listener, prepend) {
208 var m;
209 var events;
210 var existing;
211
212 if (typeof listener !== 'function')
213 throw new TypeError('"listener" argument must be a function');
214
215 events = target._events;
216 if (!events) {
217 events = target._events = objectCreate(null);
218 target._eventsCount = 0;
219 } else {
220 // To avoid recursion in the case that type === "newListener"! Before
221 // adding it to the listeners, first emit "newListener".
222 if (events.newListener) {
223 target.emit('newListener', type,
224 listener.listener ? listener.listener : listener);
225
226 // Re-assign `events` because a newListener handler could have caused the
227 // this._events to be assigned to a new object
228 events = target._events;
229 }
230 existing = events[type];
231 }
232
233 if (!existing) {
234 // Optimize the case of one listener. Don't need the extra array object.
235 existing = events[type] = listener;
236 ++target._eventsCount;
237 } else {
238 if (typeof existing === 'function') {
239 // Adding the second element, need to change to array.
240 existing = events[type] =
241 prepend ? [listener, existing] : [existing, listener];
242 } else {
243 // If we've already got an array, just append.
244 if (prepend) {
245 existing.unshift(listener);
246 } else {
247 existing.push(listener);
248 }
249 }
250
251 // Check for listener leak
252 if (!existing.warned) {
253 m = $getMaxListeners(target);
254 if (m && m > 0 && existing.length > m) {
255 existing.warned = true;
256 var w = new Error('Possible EventEmitter memory leak detected. ' +
257 existing.length + ' "' + String(type) + '" listeners ' +
258 'added. Use emitter.setMaxListeners() to ' +
259 'increase limit.');
260 w.name = 'MaxListenersExceededWarning';
261 w.emitter = target;
262 w.type = type;
263 w.count = existing.length;
264 if (typeof console === 'object' && console.warn) {
265 console.warn('%s: %s', w.name, w.message);
266 }
267 }
268 }
269 }
270
271 return target;
272}
273
274EventEmitter.prototype.addListener = function addListener(type, listener) {
275 return _addListener(this, type, listener, false);
276};
277
278EventEmitter.prototype.on = EventEmitter.prototype.addListener;
279
280EventEmitter.prototype.prependListener =
281 function prependListener(type, listener) {
282 return _addListener(this, type, listener, true);
283 };
284
285function onceWrapper() {
286 if (!this.fired) {
287 this.target.removeListener(this.type, this.wrapFn);
288 this.fired = true;
289 switch (arguments.length) {
290 case 0:
291 return this.listener.call(this.target);
292 case 1:
293 return this.listener.call(this.target, arguments[0]);
294 case 2:
295 return this.listener.call(this.target, arguments[0], arguments[1]);
296 case 3:
297 return this.listener.call(this.target, arguments[0], arguments[1],
298 arguments[2]);
299 default:
300 var args = new Array(arguments.length);
301 for (var i = 0; i < args.length; ++i)
302 args[i] = arguments[i];
303 this.listener.apply(this.target, args);
304 }
305 }
306}
307
308function _onceWrap(target, type, listener) {
309 var state = { fired: false, wrapFn: undefined, target: target, type: type, listener: listener };
310 var wrapped = bind.call(onceWrapper, state);
311 wrapped.listener = listener;
312 state.wrapFn = wrapped;
313 return wrapped;
314}
315
316EventEmitter.prototype.once = function once(type, listener) {
317 if (typeof listener !== 'function')
318 throw new TypeError('"listener" argument must be a function');
319 this.on(type, _onceWrap(this, type, listener));
320 return this;
321};
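// Illustrative use of once(): the wrapper built by _onceWrap removes itself on the
// first emit, so the listener runs at most one time.
//
//   var emitter = new EventEmitter();
//   emitter.once('ready', function (value) { console.log('ready', value); });
//   emitter.emit('ready', 1);   // logs "ready 1"
//   emitter.emit('ready', 2);   // no output; the wrapped listener was removed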
322
323EventEmitter.prototype.prependOnceListener =
324 function prependOnceListener(type, listener) {
325 if (typeof listener !== 'function')
326 throw new TypeError('"listener" argument must be a function');
327 this.prependListener(type, _onceWrap(this, type, listener));
328 return this;
329 };
330
331// Emits a 'removeListener' event if and only if the listener was removed.
332EventEmitter.prototype.removeListener =
333 function removeListener(type, listener) {
334 var list, events, position, i, originalListener;
335
336 if (typeof listener !== 'function')
337 throw new TypeError('"listener" argument must be a function');
338
339 events = this._events;
340 if (!events)
341 return this;
342
343 list = events[type];
344 if (!list)
345 return this;
346
347 if (list === listener || list.listener === listener) {
348 if (--this._eventsCount === 0)
349 this._events = objectCreate(null);
350 else {
351 delete events[type];
352 if (events.removeListener)
353 this.emit('removeListener', type, list.listener || listener);
354 }
355 } else if (typeof list !== 'function') {
356 position = -1;
357
358 for (i = list.length - 1; i >= 0; i--) {
359 if (list[i] === listener || list[i].listener === listener) {
360 originalListener = list[i].listener;
361 position = i;
362 break;
363 }
364 }
365
366 if (position < 0)
367 return this;
368
369 if (position === 0)
370 list.shift();
371 else
372 spliceOne(list, position);
373
374 if (list.length === 1)
375 events[type] = list[0];
376
377 if (events.removeListener)
378 this.emit('removeListener', type, originalListener || listener);
379 }
380
381 return this;
382 };
383
384EventEmitter.prototype.removeAllListeners =
385 function removeAllListeners(type) {
386 var listeners, events, i;
387
388 events = this._events;
389 if (!events)
390 return this;
391
392 // not listening for removeListener, no need to emit
393 if (!events.removeListener) {
394 if (arguments.length === 0) {
395 this._events = objectCreate(null);
396 this._eventsCount = 0;
397 } else if (events[type]) {
398 if (--this._eventsCount === 0)
399 this._events = objectCreate(null);
400 else
401 delete events[type];
402 }
403 return this;
404 }
405
406 // emit removeListener for all listeners on all events
407 if (arguments.length === 0) {
408 var keys = objectKeys(events);
409 var key;
410 for (i = 0; i < keys.length; ++i) {
411 key = keys[i];
412 if (key === 'removeListener') continue;
413 this.removeAllListeners(key);
414 }
415 this.removeAllListeners('removeListener');
416 this._events = objectCreate(null);
417 this._eventsCount = 0;
418 return this;
419 }
420
421 listeners = events[type];
422
423 if (typeof listeners === 'function') {
424 this.removeListener(type, listeners);
425 } else if (listeners) {
426 // LIFO order
427 for (i = listeners.length - 1; i >= 0; i--) {
428 this.removeListener(type, listeners[i]);
429 }
430 }
431
432 return this;
433 };
434
435function _listeners(target, type, unwrap) {
436 var events = target._events;
437
438 if (!events)
439 return [];
440
441 var evlistener = events[type];
442 if (!evlistener)
443 return [];
444
445 if (typeof evlistener === 'function')
446 return unwrap ? [evlistener.listener || evlistener] : [evlistener];
447
448 return unwrap ? unwrapListeners(evlistener) : arrayClone(evlistener, evlistener.length);
449}
450
451EventEmitter.prototype.listeners = function listeners(type) {
452 return _listeners(this, type, true);
453};
454
455EventEmitter.prototype.rawListeners = function rawListeners(type) {
456 return _listeners(this, type, false);
457};
458
459EventEmitter.listenerCount = function(emitter, type) {
460 if (typeof emitter.listenerCount === 'function') {
461 return emitter.listenerCount(type);
462 } else {
463 return listenerCount.call(emitter, type);
464 }
465};
466
467EventEmitter.prototype.listenerCount = listenerCount;
468function listenerCount(type) {
469 var events = this._events;
470
471 if (events) {
472 var evlistener = events[type];
473
474 if (typeof evlistener === 'function') {
475 return 1;
476 } else if (evlistener) {
477 return evlistener.length;
478 }
479 }
480
481 return 0;
482}
483
484EventEmitter.prototype.eventNames = function eventNames() {
485 return this._eventsCount > 0 ? Reflect.ownKeys(this._events) : [];
486};
487
488// About 1.5x faster than the two-arg version of Array#splice().
489function spliceOne(list, index) {
490 for (var i = index, k = i + 1, n = list.length; k < n; i += 1, k += 1)
491 list[i] = list[k];
492 list.pop();
493}
494
495function arrayClone(arr, n) {
496 var copy = new Array(n);
497 for (var i = 0; i < n; ++i)
498 copy[i] = arr[i];
499 return copy;
500}
501
502function unwrapListeners(arr) {
503 var ret = new Array(arr.length);
504 for (var i = 0; i < ret.length; ++i) {
505 ret[i] = arr[i].listener || arr[i];
506 }
507 return ret;
508}
509
510function objectCreatePolyfill(proto) {
511 var F = function() {};
512 F.prototype = proto;
513 return new F;
514}
515function objectKeysPolyfill(obj) {
516 var keys = [];
517 for (var k in obj) if (Object.prototype.hasOwnProperty.call(obj, k)) {
518 keys.push(k);
519 }
520 return keys;
521}
522function functionBindPolyfill(context) {
523 var fn = this;
524 return function () {
525 return fn.apply(context, arguments);
526 };
527}
528
529},{}],3:[function(_dereq_,module,exports){
530'use strict';
531var types = [
532 _dereq_(1),
533 _dereq_(6),
534 _dereq_(5),
535 _dereq_(4),
536 _dereq_(7),
537 _dereq_(8)
538];
539var draining;
540var currentQueue;
541var queueIndex = -1;
542var queue = [];
543var scheduled = false;
544function cleanUpNextTick() {
545 if (!draining || !currentQueue) {
546 return;
547 }
548 draining = false;
549 if (currentQueue.length) {
550 queue = currentQueue.concat(queue);
551 } else {
552 queueIndex = -1;
553 }
554 if (queue.length) {
555 nextTick();
556 }
557}
558
559//named nextTick for less confusing stack traces
560function nextTick() {
561 if (draining) {
562 return;
563 }
564 scheduled = false;
565 draining = true;
566 var len = queue.length;
567 var timeout = setTimeout(cleanUpNextTick);
568 while (len) {
569 currentQueue = queue;
570 queue = [];
571 while (currentQueue && ++queueIndex < len) {
572 currentQueue[queueIndex].run();
573 }
574 queueIndex = -1;
575 len = queue.length;
576 }
577 currentQueue = null;
578 queueIndex = -1;
579 draining = false;
580 clearTimeout(timeout);
581}
582var scheduleDrain;
583var i = -1;
584var len = types.length;
585while (++i < len) {
586 if (types[i] && types[i].test && types[i].test()) {
587 scheduleDrain = types[i].install(nextTick);
588 break;
589 }
590}
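// Each entry of `types` above exposes test()/install(); the first whose test()
// passes supplies scheduleDrain. In practice that is queueMicrotask where
// available, then MutationObserver, MessageChannel, <script> onreadystatechange,
// and finally a plain setTimeout fallback.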
591// v8 likes predictable objects
592function Item(fun, array) {
593 this.fun = fun;
594 this.array = array;
595}
596Item.prototype.run = function () {
597 var fun = this.fun;
598 var array = this.array;
599 switch (array.length) {
600 case 0:
601 return fun();
602 case 1:
603 return fun(array[0]);
604 case 2:
605 return fun(array[0], array[1]);
606 case 3:
607 return fun(array[0], array[1], array[2]);
608 default:
609 return fun.apply(null, array);
610 }
611
612};
613module.exports = immediate;
614function immediate(task) {
615 var args = new Array(arguments.length - 1);
616 if (arguments.length > 1) {
617 for (var i = 1; i < arguments.length; i++) {
618 args[i - 1] = arguments[i];
619 }
620 }
621 queue.push(new Item(task, args));
622 if (!scheduled && !draining) {
623 scheduled = true;
624 scheduleDrain();
625 }
626}
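// Illustrative use of the exported immediate() scheduler: the task runs
// asynchronously after the current turn, and extra arguments are forwarded to it.
//
//   immediate(function (a, b) { console.log(a + b); }, 1, 2); // logs 3 later
//   console.log('queued first');                              // logs immediately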
627
628},{"1":1,"4":4,"5":5,"6":6,"7":7,"8":8}],4:[function(_dereq_,module,exports){
629(function (global){(function (){
630'use strict';
631
632exports.test = function () {
633 if (global.setImmediate) {
634 // we can only get here in IE10
635 // which doesn't handle postMessage well
636 return false;
637 }
638 return typeof global.MessageChannel !== 'undefined';
639};
640
641exports.install = function (func) {
642 var channel = new global.MessageChannel();
643 channel.port1.onmessage = func;
644 return function () {
645 channel.port2.postMessage(0);
646 };
647};
648}).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
649},{}],5:[function(_dereq_,module,exports){
650(function (global){(function (){
651'use strict';
652//based off rsvp https://github.com/tildeio/rsvp.js
653//license https://github.com/tildeio/rsvp.js/blob/master/LICENSE
654//https://github.com/tildeio/rsvp.js/blob/master/lib/rsvp/asap.js
655
656var Mutation = global.MutationObserver || global.WebKitMutationObserver;
657
658exports.test = function () {
659 return Mutation;
660};
661
662exports.install = function (handle) {
663 var called = 0;
664 var observer = new Mutation(handle);
665 var element = global.document.createTextNode('');
666 observer.observe(element, {
667 characterData: true
668 });
669 return function () {
670 element.data = (called = ++called % 2);
671 };
672};
673}).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
674},{}],6:[function(_dereq_,module,exports){
675(function (global){(function (){
676'use strict';
677exports.test = function () {
678 return typeof global.queueMicrotask === 'function';
679};
680
681exports.install = function (func) {
682 return function () {
683 global.queueMicrotask(func);
684 };
685};
686
687}).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
688},{}],7:[function(_dereq_,module,exports){
689(function (global){(function (){
690'use strict';
691
692exports.test = function () {
693 return 'document' in global && 'onreadystatechange' in global.document.createElement('script');
694};
695
696exports.install = function (handle) {
697 return function () {
698
699 // Create a <script> element; its readystatechange event will be fired asynchronously once it is inserted
700 // into the document. Do so, thus queuing up the task. Remember to clean up once it's been called.
701 var scriptEl = global.document.createElement('script');
702 scriptEl.onreadystatechange = function () {
703 handle();
704
705 scriptEl.onreadystatechange = null;
706 scriptEl.parentNode.removeChild(scriptEl);
707 scriptEl = null;
708 };
709 global.document.documentElement.appendChild(scriptEl);
710
711 return handle;
712 };
713};
714}).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
715},{}],8:[function(_dereq_,module,exports){
716'use strict';
717exports.test = function () {
718 return true;
719};
720
721exports.install = function (t) {
722 return function () {
723 setTimeout(t, 0);
724 };
725};
726},{}],9:[function(_dereq_,module,exports){
727(function (factory) {
728 if (typeof exports === 'object') {
729 // Node/CommonJS
730 module.exports = factory();
731 } else if (typeof define === 'function' && define.amd) {
732 // AMD
733 define(factory);
734 } else {
735 // Browser globals (with support for web workers)
736 var glob;
737
738 try {
739 glob = window;
740 } catch (e) {
741 glob = self;
742 }
743
744 glob.SparkMD5 = factory();
745 }
746}(function (undefined) {
747
748 'use strict';
749
750 /*
751 * Fastest md5 implementation around (JKM md5).
752 * Credits: Joseph Myers
753 *
754 * @see http://www.myersdaily.org/joseph/javascript/md5-text.html
755 * @see http://jsperf.com/md5-shootout/7
756 */
757
758 /* this function is much faster,
759 so if possible we use it. Some IEs
760 are the only ones I know of that
761 need the idiotic second function,
762 generated by an if clause. */
763 var add32 = function (a, b) {
764 return (a + b) & 0xFFFFFFFF;
765 },
766 hex_chr = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'];
767
768
769 function cmn(q, a, b, x, s, t) {
770 a = add32(add32(a, q), add32(x, t));
771 return add32((a << s) | (a >>> (32 - s)), b);
772 }
773
774 function md5cycle(x, k) {
775 var a = x[0],
776 b = x[1],
777 c = x[2],
778 d = x[3];
779
780 a += (b & c | ~b & d) + k[0] - 680876936 | 0;
781 a = (a << 7 | a >>> 25) + b | 0;
782 d += (a & b | ~a & c) + k[1] - 389564586 | 0;
783 d = (d << 12 | d >>> 20) + a | 0;
784 c += (d & a | ~d & b) + k[2] + 606105819 | 0;
785 c = (c << 17 | c >>> 15) + d | 0;
786 b += (c & d | ~c & a) + k[3] - 1044525330 | 0;
787 b = (b << 22 | b >>> 10) + c | 0;
788 a += (b & c | ~b & d) + k[4] - 176418897 | 0;
789 a = (a << 7 | a >>> 25) + b | 0;
790 d += (a & b | ~a & c) + k[5] + 1200080426 | 0;
791 d = (d << 12 | d >>> 20) + a | 0;
792 c += (d & a | ~d & b) + k[6] - 1473231341 | 0;
793 c = (c << 17 | c >>> 15) + d | 0;
794 b += (c & d | ~c & a) + k[7] - 45705983 | 0;
795 b = (b << 22 | b >>> 10) + c | 0;
796 a += (b & c | ~b & d) + k[8] + 1770035416 | 0;
797 a = (a << 7 | a >>> 25) + b | 0;
798 d += (a & b | ~a & c) + k[9] - 1958414417 | 0;
799 d = (d << 12 | d >>> 20) + a | 0;
800 c += (d & a | ~d & b) + k[10] - 42063 | 0;
801 c = (c << 17 | c >>> 15) + d | 0;
802 b += (c & d | ~c & a) + k[11] - 1990404162 | 0;
803 b = (b << 22 | b >>> 10) + c | 0;
804 a += (b & c | ~b & d) + k[12] + 1804603682 | 0;
805 a = (a << 7 | a >>> 25) + b | 0;
806 d += (a & b | ~a & c) + k[13] - 40341101 | 0;
807 d = (d << 12 | d >>> 20) + a | 0;
808 c += (d & a | ~d & b) + k[14] - 1502002290 | 0;
809 c = (c << 17 | c >>> 15) + d | 0;
810 b += (c & d | ~c & a) + k[15] + 1236535329 | 0;
811 b = (b << 22 | b >>> 10) + c | 0;
812
813 a += (b & d | c & ~d) + k[1] - 165796510 | 0;
814 a = (a << 5 | a >>> 27) + b | 0;
815 d += (a & c | b & ~c) + k[6] - 1069501632 | 0;
816 d = (d << 9 | d >>> 23) + a | 0;
817 c += (d & b | a & ~b) + k[11] + 643717713 | 0;
818 c = (c << 14 | c >>> 18) + d | 0;
819 b += (c & a | d & ~a) + k[0] - 373897302 | 0;
820 b = (b << 20 | b >>> 12) + c | 0;
821 a += (b & d | c & ~d) + k[5] - 701558691 | 0;
822 a = (a << 5 | a >>> 27) + b | 0;
823 d += (a & c | b & ~c) + k[10] + 38016083 | 0;
824 d = (d << 9 | d >>> 23) + a | 0;
825 c += (d & b | a & ~b) + k[15] - 660478335 | 0;
826 c = (c << 14 | c >>> 18) + d | 0;
827 b += (c & a | d & ~a) + k[4] - 405537848 | 0;
828 b = (b << 20 | b >>> 12) + c | 0;
829 a += (b & d | c & ~d) + k[9] + 568446438 | 0;
830 a = (a << 5 | a >>> 27) + b | 0;
831 d += (a & c | b & ~c) + k[14] - 1019803690 | 0;
832 d = (d << 9 | d >>> 23) + a | 0;
833 c += (d & b | a & ~b) + k[3] - 187363961 | 0;
834 c = (c << 14 | c >>> 18) + d | 0;
835 b += (c & a | d & ~a) + k[8] + 1163531501 | 0;
836 b = (b << 20 | b >>> 12) + c | 0;
837 a += (b & d | c & ~d) + k[13] - 1444681467 | 0;
838 a = (a << 5 | a >>> 27) + b | 0;
839 d += (a & c | b & ~c) + k[2] - 51403784 | 0;
840 d = (d << 9 | d >>> 23) + a | 0;
841 c += (d & b | a & ~b) + k[7] + 1735328473 | 0;
842 c = (c << 14 | c >>> 18) + d | 0;
843 b += (c & a | d & ~a) + k[12] - 1926607734 | 0;
844 b = (b << 20 | b >>> 12) + c | 0;
845
846 a += (b ^ c ^ d) + k[5] - 378558 | 0;
847 a = (a << 4 | a >>> 28) + b | 0;
848 d += (a ^ b ^ c) + k[8] - 2022574463 | 0;
849 d = (d << 11 | d >>> 21) + a | 0;
850 c += (d ^ a ^ b) + k[11] + 1839030562 | 0;
851 c = (c << 16 | c >>> 16) + d | 0;
852 b += (c ^ d ^ a) + k[14] - 35309556 | 0;
853 b = (b << 23 | b >>> 9) + c | 0;
854 a += (b ^ c ^ d) + k[1] - 1530992060 | 0;
855 a = (a << 4 | a >>> 28) + b | 0;
856 d += (a ^ b ^ c) + k[4] + 1272893353 | 0;
857 d = (d << 11 | d >>> 21) + a | 0;
858 c += (d ^ a ^ b) + k[7] - 155497632 | 0;
859 c = (c << 16 | c >>> 16) + d | 0;
860 b += (c ^ d ^ a) + k[10] - 1094730640 | 0;
861 b = (b << 23 | b >>> 9) + c | 0;
862 a += (b ^ c ^ d) + k[13] + 681279174 | 0;
863 a = (a << 4 | a >>> 28) + b | 0;
864 d += (a ^ b ^ c) + k[0] - 358537222 | 0;
865 d = (d << 11 | d >>> 21) + a | 0;
866 c += (d ^ a ^ b) + k[3] - 722521979 | 0;
867 c = (c << 16 | c >>> 16) + d | 0;
868 b += (c ^ d ^ a) + k[6] + 76029189 | 0;
869 b = (b << 23 | b >>> 9) + c | 0;
870 a += (b ^ c ^ d) + k[9] - 640364487 | 0;
871 a = (a << 4 | a >>> 28) + b | 0;
872 d += (a ^ b ^ c) + k[12] - 421815835 | 0;
873 d = (d << 11 | d >>> 21) + a | 0;
874 c += (d ^ a ^ b) + k[15] + 530742520 | 0;
875 c = (c << 16 | c >>> 16) + d | 0;
876 b += (c ^ d ^ a) + k[2] - 995338651 | 0;
877 b = (b << 23 | b >>> 9) + c | 0;
878
879 a += (c ^ (b | ~d)) + k[0] - 198630844 | 0;
880 a = (a << 6 | a >>> 26) + b | 0;
881 d += (b ^ (a | ~c)) + k[7] + 1126891415 | 0;
882 d = (d << 10 | d >>> 22) + a | 0;
883 c += (a ^ (d | ~b)) + k[14] - 1416354905 | 0;
884 c = (c << 15 | c >>> 17) + d | 0;
885 b += (d ^ (c | ~a)) + k[5] - 57434055 | 0;
886 b = (b << 21 |b >>> 11) + c | 0;
887 a += (c ^ (b | ~d)) + k[12] + 1700485571 | 0;
888 a = (a << 6 | a >>> 26) + b | 0;
889 d += (b ^ (a | ~c)) + k[3] - 1894986606 | 0;
890 d = (d << 10 | d >>> 22) + a | 0;
891 c += (a ^ (d | ~b)) + k[10] - 1051523 | 0;
892 c = (c << 15 | c >>> 17) + d | 0;
893 b += (d ^ (c | ~a)) + k[1] - 2054922799 | 0;
894 b = (b << 21 |b >>> 11) + c | 0;
895 a += (c ^ (b | ~d)) + k[8] + 1873313359 | 0;
896 a = (a << 6 | a >>> 26) + b | 0;
897 d += (b ^ (a | ~c)) + k[15] - 30611744 | 0;
898 d = (d << 10 | d >>> 22) + a | 0;
899 c += (a ^ (d | ~b)) + k[6] - 1560198380 | 0;
900 c = (c << 15 | c >>> 17) + d | 0;
901 b += (d ^ (c | ~a)) + k[13] + 1309151649 | 0;
902 b = (b << 21 |b >>> 11) + c | 0;
903 a += (c ^ (b | ~d)) + k[4] - 145523070 | 0;
904 a = (a << 6 | a >>> 26) + b | 0;
905 d += (b ^ (a | ~c)) + k[11] - 1120210379 | 0;
906 d = (d << 10 | d >>> 22) + a | 0;
907 c += (a ^ (d | ~b)) + k[2] + 718787259 | 0;
908 c = (c << 15 | c >>> 17) + d | 0;
909 b += (d ^ (c | ~a)) + k[9] - 343485551 | 0;
910 b = (b << 21 | b >>> 11) + c | 0;
911
912 x[0] = a + x[0] | 0;
913 x[1] = b + x[1] | 0;
914 x[2] = c + x[2] | 0;
915 x[3] = d + x[3] | 0;
916 }
917
918 function md5blk(s) {
919 var md5blks = [],
920 i; /* Andy King said do it this way. */
921
922 for (i = 0; i < 64; i += 4) {
923 md5blks[i >> 2] = s.charCodeAt(i) + (s.charCodeAt(i + 1) << 8) + (s.charCodeAt(i + 2) << 16) + (s.charCodeAt(i + 3) << 24);
924 }
925 return md5blks;
926 }
927
928 function md5blk_array(a) {
929 var md5blks = [],
930 i; /* Andy King said do it this way. */
931
932 for (i = 0; i < 64; i += 4) {
933 md5blks[i >> 2] = a[i] + (a[i + 1] << 8) + (a[i + 2] << 16) + (a[i + 3] << 24);
934 }
935 return md5blks;
936 }
937
938 function md51(s) {
939 var n = s.length,
940 state = [1732584193, -271733879, -1732584194, 271733878],
941 i,
942 length,
943 tail,
944 tmp,
945 lo,
946 hi;
947
948 for (i = 64; i <= n; i += 64) {
949 md5cycle(state, md5blk(s.substring(i - 64, i)));
950 }
951 s = s.substring(i - 64);
952 length = s.length;
953 tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
954 for (i = 0; i < length; i += 1) {
955 tail[i >> 2] |= s.charCodeAt(i) << ((i % 4) << 3);
956 }
957 tail[i >> 2] |= 0x80 << ((i % 4) << 3);
958 if (i > 55) {
959 md5cycle(state, tail);
960 for (i = 0; i < 16; i += 1) {
961 tail[i] = 0;
962 }
963 }
964
965 // Beware that the final length might not fit in 32 bits so we take care of that
966 tmp = n * 8;
967 tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
968 lo = parseInt(tmp[2], 16);
969 hi = parseInt(tmp[1], 16) || 0;
970
971 tail[14] = lo;
972 tail[15] = hi;
973
974 md5cycle(state, tail);
975 return state;
976 }
977
978 function md51_array(a) {
979 var n = a.length,
980 state = [1732584193, -271733879, -1732584194, 271733878],
981 i,
982 length,
983 tail,
984 tmp,
985 lo,
986 hi;
987
988 for (i = 64; i <= n; i += 64) {
989 md5cycle(state, md5blk_array(a.subarray(i - 64, i)));
990 }
991
992 // Not sure if it is a bug, however IE10 will always produce a sub array of length 1
993 // containing the last element of the parent array if the sub array specified starts
994 // beyond the length of the parent array - weird.
995 // https://connect.microsoft.com/IE/feedback/details/771452/typed-array-subarray-issue
996 a = (i - 64) < n ? a.subarray(i - 64) : new Uint8Array(0);
997
998 length = a.length;
999 tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
1000 for (i = 0; i < length; i += 1) {
1001 tail[i >> 2] |= a[i] << ((i % 4) << 3);
1002 }
1003
1004 tail[i >> 2] |= 0x80 << ((i % 4) << 3);
1005 if (i > 55) {
1006 md5cycle(state, tail);
1007 for (i = 0; i < 16; i += 1) {
1008 tail[i] = 0;
1009 }
1010 }
1011
1012 // Beware that the final length might not fit in 32 bits so we take care of that
1013 tmp = n * 8;
1014 tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
1015 lo = parseInt(tmp[2], 16);
1016 hi = parseInt(tmp[1], 16) || 0;
1017
1018 tail[14] = lo;
1019 tail[15] = hi;
1020
1021 md5cycle(state, tail);
1022
1023 return state;
1024 }
1025
1026 function rhex(n) {
1027 var s = '',
1028 j;
1029 for (j = 0; j < 4; j += 1) {
1030 s += hex_chr[(n >> (j * 8 + 4)) & 0x0F] + hex_chr[(n >> (j * 8)) & 0x0F];
1031 }
1032 return s;
1033 }
1034
1035 function hex(x) {
1036 var i;
1037 for (i = 0; i < x.length; i += 1) {
1038 x[i] = rhex(x[i]);
1039 }
1040 return x.join('');
1041 }
1042
1043 // In some cases the fast add32 function cannot be used..
1044 if (hex(md51('hello')) !== '5d41402abc4b2a76b9719d911017c592') {
1045 add32 = function (x, y) {
1046 var lsw = (x & 0xFFFF) + (y & 0xFFFF),
1047 msw = (x >> 16) + (y >> 16) + (lsw >> 16);
1048 return (msw << 16) | (lsw & 0xFFFF);
1049 };
1050 }
1051
1052 // ---------------------------------------------------
1053
1054 /**
1055 * ArrayBuffer slice polyfill.
1056 *
1057 * @see https://github.com/ttaubert/node-arraybuffer-slice
1058 */
1059
1060 if (typeof ArrayBuffer !== 'undefined' && !ArrayBuffer.prototype.slice) {
1061 (function () {
1062 function clamp(val, length) {
1063 val = (val | 0) || 0;
1064
1065 if (val < 0) {
1066 return Math.max(val + length, 0);
1067 }
1068
1069 return Math.min(val, length);
1070 }
1071
1072 ArrayBuffer.prototype.slice = function (from, to) {
1073 var length = this.byteLength,
1074 begin = clamp(from, length),
1075 end = length,
1076 num,
1077 target,
1078 targetArray,
1079 sourceArray;
1080
1081 if (to !== undefined) {
1082 end = clamp(to, length);
1083 }
1084
1085 if (begin > end) {
1086 return new ArrayBuffer(0);
1087 }
1088
1089 num = end - begin;
1090 target = new ArrayBuffer(num);
1091 targetArray = new Uint8Array(target);
1092
1093 sourceArray = new Uint8Array(this, begin, num);
1094 targetArray.set(sourceArray);
1095
1096 return target;
1097 };
1098 })();
1099 }
1100
1101 // ---------------------------------------------------
1102
1103 /**
1104 * Helpers.
1105 */
1106
1107 function toUtf8(str) {
1108 if (/[\u0080-\uFFFF]/.test(str)) {
1109 str = unescape(encodeURIComponent(str));
1110 }
1111
1112 return str;
1113 }
1114
1115 function utf8Str2ArrayBuffer(str, returnUInt8Array) {
1116 var length = str.length,
1117 buff = new ArrayBuffer(length),
1118 arr = new Uint8Array(buff),
1119 i;
1120
1121 for (i = 0; i < length; i += 1) {
1122 arr[i] = str.charCodeAt(i);
1123 }
1124
1125 return returnUInt8Array ? arr : buff;
1126 }
1127
1128 function arrayBuffer2Utf8Str(buff) {
1129 return String.fromCharCode.apply(null, new Uint8Array(buff));
1130 }
1131
1132 function concatenateArrayBuffers(first, second, returnUInt8Array) {
1133 var result = new Uint8Array(first.byteLength + second.byteLength);
1134
1135 result.set(new Uint8Array(first));
1136 result.set(new Uint8Array(second), first.byteLength);
1137
1138 return returnUInt8Array ? result : result.buffer;
1139 }
1140
1141 function hexToBinaryString(hex) {
1142 var bytes = [],
1143 length = hex.length,
1144 x;
1145
1146 for (x = 0; x < length - 1; x += 2) {
1147 bytes.push(parseInt(hex.substr(x, 2), 16));
1148 }
1149
1150 return String.fromCharCode.apply(String, bytes);
1151 }
1152
1153 // ---------------------------------------------------
1154
1155 /**
1156 * SparkMD5 OOP implementation.
1157 *
1158 * Use this class to perform an incremental md5, otherwise use the
1159 * static methods instead.
1160 */
1161
1162 function SparkMD5() {
1163 // call reset to init the instance
1164 this.reset();
1165 }
1166
1167 /**
1168 * Appends a string.
1169 * A conversion will be applied if a UTF-8 string is detected.
1170 *
1171 * @param {String} str The string to be appended
1172 *
1173 * @return {SparkMD5} The instance itself
1174 */
1175 SparkMD5.prototype.append = function (str) {
1176 // Converts the string to utf8 bytes if necessary
1177 // Then append as binary
1178 this.appendBinary(toUtf8(str));
1179
1180 return this;
1181 };
1182
1183 /**
1184 * Appends a binary string.
1185 *
1186 * @param {String} contents The binary string to be appended
1187 *
1188 * @return {SparkMD5} The instance itself
1189 */
1190 SparkMD5.prototype.appendBinary = function (contents) {
1191 this._buff += contents;
1192 this._length += contents.length;
1193
1194 var length = this._buff.length,
1195 i;
1196
1197 for (i = 64; i <= length; i += 64) {
1198 md5cycle(this._hash, md5blk(this._buff.substring(i - 64, i)));
1199 }
1200
1201 this._buff = this._buff.substring(i - 64);
1202
1203 return this;
1204 };
1205
1206 /**
1207 * Finishes the incremental computation, resetting the internal state and
1208 * returning the result.
1209 *
1210 * @param {Boolean} raw True to get the raw string, false to get the hex string
1211 *
1212 * @return {String} The result
1213 */
1214 SparkMD5.prototype.end = function (raw) {
1215 var buff = this._buff,
1216 length = buff.length,
1217 i,
1218 tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
1219 ret;
1220
1221 for (i = 0; i < length; i += 1) {
1222 tail[i >> 2] |= buff.charCodeAt(i) << ((i % 4) << 3);
1223 }
1224
1225 this._finish(tail, length);
1226 ret = hex(this._hash);
1227
1228 if (raw) {
1229 ret = hexToBinaryString(ret);
1230 }
1231
1232 this.reset();
1233
1234 return ret;
1235 };
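// Illustrative incremental use of the SparkMD5 class defined above; end() both
// returns the digest and resets the instance:
//
//   var spark = new SparkMD5();
//   spark.append('Hi');
//   spark.append(' there');
//   var hexDigest = spark.end();   // md5 of "Hi there" as a hex string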
1236
1237 /**
1238 * Resets the internal state of the computation.
1239 *
1240 * @return {SparkMD5} The instance itself
1241 */
1242 SparkMD5.prototype.reset = function () {
1243 this._buff = '';
1244 this._length = 0;
1245 this._hash = [1732584193, -271733879, -1732584194, 271733878];
1246
1247 return this;
1248 };
1249
1250 /**
1251 * Gets the internal state of the computation.
1252 *
1253 * @return {Object} The state
1254 */
1255 SparkMD5.prototype.getState = function () {
1256 return {
1257 buff: this._buff,
1258 length: this._length,
1259 hash: this._hash.slice()
1260 };
1261 };
1262
1263 /**
1264 * Sets the internal state of the computation.
1265 *
1266 * @param {Object} state The state
1267 *
1268 * @return {SparkMD5} The instance itself
1269 */
1270 SparkMD5.prototype.setState = function (state) {
1271 this._buff = state.buff;
1272 this._length = state.length;
1273 this._hash = state.hash;
1274
1275 return this;
1276 };
1277
1278 /**
1279 * Releases memory used by the incremental buffer and other additional
1280 * resources. If you plan to use the instance again, use reset instead.
1281 */
1282 SparkMD5.prototype.destroy = function () {
1283 delete this._hash;
1284 delete this._buff;
1285 delete this._length;
1286 };
1287
1288 /**
1289 * Finish the final calculation based on the tail.
1290 *
1291 * @param {Array} tail The tail (will be modified)
1292 * @param {Number} length The length of the remaining buffer
1293 */
1294 SparkMD5.prototype._finish = function (tail, length) {
1295 var i = length,
1296 tmp,
1297 lo,
1298 hi;
1299
1300 tail[i >> 2] |= 0x80 << ((i % 4) << 3);
1301 if (i > 55) {
1302 md5cycle(this._hash, tail);
1303 for (i = 0; i < 16; i += 1) {
1304 tail[i] = 0;
1305 }
1306 }
1307
1308 // Do the final computation based on the tail and length
1309 // Beware that the final length may not fit in 32 bits so we take care of that
1310 tmp = this._length * 8;
1311 tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
1312 lo = parseInt(tmp[2], 16);
1313 hi = parseInt(tmp[1], 16) || 0;
1314
1315 tail[14] = lo;
1316 tail[15] = hi;
1317 md5cycle(this._hash, tail);
1318 };
1319
1320 /**
1321 * Performs the md5 hash on a string.
1322 * A conversion will be applied if a UTF-8 string is detected.
1323 *
1324 * @param {String} str The string
1325 * @param {Boolean} [raw] True to get the raw string, false to get the hex string
1326 *
1327 * @return {String} The result
1328 */
1329 SparkMD5.hash = function (str, raw) {
1330 // Converts the string to utf8 bytes if necessary
1331 // Then compute it using the binary function
1332 return SparkMD5.hashBinary(toUtf8(str), raw);
1333 };
1334
1335 /**
1336 * Performs the md5 hash on a binary string.
1337 *
1338 * @param {String} content The binary string
1339 * @param {Boolean} [raw] True to get the raw string, false to get the hex string
1340 *
1341 * @return {String} The result
1342 */
1343 SparkMD5.hashBinary = function (content, raw) {
1344 var hash = md51(content),
1345 ret = hex(hash);
1346
1347 return raw ? hexToBinaryString(ret) : ret;
1348 };
1349
1350 // ---------------------------------------------------
1351
1352 /**
1353 * SparkMD5 OOP implementation for array buffers.
1354 *
1355 * Use this class to perform an incremental md5 ONLY for array buffers.
1356 */
1357 SparkMD5.ArrayBuffer = function () {
1358 // call reset to init the instance
1359 this.reset();
1360 };
1361
1362 /**
1363 * Appends an array buffer.
1364 *
1365 * @param {ArrayBuffer} arr The array to be appended
1366 *
1367 * @return {SparkMD5.ArrayBuffer} The instance itself
1368 */
1369 SparkMD5.ArrayBuffer.prototype.append = function (arr) {
1370 var buff = concatenateArrayBuffers(this._buff.buffer, arr, true),
1371 length = buff.length,
1372 i;
1373
1374 this._length += arr.byteLength;
1375
1376 for (i = 64; i <= length; i += 64) {
1377 md5cycle(this._hash, md5blk_array(buff.subarray(i - 64, i)));
1378 }
1379
1380 this._buff = (i - 64) < length ? new Uint8Array(buff.buffer.slice(i - 64)) : new Uint8Array(0);
1381
1382 return this;
1383 };
1384
1385 /**
1386 * Finishes the incremental computation, resetting the internal state and
1387 * returning the result.
1388 *
1389 * @param {Boolean} raw True to get the raw string, false to get the hex string
1390 *
1391 * @return {String} The result
1392 */
1393 SparkMD5.ArrayBuffer.prototype.end = function (raw) {
1394 var buff = this._buff,
1395 length = buff.length,
1396 tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
1397 i,
1398 ret;
1399
1400 for (i = 0; i < length; i += 1) {
1401 tail[i >> 2] |= buff[i] << ((i % 4) << 3);
1402 }
1403
1404 this._finish(tail, length);
1405 ret = hex(this._hash);
1406
1407 if (raw) {
1408 ret = hexToBinaryString(ret);
1409 }
1410
1411 this.reset();
1412
1413 return ret;
1414 };
1415
1416 /**
1417 * Resets the internal state of the computation.
1418 *
1419 * @return {SparkMD5.ArrayBuffer} The instance itself
1420 */
1421 SparkMD5.ArrayBuffer.prototype.reset = function () {
1422 this._buff = new Uint8Array(0);
1423 this._length = 0;
1424 this._hash = [1732584193, -271733879, -1732584194, 271733878];
1425
1426 return this;
1427 };
1428
1429 /**
1430 * Gets the internal state of the computation.
1431 *
1432 * @return {Object} The state
1433 */
1434 SparkMD5.ArrayBuffer.prototype.getState = function () {
1435 var state = SparkMD5.prototype.getState.call(this);
1436
1437 // Convert buffer to a string
1438 state.buff = arrayBuffer2Utf8Str(state.buff);
1439
1440 return state;
1441 };
1442
1443 /**
1444 * Sets the internal state of the computation.
1445 *
1446 * @param {Object} state The state
1447 *
1448 * @return {SparkMD5.ArrayBuffer} The instance itself
1449 */
1450 SparkMD5.ArrayBuffer.prototype.setState = function (state) {
1451 // Convert string to buffer
1452 state.buff = utf8Str2ArrayBuffer(state.buff, true);
1453
1454 return SparkMD5.prototype.setState.call(this, state);
1455 };
1456
1457 SparkMD5.ArrayBuffer.prototype.destroy = SparkMD5.prototype.destroy;
1458
1459 SparkMD5.ArrayBuffer.prototype._finish = SparkMD5.prototype._finish;
1460
1461 /**
1462 * Performs the md5 hash on an array buffer.
1463 *
1464 * @param {ArrayBuffer} arr The array buffer
1465 * @param {Boolean} [raw] True to get the raw string, false to get the hex one
1466 *
1467 * @return {String} The result
1468 */
1469 SparkMD5.ArrayBuffer.hash = function (arr, raw) {
1470 var hash = md51_array(new Uint8Array(arr)),
1471 ret = hex(hash);
1472
1473 return raw ? hexToBinaryString(ret) : ret;
1474 };
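// Illustrative use of the ArrayBuffer flavour; `chunk1` and `chunk2` are
// hypothetical ArrayBuffers (e.g. slices read from a Blob elsewhere):
//
//   SparkMD5.ArrayBuffer.hash(new Uint8Array([1, 2, 3]).buffer); // one-shot hex digest
//
//   var spark = new SparkMD5.ArrayBuffer();
//   spark.append(chunk1);
//   spark.append(chunk2);
//   var digest = spark.end();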
1475
1476 return SparkMD5;
1477}));
1478
1479},{}],10:[function(_dereq_,module,exports){
1480"use strict";
1481
1482Object.defineProperty(exports, "__esModule", {
1483 value: true
1484});
1485Object.defineProperty(exports, "v1", {
1486 enumerable: true,
1487 get: function () {
1488 return _v.default;
1489 }
1490});
1491Object.defineProperty(exports, "v3", {
1492 enumerable: true,
1493 get: function () {
1494 return _v2.default;
1495 }
1496});
1497Object.defineProperty(exports, "v4", {
1498 enumerable: true,
1499 get: function () {
1500 return _v3.default;
1501 }
1502});
1503Object.defineProperty(exports, "v5", {
1504 enumerable: true,
1505 get: function () {
1506 return _v4.default;
1507 }
1508});
1509Object.defineProperty(exports, "NIL", {
1510 enumerable: true,
1511 get: function () {
1512 return _nil.default;
1513 }
1514});
1515Object.defineProperty(exports, "version", {
1516 enumerable: true,
1517 get: function () {
1518 return _version.default;
1519 }
1520});
1521Object.defineProperty(exports, "validate", {
1522 enumerable: true,
1523 get: function () {
1524 return _validate.default;
1525 }
1526});
1527Object.defineProperty(exports, "stringify", {
1528 enumerable: true,
1529 get: function () {
1530 return _stringify.default;
1531 }
1532});
1533Object.defineProperty(exports, "parse", {
1534 enumerable: true,
1535 get: function () {
1536 return _parse.default;
1537 }
1538});
1539
1540var _v = _interopRequireDefault(_dereq_(18));
1541
1542var _v2 = _interopRequireDefault(_dereq_(19));
1543
1544var _v3 = _interopRequireDefault(_dereq_(21));
1545
1546var _v4 = _interopRequireDefault(_dereq_(22));
1547
1548var _nil = _interopRequireDefault(_dereq_(12));
1549
1550var _version = _interopRequireDefault(_dereq_(24));
1551
1552var _validate = _interopRequireDefault(_dereq_(23));
1553
1554var _stringify = _interopRequireDefault(_dereq_(17));
1555
1556var _parse = _interopRequireDefault(_dereq_(13));
1557
1558function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
1559},{"12":12,"13":13,"17":17,"18":18,"19":19,"21":21,"22":22,"23":23,"24":24}],11:[function(_dereq_,module,exports){
1560"use strict";
1561
1562Object.defineProperty(exports, "__esModule", {
1563 value: true
1564});
1565exports.default = void 0;
1566
1567/*
1568 * Browser-compatible JavaScript MD5
1569 *
1570 * Modification of JavaScript MD5
1571 * https://github.com/blueimp/JavaScript-MD5
1572 *
1573 * Copyright 2011, Sebastian Tschan
1574 * https://blueimp.net
1575 *
1576 * Licensed under the MIT license:
1577 * https://opensource.org/licenses/MIT
1578 *
1579 * Based on
1580 * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message
1581 * Digest Algorithm, as defined in RFC 1321.
1582 * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009
1583 * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet
1584 * Distributed under the BSD License
1585 * See http://pajhome.org.uk/crypt/md5 for more info.
1586 */
1587function md5(bytes) {
1588 if (typeof bytes === 'string') {
1589 const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape
1590
1591 bytes = new Uint8Array(msg.length);
1592
1593 for (let i = 0; i < msg.length; ++i) {
1594 bytes[i] = msg.charCodeAt(i);
1595 }
1596 }
1597
1598 return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8));
1599}
1600/*
1601 * Convert an array of little-endian words to an array of bytes
1602 */
1603
1604
1605function md5ToHexEncodedArray(input) {
1606 const output = [];
1607 const length32 = input.length * 32;
1608 const hexTab = '0123456789abcdef';
1609
1610 for (let i = 0; i < length32; i += 8) {
1611 const x = input[i >> 5] >>> i % 32 & 0xff;
1612 const hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16);
1613 output.push(hex);
1614 }
1615
1616 return output;
1617}
1618/**
1619 * Calculate output length with padding and bit length
1620 */
1621
1622
1623function getOutputLength(inputLength8) {
1624 return (inputLength8 + 64 >>> 9 << 4) + 14 + 1;
1625}
1626/*
1627 * Calculate the MD5 of an array of little-endian words, and a bit length.
1628 */
1629
1630
1631function wordsToMd5(x, len) {
1632 /* append padding */
1633 x[len >> 5] |= 0x80 << len % 32;
1634 x[getOutputLength(len) - 1] = len;
1635 let a = 1732584193;
1636 let b = -271733879;
1637 let c = -1732584194;
1638 let d = 271733878;
1639
1640 for (let i = 0; i < x.length; i += 16) {
1641 const olda = a;
1642 const oldb = b;
1643 const oldc = c;
1644 const oldd = d;
1645 a = md5ff(a, b, c, d, x[i], 7, -680876936);
1646 d = md5ff(d, a, b, c, x[i + 1], 12, -389564586);
1647 c = md5ff(c, d, a, b, x[i + 2], 17, 606105819);
1648 b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330);
1649 a = md5ff(a, b, c, d, x[i + 4], 7, -176418897);
1650 d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426);
1651 c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341);
1652 b = md5ff(b, c, d, a, x[i + 7], 22, -45705983);
1653 a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416);
1654 d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417);
1655 c = md5ff(c, d, a, b, x[i + 10], 17, -42063);
1656 b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162);
1657 a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682);
1658 d = md5ff(d, a, b, c, x[i + 13], 12, -40341101);
1659 c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290);
1660 b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329);
1661 a = md5gg(a, b, c, d, x[i + 1], 5, -165796510);
1662 d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632);
1663 c = md5gg(c, d, a, b, x[i + 11], 14, 643717713);
1664 b = md5gg(b, c, d, a, x[i], 20, -373897302);
1665 a = md5gg(a, b, c, d, x[i + 5], 5, -701558691);
1666 d = md5gg(d, a, b, c, x[i + 10], 9, 38016083);
1667 c = md5gg(c, d, a, b, x[i + 15], 14, -660478335);
1668 b = md5gg(b, c, d, a, x[i + 4], 20, -405537848);
1669 a = md5gg(a, b, c, d, x[i + 9], 5, 568446438);
1670 d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690);
1671 c = md5gg(c, d, a, b, x[i + 3], 14, -187363961);
1672 b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501);
1673 a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467);
1674 d = md5gg(d, a, b, c, x[i + 2], 9, -51403784);
1675 c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473);
1676 b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734);
1677 a = md5hh(a, b, c, d, x[i + 5], 4, -378558);
1678 d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463);
1679 c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562);
1680 b = md5hh(b, c, d, a, x[i + 14], 23, -35309556);
1681 a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060);
1682 d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353);
1683 c = md5hh(c, d, a, b, x[i + 7], 16, -155497632);
1684 b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640);
1685 a = md5hh(a, b, c, d, x[i + 13], 4, 681279174);
1686 d = md5hh(d, a, b, c, x[i], 11, -358537222);
1687 c = md5hh(c, d, a, b, x[i + 3], 16, -722521979);
1688 b = md5hh(b, c, d, a, x[i + 6], 23, 76029189);
1689 a = md5hh(a, b, c, d, x[i + 9], 4, -640364487);
1690 d = md5hh(d, a, b, c, x[i + 12], 11, -421815835);
1691 c = md5hh(c, d, a, b, x[i + 15], 16, 530742520);
1692 b = md5hh(b, c, d, a, x[i + 2], 23, -995338651);
1693 a = md5ii(a, b, c, d, x[i], 6, -198630844);
1694 d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415);
1695 c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905);
1696 b = md5ii(b, c, d, a, x[i + 5], 21, -57434055);
1697 a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571);
1698 d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606);
1699 c = md5ii(c, d, a, b, x[i + 10], 15, -1051523);
1700 b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799);
1701 a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359);
1702 d = md5ii(d, a, b, c, x[i + 15], 10, -30611744);
1703 c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380);
1704 b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649);
1705 a = md5ii(a, b, c, d, x[i + 4], 6, -145523070);
1706 d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379);
1707 c = md5ii(c, d, a, b, x[i + 2], 15, 718787259);
1708 b = md5ii(b, c, d, a, x[i + 9], 21, -343485551);
1709 a = safeAdd(a, olda);
1710 b = safeAdd(b, oldb);
1711 c = safeAdd(c, oldc);
1712 d = safeAdd(d, oldd);
1713 }
1714
1715 return [a, b, c, d];
1716}
1717/*
1718 * Convert an array bytes to an array of little-endian words
1719 * Characters >255 have their high-byte silently ignored.
1720 */
1721
1722
1723function bytesToWords(input) {
1724 if (input.length === 0) {
1725 return [];
1726 }
1727
1728 const length8 = input.length * 8;
1729 const output = new Uint32Array(getOutputLength(length8));
1730
1731 for (let i = 0; i < length8; i += 8) {
1732 output[i >> 5] |= (input[i / 8] & 0xff) << i % 32;
1733 }
1734
1735 return output;
1736}
1737/*
1738 * Add integers, wrapping at 2^32. This uses 16-bit operations internally
1739 * to work around bugs in some JS interpreters.
1740 */
1741
1742
1743function safeAdd(x, y) {
1744 const lsw = (x & 0xffff) + (y & 0xffff);
1745 const msw = (x >> 16) + (y >> 16) + (lsw >> 16);
1746 return msw << 16 | lsw & 0xffff;
1747}
1748/*
1749 * Bitwise rotate a 32-bit number to the left.
1750 */
1751
1752
1753function bitRotateLeft(num, cnt) {
1754 return num << cnt | num >>> 32 - cnt;
1755}
1756/*
1757 * These functions implement the four basic operations the algorithm uses.
1758 */
1759
1760
1761function md5cmn(q, a, b, x, s, t) {
1762 return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b);
1763}
1764
1765function md5ff(a, b, c, d, x, s, t) {
1766 return md5cmn(b & c | ~b & d, a, b, x, s, t);
1767}
1768
1769function md5gg(a, b, c, d, x, s, t) {
1770 return md5cmn(b & d | c & ~d, a, b, x, s, t);
1771}
1772
1773function md5hh(a, b, c, d, x, s, t) {
1774 return md5cmn(b ^ c ^ d, a, b, x, s, t);
1775}
1776
1777function md5ii(a, b, c, d, x, s, t) {
1778 return md5cmn(c ^ (b | ~d), a, b, x, s, t);
1779}
1780
1781var _default = md5;
1782exports.default = _default;
1783},{}],12:[function(_dereq_,module,exports){
1784"use strict";
1785
1786Object.defineProperty(exports, "__esModule", {
1787 value: true
1788});
1789exports.default = void 0;
1790var _default = '00000000-0000-0000-0000-000000000000';
1791exports.default = _default;
1792},{}],13:[function(_dereq_,module,exports){
1793"use strict";
1794
1795Object.defineProperty(exports, "__esModule", {
1796 value: true
1797});
1798exports.default = void 0;
1799
1800var _validate = _interopRequireDefault(_dereq_(23));
1801
1802function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
1803
1804function parse(uuid) {
1805 if (!(0, _validate.default)(uuid)) {
1806 throw TypeError('Invalid UUID');
1807 }
1808
1809 let v;
1810 const arr = new Uint8Array(16); // Parse ########-....-....-....-............
1811
1812 arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;
1813 arr[1] = v >>> 16 & 0xff;
1814 arr[2] = v >>> 8 & 0xff;
1815 arr[3] = v & 0xff; // Parse ........-####-....-....-............
1816
1817 arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;
1818 arr[5] = v & 0xff; // Parse ........-....-####-....-............
1819
1820 arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;
1821 arr[7] = v & 0xff; // Parse ........-....-....-####-............
1822
1823 arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;
1824 arr[9] = v & 0xff; // Parse ........-....-....-....-############
1825 // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes)
1826
1827 arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;
1828 arr[11] = v / 0x100000000 & 0xff;
1829 arr[12] = v >>> 24 & 0xff;
1830 arr[13] = v >>> 16 & 0xff;
1831 arr[14] = v >>> 8 & 0xff;
1832 arr[15] = v & 0xff;
1833 return arr;
1834}
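// Illustrative round-trip with the stringify counterpart defined later in this
// bundle: parse() yields the 16 raw bytes of a UUID, stringify() re-encodes them.
//
//   var bytes = parse('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); // Uint8Array(16)
//   // stringify(bytes) === '6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'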
1835
1836var _default = parse;
1837exports.default = _default;
1838},{"23":23}],14:[function(_dereq_,module,exports){
1839"use strict";
1840
1841Object.defineProperty(exports, "__esModule", {
1842 value: true
1843});
1844exports.default = void 0;
1845var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;
1846exports.default = _default;
1847},{}],15:[function(_dereq_,module,exports){
1848"use strict";
1849
1850Object.defineProperty(exports, "__esModule", {
1851 value: true
1852});
1853exports.default = rng;
1854// Unique ID creation requires a high quality random # generator. In the browser we therefore
1855// require the crypto API and do not support built-in fallback to lower quality random number
1856// generators (like Math.random()).
1857let getRandomValues;
1858const rnds8 = new Uint8Array(16);
1859
1860function rng() {
1861 // lazy load so that environments that need to polyfill have a chance to do so
1862 if (!getRandomValues) {
1863 // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. Also,
1864 // find the complete implementation of crypto (msCrypto) on IE11.
1865 getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto) || typeof msCrypto !== 'undefined' && typeof msCrypto.getRandomValues === 'function' && msCrypto.getRandomValues.bind(msCrypto);
1866
1867 if (!getRandomValues) {
1868 throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported');
1869 }
1870 }
1871
1872 return getRandomValues(rnds8);
1873}
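// Illustrative use of rng() (consumed by the UUID generators elsewhere in this
// bundle); note it refills and returns the same shared 16-byte buffer each call:
//
//   var bytes = rng();   // Uint8Array(16) of cryptographically strong random bytes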
1874},{}],16:[function(_dereq_,module,exports){
1875"use strict";
1876
1877Object.defineProperty(exports, "__esModule", {
1878 value: true
1879});
1880exports.default = void 0;
1881
1882// Adapted from Chris Veness' SHA1 code at
1883// http://www.movable-type.co.uk/scripts/sha1.html
1884function f(s, x, y, z) {
1885 switch (s) {
1886 case 0:
1887 return x & y ^ ~x & z;
1888
1889 case 1:
1890 return x ^ y ^ z;
1891
1892 case 2:
1893 return x & y ^ x & z ^ y & z;
1894
1895 case 3:
1896 return x ^ y ^ z;
1897 }
1898}
1899
1900function ROTL(x, n) {
1901 return x << n | x >>> 32 - n;
1902}
1903
1904function sha1(bytes) {
1905 const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6];
1906 const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0];
1907
1908 if (typeof bytes === 'string') {
1909 const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape
1910
1911 bytes = [];
1912
1913 for (let i = 0; i < msg.length; ++i) {
1914 bytes.push(msg.charCodeAt(i));
1915 }
1916 } else if (!Array.isArray(bytes)) {
1917 // Convert Array-like to Array
1918 bytes = Array.prototype.slice.call(bytes);
1919 }
1920
1921 bytes.push(0x80);
1922 const l = bytes.length / 4 + 2;
1923 const N = Math.ceil(l / 16);
1924 const M = new Array(N);
1925
1926 for (let i = 0; i < N; ++i) {
1927 const arr = new Uint32Array(16);
1928
1929 for (let j = 0; j < 16; ++j) {
1930 arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3];
1931 }
1932
1933 M[i] = arr;
1934 }
1935
1936 M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32);
1937 M[N - 1][14] = Math.floor(M[N - 1][14]);
1938 M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff;
1939
1940 for (let i = 0; i < N; ++i) {
1941 const W = new Uint32Array(80);
1942
1943 for (let t = 0; t < 16; ++t) {
1944 W[t] = M[i][t];
1945 }
1946
1947 for (let t = 16; t < 80; ++t) {
1948 W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1);
1949 }
1950
1951 let a = H[0];
1952 let b = H[1];
1953 let c = H[2];
1954 let d = H[3];
1955 let e = H[4];
1956
1957 for (let t = 0; t < 80; ++t) {
1958 const s = Math.floor(t / 20);
1959 const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0;
1960 e = d;
1961 d = c;
1962 c = ROTL(b, 30) >>> 0;
1963 b = a;
1964 a = T;
1965 }
1966
1967 H[0] = H[0] + a >>> 0;
1968 H[1] = H[1] + b >>> 0;
1969 H[2] = H[2] + c >>> 0;
1970 H[3] = H[3] + d >>> 0;
1971 H[4] = H[4] + e >>> 0;
1972 }
1973
1974 return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff];
1975}
1976
1977var _default = sha1;
1978exports.default = _default;
1979},{}],17:[function(_dereq_,module,exports){
1980"use strict";
1981
1982Object.defineProperty(exports, "__esModule", {
1983 value: true
1984});
1985exports.default = void 0;
1986
1987var _validate = _interopRequireDefault(_dereq_(23));
1988
1989function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
1990
1991/**
1992 * Convert array of 16 byte values to UUID string format of the form:
1993 * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
1994 */
1995const byteToHex = [];
1996
1997for (let i = 0; i < 256; ++i) {
1998 byteToHex.push((i + 0x100).toString(16).substr(1));
1999}
2000
2001function stringify(arr, offset = 0) {
2002 // Note: Be careful editing this code! It's been tuned for performance
2003 // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434
2004 const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one
2005 // of the following:
2006 // - One or more input array values don't map to a hex octet (leading to
2007 // "undefined" in the uuid)
2008 // - Invalid input values for the RFC `version` or `variant` fields
2009
2010 if (!(0, _validate.default)(uuid)) {
2011 throw TypeError('Stringified UUID is invalid');
2012 }
2013
2014 return uuid;
2015}
2016
2017var _default = stringify;
2018exports.default = _default;
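// A minimal usage sketch of stringify() above, assuming a 16-byte input array.
// The helper name exampleStringifyUsage is illustrative only and is never called:
function exampleStringifyUsage() {
  var bytes = new Uint8Array([
    0x6b, 0xa7, 0xb8, 0x10, 0x9d, 0xad, 0x11, 0xd1,
    0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8
  ]);
  // Each byte is looked up in byteToHex and the groups are joined with dashes:
  // stringify(bytes) === '6ba7b810-9dad-11d1-80b4-00c04fd430c8' (the DNS namespace UUID)
  return stringify(bytes);
}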
2019},{"23":23}],18:[function(_dereq_,module,exports){
2020"use strict";
2021
2022Object.defineProperty(exports, "__esModule", {
2023 value: true
2024});
2025exports.default = void 0;
2026
2027var _rng = _interopRequireDefault(_dereq_(15));
2028
2029var _stringify = _interopRequireDefault(_dereq_(17));
2030
2031function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
2032
2033// **`v1()` - Generate time-based UUID**
2034//
2035// Inspired by https://github.com/LiosK/UUID.js
2036// and http://docs.python.org/library/uuid.html
2037let _nodeId;
2038
2039let _clockseq; // Previous uuid creation time
2040
2041
2042let _lastMSecs = 0;
2043let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details
2044
2045function v1(options, buf, offset) {
2046 let i = buf && offset || 0;
2047 const b = buf || new Array(16);
2048 options = options || {};
2049 let node = options.node || _nodeId;
2050 let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not
2051 // specified. We do this lazily to minimize issues related to insufficient
2052 // system entropy. See #189
2053
2054 if (node == null || clockseq == null) {
2055 const seedBytes = options.random || (options.rng || _rng.default)();
2056
2057 if (node == null) {
2058      // Per 4.5, create a 48-bit node id (47 random bits + multicast bit = 1)
2059 node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];
2060 }
2061
2062 if (clockseq == null) {
2063 // Per 4.2.2, randomize (14 bit) clockseq
2064 clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;
2065 }
2066 } // UUID timestamps are 100 nano-second units since the Gregorian epoch,
2067  // (1582-10-15 00:00). JS numbers aren't precise enough for this, so
2068 // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
2069 // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
2070
2071
2072 let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock
2073 // cycle to simulate higher resolution clock
2074
2075 let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)
2076
2077 const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression
2078
2079 if (dt < 0 && options.clockseq === undefined) {
2080 clockseq = clockseq + 1 & 0x3fff;
2081 } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
2082 // time interval
2083
2084
2085 if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {
2086 nsecs = 0;
2087 } // Per 4.2.1.2 Throw error if too many uuids are requested
2088
2089
2090 if (nsecs >= 10000) {
2091 throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");
2092 }
2093
2094 _lastMSecs = msecs;
2095 _lastNSecs = nsecs;
2096 _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch
2097
2098 msecs += 12219292800000; // `time_low`
2099
2100 const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;
2101 b[i++] = tl >>> 24 & 0xff;
2102 b[i++] = tl >>> 16 & 0xff;
2103 b[i++] = tl >>> 8 & 0xff;
2104 b[i++] = tl & 0xff; // `time_mid`
2105
2106 const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;
2107 b[i++] = tmh >>> 8 & 0xff;
2108 b[i++] = tmh & 0xff; // `time_high_and_version`
2109
2110 b[i++] = tmh >>> 24 & 0xf | 0x10; // include version
2111
2112 b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
2113
2114 b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`
2115
2116 b[i++] = clockseq & 0xff; // `node`
2117
2118 for (let n = 0; n < 6; ++n) {
2119 b[i + n] = node[n];
2120 }
2121
2122 return buf || (0, _stringify.default)(b);
2123}
2124
2125var _default = v1;
2126exports.default = _default;
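// Illustrative sketch (never called): the constant 12219292800000 used above is
// the millisecond offset between the Gregorian epoch (1582-10-15, where RFC 4122
// timestamps start) and the Unix epoch (1970-01-01):
function exampleGregorianOffset() {
  var offsetMs = Date.UTC(1970, 0, 1) - Date.UTC(1582, 9, 15);
  return offsetMs === 12219292800000; // true
}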
2127},{"15":15,"17":17}],19:[function(_dereq_,module,exports){
2128"use strict";
2129
2130Object.defineProperty(exports, "__esModule", {
2131 value: true
2132});
2133exports.default = void 0;
2134
2135var _v = _interopRequireDefault(_dereq_(20));
2136
2137var _md = _interopRequireDefault(_dereq_(11));
2138
2139function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
2140
2141const v3 = (0, _v.default)('v3', 0x30, _md.default);
2142var _default = v3;
2143exports.default = _default;
2144},{"11":11,"20":20}],20:[function(_dereq_,module,exports){
2145"use strict";
2146
2147Object.defineProperty(exports, "__esModule", {
2148 value: true
2149});
2150exports.default = _default;
2151exports.URL = exports.DNS = void 0;
2152
2153var _stringify = _interopRequireDefault(_dereq_(17));
2154
2155var _parse = _interopRequireDefault(_dereq_(13));
2156
2157function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
2158
2159function stringToBytes(str) {
2160 str = unescape(encodeURIComponent(str)); // UTF8 escape
2161
2162 const bytes = [];
2163
2164 for (let i = 0; i < str.length; ++i) {
2165 bytes.push(str.charCodeAt(i));
2166 }
2167
2168 return bytes;
2169}
2170
2171const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';
2172exports.DNS = DNS;
2173const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';
2174exports.URL = URL;
2175
2176function _default(name, version, hashfunc) {
2177 function generateUUID(value, namespace, buf, offset) {
2178 if (typeof value === 'string') {
2179 value = stringToBytes(value);
2180 }
2181
2182 if (typeof namespace === 'string') {
2183 namespace = (0, _parse.default)(namespace);
2184 }
2185
2186 if (namespace.length !== 16) {
2187 throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');
2188 } // Compute hash of namespace and value, Per 4.3
2189 // Future: Use spread syntax when supported on all platforms, e.g. `bytes =
2190 // hashfunc([...namespace, ... value])`
2191
2192
2193 let bytes = new Uint8Array(16 + value.length);
2194 bytes.set(namespace);
2195 bytes.set(value, namespace.length);
2196 bytes = hashfunc(bytes);
2197 bytes[6] = bytes[6] & 0x0f | version;
2198 bytes[8] = bytes[8] & 0x3f | 0x80;
2199
2200 if (buf) {
2201 offset = offset || 0;
2202
2203 for (let i = 0; i < 16; ++i) {
2204 buf[offset + i] = bytes[i];
2205 }
2206
2207 return buf;
2208 }
2209
2210 return (0, _stringify.default)(bytes);
2211 } // Function#name is not settable on some platforms (#270)
2212
2213
2214 try {
2215 generateUUID.name = name; // eslint-disable-next-line no-empty
2216 } catch (err) {} // For CommonJS default export support
2217
2218
2219 generateUUID.DNS = DNS;
2220 generateUUID.URL = URL;
2221 return generateUUID;
2222}
2223},{"13":13,"17":17}],21:[function(_dereq_,module,exports){
2224"use strict";
2225
2226Object.defineProperty(exports, "__esModule", {
2227 value: true
2228});
2229exports.default = void 0;
2230
2231var _rng = _interopRequireDefault(_dereq_(15));
2232
2233var _stringify = _interopRequireDefault(_dereq_(17));
2234
2235function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
2236
2237function v4(options, buf, offset) {
2238 options = options || {};
2239
2240 const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
2241
2242
2243 rnds[6] = rnds[6] & 0x0f | 0x40;
2244 rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided
2245
2246 if (buf) {
2247 offset = offset || 0;
2248
2249 for (let i = 0; i < 16; ++i) {
2250 buf[offset + i] = rnds[i];
2251 }
2252
2253 return buf;
2254 }
2255
2256 return (0, _stringify.default)(rnds);
2257}
2258
2259var _default = v4;
2260exports.default = _default;
2261},{"15":15,"17":17}],22:[function(_dereq_,module,exports){
2262"use strict";
2263
2264Object.defineProperty(exports, "__esModule", {
2265 value: true
2266});
2267exports.default = void 0;
2268
2269var _v = _interopRequireDefault(_dereq_(20));
2270
2271var _sha = _interopRequireDefault(_dereq_(16));
2272
2273function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
2274
2275const v5 = (0, _v.default)('v5', 0x50, _sha.default);
2276var _default = v5;
2277exports.default = _default;
2278},{"16":16,"20":20}],23:[function(_dereq_,module,exports){
2279"use strict";
2280
2281Object.defineProperty(exports, "__esModule", {
2282 value: true
2283});
2284exports.default = void 0;
2285
2286var _regex = _interopRequireDefault(_dereq_(14));
2287
2288function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
2289
2290function validate(uuid) {
2291 return typeof uuid === 'string' && _regex.default.test(uuid);
2292}
2293
2294var _default = validate;
2295exports.default = _default;
2296},{"14":14}],24:[function(_dereq_,module,exports){
2297"use strict";
2298
2299Object.defineProperty(exports, "__esModule", {
2300 value: true
2301});
2302exports.default = void 0;
2303
2304var _validate = _interopRequireDefault(_dereq_(23));
2305
2306function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
2307
2308function version(uuid) {
2309 if (!(0, _validate.default)(uuid)) {
2310 throw TypeError('Invalid UUID');
2311 }
2312
2313 return parseInt(uuid.substr(14, 1), 16);
2314}
2315
2316var _default = version;
2317exports.default = _default;
2318},{"23":23}],25:[function(_dereq_,module,exports){
2319'use strict';
2320
2321function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
2322
2323var immediate = _interopDefault(_dereq_(3));
2324var EventEmitter = _interopDefault(_dereq_(2));
2325var uuid = _dereq_(10);
2326var Md5 = _interopDefault(_dereq_(9));
2327
2328function isBinaryObject(object) {
2329 return (typeof ArrayBuffer !== 'undefined' && object instanceof ArrayBuffer) ||
2330 (typeof Blob !== 'undefined' && object instanceof Blob);
2331}
2332
2333function cloneArrayBuffer(buff) {
2334 if (typeof buff.slice === 'function') {
2335 return buff.slice(0);
2336 }
2337 // IE10-11 slice() polyfill
2338 var target = new ArrayBuffer(buff.byteLength);
2339 var targetArray = new Uint8Array(target);
2340 var sourceArray = new Uint8Array(buff);
2341 targetArray.set(sourceArray);
2342 return target;
2343}
2344
2345function cloneBinaryObject(object) {
2346 if (object instanceof ArrayBuffer) {
2347 return cloneArrayBuffer(object);
2348 }
2349 var size = object.size;
2350 var type = object.type;
2351 // Blob
2352 if (typeof object.slice === 'function') {
2353 return object.slice(0, size, type);
2354 }
2355 // PhantomJS slice() replacement
2356 return object.webkitSlice(0, size, type);
2357}
2358
2359// most of this is borrowed from lodash.isPlainObject:
2360// https://github.com/fis-components/lodash.isplainobject/
2361// blob/29c358140a74f252aeb08c9eb28bef86f2217d4a/index.js
2362
2363var funcToString = Function.prototype.toString;
2364var objectCtorString = funcToString.call(Object);
2365
2366function isPlainObject(value) {
2367 var proto = Object.getPrototypeOf(value);
2368 /* istanbul ignore if */
2369 if (proto === null) { // not sure when this happens, but I guess it can
2370 return true;
2371 }
2372 var Ctor = proto.constructor;
2373 return (typeof Ctor == 'function' &&
2374 Ctor instanceof Ctor && funcToString.call(Ctor) == objectCtorString);
2375}
2376
2377function clone(object) {
2378 var newObject;
2379 var i;
2380 var len;
2381
2382 if (!object || typeof object !== 'object') {
2383 return object;
2384 }
2385
2386 if (Array.isArray(object)) {
2387 newObject = [];
2388 for (i = 0, len = object.length; i < len; i++) {
2389 newObject[i] = clone(object[i]);
2390 }
2391 return newObject;
2392 }
2393
2394 // special case: to avoid inconsistencies between IndexedDB
2395 // and other backends, we automatically stringify Dates
2396 if (object instanceof Date && isFinite(object)) {
2397 return object.toISOString();
2398 }
2399
2400 if (isBinaryObject(object)) {
2401 return cloneBinaryObject(object);
2402 }
2403
2404 if (!isPlainObject(object)) {
2405 return object; // don't clone objects like Workers
2406 }
2407
2408 newObject = {};
2409 for (i in object) {
2410 /* istanbul ignore else */
2411 if (Object.prototype.hasOwnProperty.call(object, i)) {
2412 var value = clone(object[i]);
2413 if (typeof value !== 'undefined') {
2414 newObject[i] = value;
2415 }
2416 }
2417 }
2418 return newObject;
2419}
2420
2421function mangle(key) {
2422 return '$' + key;
2423}
2424function unmangle(key) {
2425 return key.substring(1);
2426}
2427function Map$1() {
2428 this._store = {};
2429}
2430Map$1.prototype.get = function (key) {
2431 var mangled = mangle(key);
2432 return this._store[mangled];
2433};
2434Map$1.prototype.set = function (key, value) {
2435 var mangled = mangle(key);
2436 this._store[mangled] = value;
2437 return true;
2438};
2439Map$1.prototype.has = function (key) {
2440 var mangled = mangle(key);
2441 return mangled in this._store;
2442};
2443Map$1.prototype.keys = function () {
2444 return Object.keys(this._store).map(k => unmangle(k));
2445};
2446Map$1.prototype["delete"] = function (key) {
2447 var mangled = mangle(key);
2448 var res = mangled in this._store;
2449 delete this._store[mangled];
2450 return res;
2451};
2452Map$1.prototype.forEach = function (cb) {
2453 var keys = Object.keys(this._store);
2454 for (var i = 0, len = keys.length; i < len; i++) {
2455 var key = keys[i];
2456 var value = this._store[key];
2457 key = unmangle(key);
2458 cb(value, key);
2459 }
2460};
2461Object.defineProperty(Map$1.prototype, 'size', {
2462 get: function () {
2463 return Object.keys(this._store).length;
2464 }
2465});
2466
2467function Set$1(array) {
2468 this._store = new Map$1();
2469
2470 // init with an array
2471 if (array && Array.isArray(array)) {
2472 for (var i = 0, len = array.length; i < len; i++) {
2473 this.add(array[i]);
2474 }
2475 }
2476}
2477Set$1.prototype.add = function (key) {
2478 return this._store.set(key, true);
2479};
2480Set$1.prototype.has = function (key) {
2481 return this._store.has(key);
2482};
2483Set$1.prototype.forEach = function (cb) {
2484 this._store.forEach(function (value, key) {
2485 cb(key);
2486 });
2487};
2488Object.defineProperty(Set$1.prototype, 'size', {
2489 get: function () {
2490 return this._store.size;
2491 }
2492});
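// Illustrative sketch (never called): Map$1/Set$1 store keys on a plain object,
// and mangle() prefixes each key with '$' so user keys such as '__proto__' or
// 'constructor' can never collide with Object.prototype properties:
function exampleMangledMapUsage() {
  var m = new Map$1();
  m.set('__proto__', 1);               // stored internally as '$__proto__'
  return m.get('__proto__') === 1 &&   // safe round-trip
    m.has('toString') === false;       // inherited names don't leak through
}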
2493
2494// Based on https://kangax.github.io/compat-table/es6/ we can sniff out
2495
2496// based on https://github.com/montagejs/collections
2497
2498// like underscore/lodash _.pick()
2499function pick(obj, arr) {
2500 var res = {};
2501 for (var i = 0, len = arr.length; i < len; i++) {
2502 var prop = arr[i];
2503 if (prop in obj) {
2504 res[prop] = obj[prop];
2505 }
2506 }
2507 return res;
2508}
2509
2510var hasLocal;
2511
2512try {
2513 localStorage.setItem('_pouch_check_localstorage', 1);
2514 hasLocal = !!localStorage.getItem('_pouch_check_localstorage');
2515} catch (e) {
2516 hasLocal = false;
2517}
2518
2519function hasLocalStorage() {
2520 return hasLocal;
2521}
2522
2523// Custom nextTick() shim for browsers. In node, this will just be process.nextTick(). We
2524
2525class Changes extends EventEmitter {
2526 constructor() {
2527 super();
2528
2529 this._listeners = {};
2530
2531 if (hasLocalStorage()) {
2532 addEventListener("storage", (e) => {
2533 this.emit(e.key);
2534 });
2535 }
2536 }
2537
2538 addListener(dbName, id, db, opts) {
2539 if (this._listeners[id]) {
2540 return;
2541 }
2542 var inprogress = false;
2543 var self = this;
2544 function eventFunction() {
2545 if (!self._listeners[id]) {
2546 return;
2547 }
2548 if (inprogress) {
2549 inprogress = 'waiting';
2550 return;
2551 }
2552 inprogress = true;
2553 var changesOpts = pick(opts, [
2554 'style', 'include_docs', 'attachments', 'conflicts', 'filter',
2555 'doc_ids', 'view', 'since', 'query_params', 'binary', 'return_docs'
2556 ]);
2557
2558 function onError() {
2559 inprogress = false;
2560 }
2561
2562 db.changes(changesOpts).on('change', function (c) {
2563 if (c.seq > opts.since && !opts.cancelled) {
2564 opts.since = c.seq;
2565 opts.onChange(c);
2566 }
2567 }).on('complete', function () {
2568 if (inprogress === 'waiting') {
2569 immediate(eventFunction);
2570 }
2571 inprogress = false;
2572 }).on('error', onError);
2573 }
2574 this._listeners[id] = eventFunction;
2575 this.on(dbName, eventFunction);
2576 }
2577
2578 removeListener(dbName, id) {
2579 if (!(id in this._listeners)) {
2580 return;
2581 }
2582 super.removeListener(dbName, this._listeners[id]);
2583 delete this._listeners[id];
2584 }
2585
2586 notifyLocalWindows(dbName) {
2587 //do a useless change on a storage thing
2588    //in order to get other windows' listeners to activate
2589 if (hasLocalStorage()) {
2590 localStorage[dbName] = (localStorage[dbName] === "a") ? "b" : "a";
2591 }
2592 }
2593
2594 notify(dbName) {
2595 this.emit(dbName);
2596 this.notifyLocalWindows(dbName);
2597 }
2598}
2599
2600function guardedConsole(method) {
2601 /* istanbul ignore else */
2602 if (typeof console !== 'undefined' && typeof console[method] === 'function') {
2603 var args = Array.prototype.slice.call(arguments, 1);
2604 console[method].apply(console, args);
2605 }
2606}
2607
2608var assign;
2609{
2610 if (typeof Object.assign === 'function') {
2611 assign = Object.assign;
2612 } else {
2613 // lite Object.assign polyfill based on
2614 // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/assign
2615 assign = function (target) {
2616 var to = Object(target);
2617
2618 for (var index = 1; index < arguments.length; index++) {
2619 var nextSource = arguments[index];
2620
2621 if (nextSource != null) { // Skip over if undefined or null
2622 for (var nextKey in nextSource) {
2623 // Avoid bugs when hasOwnProperty is shadowed
2624 if (Object.prototype.hasOwnProperty.call(nextSource, nextKey)) {
2625 to[nextKey] = nextSource[nextKey];
2626 }
2627 }
2628 }
2629 }
2630 return to;
2631 };
2632 }
2633}
2634
2635var $inject_Object_assign = assign;
2636
2637class PouchError extends Error {
2638 constructor(status, error, reason) {
2639 super();
2640 this.status = status;
2641 this.name = error;
2642 this.message = reason;
2643 this.error = true;
2644 }
2645
2646 toString() {
2647 return JSON.stringify({
2648 status: this.status,
2649 name: this.name,
2650 message: this.message,
2651 reason: this.reason
2652 });
2653 }
2654}
2655
2656var UNAUTHORIZED = new PouchError(401, 'unauthorized', "Name or password is incorrect.");
2657var MISSING_BULK_DOCS = new PouchError(400, 'bad_request', "Missing JSON list of 'docs'");
2658var MISSING_DOC = new PouchError(404, 'not_found', 'missing');
2659var REV_CONFLICT = new PouchError(409, 'conflict', 'Document update conflict');
2660var INVALID_ID = new PouchError(400, 'bad_request', '_id field must contain a string');
2661var MISSING_ID = new PouchError(412, 'missing_id', '_id is required for puts');
2662var RESERVED_ID = new PouchError(400, 'bad_request', 'Only reserved document ids may start with underscore.');
2663var NOT_OPEN = new PouchError(412, 'precondition_failed', 'Database not open');
2664var UNKNOWN_ERROR = new PouchError(500, 'unknown_error', 'Database encountered an unknown error');
2665var BAD_ARG = new PouchError(500, 'badarg', 'Some query argument is invalid');
2666var INVALID_REQUEST = new PouchError(400, 'invalid_request', 'Request was invalid');
2667var QUERY_PARSE_ERROR = new PouchError(400, 'query_parse_error', 'Some query parameter is invalid');
2668var DOC_VALIDATION = new PouchError(500, 'doc_validation', 'Bad special document member');
2669var BAD_REQUEST = new PouchError(400, 'bad_request', 'Something wrong with the request');
2670var NOT_AN_OBJECT = new PouchError(400, 'bad_request', 'Document must be a JSON object');
2671var DB_MISSING = new PouchError(404, 'not_found', 'Database not found');
2672var IDB_ERROR = new PouchError(500, 'indexed_db_went_bad', 'unknown');
2673var WSQ_ERROR = new PouchError(500, 'web_sql_went_bad', 'unknown');
2674var LDB_ERROR = new PouchError(500, 'levelDB_went_went_bad', 'unknown');
2675var FORBIDDEN = new PouchError(403, 'forbidden', 'Forbidden by design doc validate_doc_update function');
2676var INVALID_REV = new PouchError(400, 'bad_request', 'Invalid rev format');
2677var FILE_EXISTS = new PouchError(412, 'file_exists', 'The database could not be created, the file already exists.');
2678var MISSING_STUB = new PouchError(412, 'missing_stub', 'A pre-existing attachment stub wasn\'t found');
2679var INVALID_URL = new PouchError(413, 'invalid_url', 'Provided URL is invalid');
2680
2681function createError(error, reason) {
2682 function CustomPouchError(reason) {
2683 // inherit error properties from our parent error manually
2684 // so as to allow proper JSON parsing.
2685 /* jshint ignore:start */
2686 var names = Object.getOwnPropertyNames(error);
2687 for (var i = 0, len = names.length; i < len; i++) {
2688 if (typeof error[names[i]] !== 'function') {
2689 this[names[i]] = error[names[i]];
2690 }
2691 }
2692
2693 if (this.stack === undefined) {
2694 this.stack = (new Error()).stack;
2695 }
2696
2697 /* jshint ignore:end */
2698 if (reason !== undefined) {
2699 this.reason = reason;
2700 }
2701 }
2702 CustomPouchError.prototype = PouchError.prototype;
2703 return new CustomPouchError(reason);
2704}
2705
2706function tryFilter(filter, doc, req) {
2707 try {
2708 return !filter(doc, req);
2709 } catch (err) {
2710 var msg = 'Filter function threw: ' + err.toString();
2711 return createError(BAD_REQUEST, msg);
2712 }
2713}
2714
2715function filterChange(opts) {
2716 var req = {};
2717 var hasFilter = opts.filter && typeof opts.filter === 'function';
2718 req.query = opts.query_params;
2719
2720 return function filter(change) {
2721 if (!change.doc) {
2722 // CSG sends events on the changes feed that don't have documents,
2723 // this hack makes a whole lot of existing code robust.
2724 change.doc = {};
2725 }
2726
2727 var filterReturn = hasFilter && tryFilter(opts.filter, change.doc, req);
2728
2729 if (typeof filterReturn === 'object') {
2730 return filterReturn;
2731 }
2732
2733 if (filterReturn) {
2734 return false;
2735 }
2736
2737 if (!opts.include_docs) {
2738 delete change.doc;
2739 } else if (!opts.attachments) {
2740 for (var att in change.doc._attachments) {
2741 /* istanbul ignore else */
2742 if (Object.prototype.hasOwnProperty.call(change.doc._attachments, att)) {
2743 change.doc._attachments[att].stub = true;
2744 }
2745 }
2746 }
2747 return true;
2748 };
2749}
2750
2751// shim for Function.prototype.name,
2752
2753// Determine if an ID is valid
2754//   - invalid IDs begin with an underscore that does not begin '_design' or
2755// '_local'
2756// - any other string value is a valid id
2757// Returns the specific error object for each case
2758function invalidIdError(id) {
2759 var err;
2760 if (!id) {
2761 err = createError(MISSING_ID);
2762 } else if (typeof id !== 'string') {
2763 err = createError(INVALID_ID);
2764 } else if (/^_/.test(id) && !(/^_(design|local)/).test(id)) {
2765 err = createError(RESERVED_ID);
2766 }
2767 if (err) {
2768 throw err;
2769 }
2770}
2771
2772// Checks if a PouchDB object is "remote" or not. This is
2773
2774// originally parseUri 1.2.2, now patched by us
2775
2776// Based on https://github.com/alexdavid/scope-eval v0.0.3
2777
2778var thisBtoa = function (str) {
2779 return btoa(str);
2780};
2781
2782// Abstracts constructing a Blob object, so it also works in older
2783// browsers that don't support the native Blob constructor (e.g.
2784// old QtWebKit versions, Android < 4.4).
2785function createBlob(parts, properties) {
2786 /* global BlobBuilder,MSBlobBuilder,MozBlobBuilder,WebKitBlobBuilder */
2787 parts = parts || [];
2788 properties = properties || {};
2789 try {
2790 return new Blob(parts, properties);
2791 } catch (e) {
2792 if (e.name !== "TypeError") {
2793 throw e;
2794 }
2795 var Builder = typeof BlobBuilder !== 'undefined' ? BlobBuilder :
2796 typeof MSBlobBuilder !== 'undefined' ? MSBlobBuilder :
2797 typeof MozBlobBuilder !== 'undefined' ? MozBlobBuilder :
2798 WebKitBlobBuilder;
2799 var builder = new Builder();
2800 for (var i = 0; i < parts.length; i += 1) {
2801 builder.append(parts[i]);
2802 }
2803 return builder.getBlob(properties.type);
2804 }
2805}
2806
2807// From http://stackoverflow.com/questions/14967647/ (continues on next line)
2808// encode-decode-image-with-base64-breaks-image (2013-04-21)
2809function binaryStringToArrayBuffer(bin) {
2810 var length = bin.length;
2811 var buf = new ArrayBuffer(length);
2812 var arr = new Uint8Array(buf);
2813 for (var i = 0; i < length; i++) {
2814 arr[i] = bin.charCodeAt(i);
2815 }
2816 return buf;
2817}
2818
2819function binStringToBluffer(binString, type) {
2820 return createBlob([binaryStringToArrayBuffer(binString)], {type: type});
2821}
2822
2823//Can't find original post, but this is close
2824//http://stackoverflow.com/questions/6965107/ (continues on next line)
2825//converting-between-strings-and-arraybuffers
2826function arrayBufferToBinaryString(buffer) {
2827 var binary = '';
2828 var bytes = new Uint8Array(buffer);
2829 var length = bytes.byteLength;
2830 for (var i = 0; i < length; i++) {
2831 binary += String.fromCharCode(bytes[i]);
2832 }
2833 return binary;
2834}
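// Illustrative sketch (never called): binaryStringToArrayBuffer() and
// arrayBufferToBinaryString() above are inverses, mapping each char code to one
// byte and back:
function exampleBinaryRoundTrip() {
  var buf = binaryStringToArrayBuffer('abc');      // ArrayBuffer containing [97, 98, 99]
  return arrayBufferToBinaryString(buf) === 'abc'; // true
}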
2835
2836// shim for browsers that don't support it
2837function readAsBinaryString(blob, callback) {
2838 var reader = new FileReader();
2839 var hasBinaryString = typeof reader.readAsBinaryString === 'function';
2840 reader.onloadend = function (e) {
2841 var result = e.target.result || '';
2842 if (hasBinaryString) {
2843 return callback(result);
2844 }
2845 callback(arrayBufferToBinaryString(result));
2846 };
2847 if (hasBinaryString) {
2848 reader.readAsBinaryString(blob);
2849 } else {
2850 reader.readAsArrayBuffer(blob);
2851 }
2852}
2853
2854// simplified API. universal browser support is assumed
2855function readAsArrayBuffer(blob, callback) {
2856 var reader = new FileReader();
2857 reader.onloadend = function (e) {
2858 var result = e.target.result || new ArrayBuffer(0);
2859 callback(result);
2860 };
2861 reader.readAsArrayBuffer(blob);
2862}
2863
2864// this is not used in the browser
2865
2866var setImmediateShim = self.setImmediate || self.setTimeout;
2867var MD5_CHUNK_SIZE = 32768;
2868
2869function rawToBase64(raw) {
2870 return thisBtoa(raw);
2871}
2872
2873function sliceBlob(blob, start, end) {
2874 if (blob.webkitSlice) {
2875 return blob.webkitSlice(start, end);
2876 }
2877 return blob.slice(start, end);
2878}
2879
2880function appendBlob(buffer, blob, start, end, callback) {
2881 if (start > 0 || end < blob.size) {
2882 // only slice blob if we really need to
2883 blob = sliceBlob(blob, start, end);
2884 }
2885 readAsArrayBuffer(blob, function (arrayBuffer) {
2886 buffer.append(arrayBuffer);
2887 callback();
2888 });
2889}
2890
2891function appendString(buffer, string, start, end, callback) {
2892 if (start > 0 || end < string.length) {
2893 // only create a substring if we really need to
2894 string = string.substring(start, end);
2895 }
2896 buffer.appendBinary(string);
2897 callback();
2898}
2899
2900function binaryMd5(data, callback) {
2901 var inputIsString = typeof data === 'string';
2902 var len = inputIsString ? data.length : data.size;
2903 var chunkSize = Math.min(MD5_CHUNK_SIZE, len);
2904 var chunks = Math.ceil(len / chunkSize);
2905 var currentChunk = 0;
2906 var buffer = inputIsString ? new Md5() : new Md5.ArrayBuffer();
2907
2908 var append = inputIsString ? appendString : appendBlob;
2909
2910 function next() {
2911 setImmediateShim(loadNextChunk);
2912 }
2913
2914 function done() {
2915 var raw = buffer.end(true);
2916 var base64 = rawToBase64(raw);
2917 callback(base64);
2918 buffer.destroy();
2919 }
2920
2921 function loadNextChunk() {
2922 var start = currentChunk * chunkSize;
2923 var end = start + chunkSize;
2924 currentChunk++;
2925 if (currentChunk < chunks) {
2926 append(buffer, data, start, end, next);
2927 } else {
2928 append(buffer, data, start, end, done);
2929 }
2930 }
2931 loadNextChunk();
2932}
2933
2934function stringMd5(string) {
2935 return Md5.hash(string);
2936}
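// Illustrative sketch (never called): binaryMd5() accepts a string or a Blob,
// hashes it in 32 KB chunks via the setImmediate/setTimeout shim so large
// attachments don't block the event loop, and calls back with a base64 digest:
function exampleBinaryMd5Usage() {
  binaryMd5('some attachment body', function (base64Digest) {
    // attachment digests elsewhere in PouchDB take the form 'md5-' + base64Digest
    return base64Digest;
  });
}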
2937
2938/**
2939 * Creates a new revision string that does NOT include the revision height
2940 * For example '56649f1b0506c6ca9fda0746eb0cacdf'
2941 */
2942function rev$$1(doc, deterministic_revs) {
2943 if (!deterministic_revs) {
2944 return uuid.v4().replace(/-/g, '').toLowerCase();
2945 }
2946
2947 var mutateableDoc = $inject_Object_assign({}, doc);
2948 delete mutateableDoc._rev_tree;
2949 return stringMd5(JSON.stringify(mutateableDoc));
2950}
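// Illustrative sketch (never called): with deterministic_revs the hash part of a
// revision is the MD5 of the document (minus its _rev_tree), so writing the same
// document twice yields the same rev; otherwise it is derived from a random v4 UUID:
function exampleDeterministicRev() {
  var doc = {_id: 'mydoc', value: 42};
  var a = rev$$1(doc, true);
  var b = rev$$1(doc, true);
  var c = rev$$1(doc, false);
  return a === b && a !== c; // deterministic revs repeat; random ones (almost surely) differ
}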
2951
2952var uuid$1 = uuid.v4; // mimic old import, only v4 is ever used elsewhere
2953
2954var IDB_NULL = Number.MIN_SAFE_INTEGER;
2955var IDB_FALSE = Number.MIN_SAFE_INTEGER + 1;
2956var IDB_TRUE = Number.MIN_SAFE_INTEGER + 2;
2957
2958// These are the same as below but without the global flag.
2959// We want to use RegExp.test because it's really fast, but the global flag
2960// makes the regex constant stateful (seriously) as it walks through successive matches.
2961var TEST_KEY_INVALID = /^[^a-zA-Z_$]|[^a-zA-Z0-9_$]+/;
2962var TEST_PATH_INVALID = /\\.|(^|\.)[^a-zA-Z_$]|[^a-zA-Z0-9_$.]+/;
2963function needsSanitise(name, isPath) {
2964 if (isPath) {
2965 return TEST_PATH_INVALID.test(name);
2966 } else {
2967 return TEST_KEY_INVALID.test(name);
2968 }
2969}
2970
2971//
2972// IndexedDB only allows valid JS names in its index paths, whereas JSON allows
2973// for any string at all. This converts invalid JS names to valid ones, to allow
2974// for them to be indexed.
2975//
2976// For example, "foo-bar" is a valid JSON key, but cannot be a valid JS name
2977// (because that would be read as foo minus bar).
2978//
2979// Very high level rules for valid JS names are:
2980// - First character cannot start with a number
2981//  - Otherwise all characters must be a-z, A-Z, 0-9, $ or _.
2982// - We allow . unless the name represents a single field, as that represents
2983// a deep index path.
2984//
2985// This is more aggressive than it needs to be, but also simpler.
2986//
2987var KEY_INVALID = new RegExp(TEST_KEY_INVALID.source, 'g');
2988var PATH_INVALID = new RegExp(TEST_PATH_INVALID.source, 'g');
2989var SLASH = '\\'.charCodeAt(0);
2990const IS_DOT = '.'.charCodeAt(0);
2991
2992function sanitise(name, isPath) {
2993 var correctCharacters = function (match) {
2994 var good = '';
2995 for (var i = 0; i < match.length; i++) {
2996 var code = match.charCodeAt(i);
2997 // If you're sanitising a path, a slash character is there to be interpreted
2998 // by whatever parses the path later as "escape the next thing".
2999 //
3000 // e.g., if you want to index THIS string:
3001 // {"foo": {"bar.baz": "THIS"}}
3002 // Your index path would be "foo.bar\.baz".
3003
3004 if (code === IS_DOT && isPath && i === 0) {
3005 good += '.';
3006 } else if (code === SLASH && isPath) {
3007 continue;
3008 } else {
3009 good += '_c' + code + '_';
3010 }
3011 }
3012 return good;
3013 };
3014
3015 if (isPath) {
3016 return name.replace(PATH_INVALID, correctCharacters);
3017 } else {
3018 return name.replace(KEY_INVALID, correctCharacters);
3019 }
3020}
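// Illustrative sketch (never called): a couple of concrete sanitise() inputs.
// Invalid characters become '_c<charCode>_'; in path mode a backslash escapes the
// character after it, so an escaped dot is encoded rather than treated as a separator:
function exampleSanitise() {
  var a = sanitise('foo-bar');             // 'foo_c45_bar' ('-' is char code 45)
  var b = sanitise('foo.bar\\.baz', true); // 'foo.bar_c46_baz' (escaped '.' is char code 46)
  return [a, b];
}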
3021
3022function needsRewrite(data) {
3023 for (var key of Object.keys(data)) {
3024 if (needsSanitise(key)) {
3025 return true;
3026 } else if (data[key] === null || typeof data[key] === 'boolean') {
3027 return true;
3028 } else if (typeof data[key] === 'object') {
3029 return needsRewrite(data[key]);
3030 }
3031 }
3032}
3033
3034function rewrite(data) {
3035 if (!needsRewrite(data)) {
3036 return false;
3037 }
3038
3039 var isArray = Array.isArray(data);
3040 var clone = isArray
3041 ? []
3042 : {};
3043
3044 Object.keys(data).forEach(function (key) {
3045 var safeKey = isArray ? key : sanitise(key);
3046
3047 if (data[key] === null) {
3048 clone[safeKey] = IDB_NULL;
3049 } else if (typeof data[key] === 'boolean') {
3050 clone[safeKey] = data[key] ? IDB_TRUE : IDB_FALSE;
3051 } else if (typeof data[key] === 'object') {
3052 clone[safeKey] = rewrite(data[key]);
3053 } else {
3054 clone[safeKey] = data[key];
3055 }
3056 });
3057
3058 return clone;
3059}
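// Illustrative sketch (never called): rewrite() prepares a document for native
// IndexedDB indexing by sanitising keys and swapping null/true/false for the
// sentinel numbers above, since IndexedDB cannot index null or boolean values:
function exampleRewrite() {
  var out = rewrite({'foo-bar': null, nested: {ok: true}});
  // out is {foo_c45_bar: IDB_NULL, nested: {ok: IDB_TRUE}}
  return out;
}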
3060
3061var DOC_STORE = 'docs';
3062var META_STORE = 'meta';
3063
3064function idbError(callback) {
3065 return function (evt) {
3066 var message = 'unknown_error';
3067 if (evt.target && evt.target.error) {
3068 message = evt.target.error.name || evt.target.error.message;
3069 }
3070 callback(createError(IDB_ERROR, message, evt.type));
3071 };
3072}
3073
3074function processAttachment(name, src, doc, isBinary) {
3075
3076 delete doc._attachments[name].stub;
3077
3078 if (isBinary) {
3079 doc._attachments[name].data =
3080 src.attachments[doc._attachments[name].digest].data;
3081 return Promise.resolve();
3082 }
3083
3084 return new Promise(function (resolve) {
3085 var data = src.attachments[doc._attachments[name].digest].data;
3086 readAsBinaryString(data, function (binString) {
3087 doc._attachments[name].data = thisBtoa(binString);
3088 delete doc._attachments[name].length;
3089 resolve();
3090 });
3091 });
3092}
3093
3094function rawIndexFields(ddoc, viewName) {
3095  // fields are an array of either the string name of the field, or a key/value object
3096 var fields = ddoc.views[viewName].options &&
3097 ddoc.views[viewName].options.def &&
3098 ddoc.views[viewName].options.def.fields || [];
3099
3100 // Either ['foo'] or [{'foo': 'desc'}]
3101 return fields.map(function (field) {
3102 if (typeof field === 'string') {
3103 return field;
3104 } else {
3105 return Object.keys(field)[0];
3106 }
3107 });
3108}
3109
3110/**
3111 * true if the view has a "partial_filter_selector".
3112 */
3113function isPartialFilterView(ddoc, viewName) {
3114 return viewName in ddoc.views &&
3115 ddoc.views[viewName].options &&
3116 ddoc.views[viewName].options.def &&
3117 ddoc.views[viewName].options.def.partial_filter_selector;
3118}
3119
3120function naturalIndexName(fields) {
3121 return '_find_idx/' + fields.join('/');
3122}
3123
3124/**
3125 * Convert the fields the user gave us in the view and convert them to work for
3126 * indexeddb.
3127 *
3128 * fields is an array of field strings. A field string could be one field:
3129 * 'foo'
3130 * Or it could be a json path:
3131 * 'foo.bar'
3132 */
3133function correctIndexFields(fields) {
3134 // Every index has to have deleted at the front, because when we do a query
3135 // we need to filter out deleted documents.
3136 return ['deleted'].concat(
3137 fields.map(function (field) {
3138 if (['_id', '_rev', '_deleted', '_attachments'].includes(field)) {
3139 // These properties are stored at the top level without the underscore
3140 return field.substr(1);
3141 } else {
3142 // The custom document fields are inside the `data` property
3143 return 'data.' + sanitise(field, true);
3144 }
3145 })
3146 );
3147}
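// Illustrative sketch (never called): an index declared on
// ['_id', 'title', 'author.name'] (as found in a design doc's options.def.fields)
// ends up as this IndexedDB keyPath:
function exampleCorrectIndexFields() {
  return correctIndexFields(['_id', 'title', 'author.name']);
  // => ['deleted', 'id', 'data.title', 'data.author.name']
}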
3148
3149//
3150// Core PouchDB schema version. Increment this if we, as a library, want to make
3151// schema changes in indexeddb. See upgradePouchDbSchema()
3152//
3153var POUCHDB_IDB_VERSION = 1;
3154
3155//
3156// Functions that manage a combined indexeddb version, by combining the current
3157// time in millis (which represents user migrations) with a large multiplier
3158// (which represents PouchDB system migrations).
3159//
3160// This lets us use the idb version number to both represent
3161// PouchDB-library-level migrations as well as "user migrations" required for
3162// when design documents trigger the addition or removal of native indexes.
3163//
3164// Given that Number.MAX_SAFE_INTEGER = 9007199254740991
3165//
3166// We can easily use the largest 2-3 digits and either allow:
3167// - 900 system migrations up to 2198/02/18
3168// - or 89 system migrations up to 5050/02/14
3169//
3170// This impl does the former. If this code still exists after 2198 someone send my
3171// descendants a Spacebook message congratulating them on their impressive genes.
3172//
3173// 9007199254740991 <- MAX_SAFE_INTEGER
3174// 10000000000000 <- 10^13
3175// 7199254740991 <- 2198-02-18T16:59:00.991Z
3176//
3177var versionMultiplier = Math.pow(10, 13);
3178function createIdbVersion() {
3179 return (versionMultiplier * POUCHDB_IDB_VERSION) + new Date().getTime();
3180}
3181function getPouchDbVersion(version) {
3182 return Math.floor(version / versionMultiplier);
3183}
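// Illustrative sketch (never called): a worked example of the version packing
// described above, with POUCHDB_IDB_VERSION = 1 and a (hypothetical) user
// migration timestamp of 1600000000000 ms:
function exampleIdbVersionPacking() {
  var idbVersion = (versionMultiplier * POUCHDB_IDB_VERSION) + 1600000000000; // 11600000000000
  return getPouchDbVersion(idbVersion) === POUCHDB_IDB_VERSION; // true, still well under MAX_SAFE_INTEGER
}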
3184
3185function maintainNativeIndexes(openReq, reject) {
3186 var docStore = openReq.transaction.objectStore(DOC_STORE);
3187 var ddocsReq = docStore.getAll(IDBKeyRange.bound('_design/', '_design/\uffff'));
3188
3189 ddocsReq.onsuccess = function (e) {
3190 var results = e.target.result;
3191 var existingIndexNames = Array.from(docStore.indexNames);
3192
3193 // NB: the only thing we're supporting here is the declared indexing
3194    // fields, nothing more.
3195 var expectedIndexes = results.filter(function (row) {
3196 return row.deleted === 0 && row.revs[row.rev].data.views;
3197 }).map(function (row) {
3198 return row.revs[row.rev].data;
3199 }).reduce(function (indexes, ddoc) {
3200 return Object.keys(ddoc.views).reduce(function (acc, viewName) {
3201 var fields = rawIndexFields(ddoc, viewName);
3202
3203 if (fields && fields.length > 0) {
3204 acc[naturalIndexName(fields)] = correctIndexFields(fields);
3205 }
3206
3207 return acc;
3208 }, indexes);
3209 }, {});
3210
3211 var expectedIndexNames = Object.keys(expectedIndexes);
3212
3213 // Delete any indexes that aren't system indexes or expected
3214 var systemIndexNames = ['seq'];
3215 existingIndexNames.forEach(function (index) {
3216 if (systemIndexNames.indexOf(index) === -1 && expectedIndexNames.indexOf(index) === -1) {
3217 docStore.deleteIndex(index);
3218 }
3219 });
3220
3221 // Work out which indexes are missing and create them
3222 var newIndexNames = expectedIndexNames.filter(function (ei) {
3223 return existingIndexNames.indexOf(ei) === -1;
3224 });
3225
3226 try {
3227 newIndexNames.forEach(function (indexName) {
3228 docStore.createIndex(indexName, expectedIndexes[indexName]);
3229 });
3230 } catch (err) {
3231 reject(err);
3232 }
3233 };
3234}
3235
3236function upgradePouchDbSchema(db, pouchdbVersion) {
3237 if (pouchdbVersion < 1) {
3238 var docStore = db.createObjectStore(DOC_STORE, {keyPath : 'id'});
3239 docStore.createIndex('seq', 'seq', {unique: true});
3240
3241 db.createObjectStore(META_STORE, {keyPath: 'id'});
3242 }
3243
3244 // Declare more PouchDB schema changes here
3245 // if (pouchdbVersion < 2) { .. }
3246}
3247
3248function openDatabase(openDatabases, api, opts, resolve, reject) {
3249 var openReq = opts.versionchanged ?
3250 indexedDB.open(opts.name) :
3251 indexedDB.open(opts.name, createIdbVersion());
3252
3253 openReq.onupgradeneeded = function (e) {
3254 if (e.oldVersion > 0 && e.oldVersion < versionMultiplier) {
3255 // This DB was created with the "idb" adapter, **not** this one.
3256 // For now we're going to just error out here: users must manually
3257      // migrate between the two. In the future, depending on performance tests,
3258      // we might silently migrate.
3259 throw new Error('Incorrect adapter: you should specify the "idb" adapter to open this DB');
3260 } else if (e.oldVersion === 0 && e.newVersion < versionMultiplier) {
3261 // Firefox still creates the database with version=1 even if we throw,
3262 // so we need to be sure to destroy the empty database before throwing
3263 indexedDB.deleteDatabase(opts.name);
3264 throw new Error('Database was deleted while open');
3265 }
3266
3267 var db = e.target.result;
3268
3269 var pouchdbVersion = getPouchDbVersion(e.oldVersion);
3270 upgradePouchDbSchema(db, pouchdbVersion);
3271 maintainNativeIndexes(openReq, reject);
3272 };
3273
3274 openReq.onblocked = function (e) {
3275 // AFAICT this only occurs if, after sending `onversionchange` events to
3276 // all other open DBs (ie in different tabs), there are still open
3277 // connections to the DB. In this code we should never see this because we
3278 // close our DBs on these events, and all DB interactions are wrapped in
3279 // safely re-opening the DB.
3280 console.error('onblocked, this should never happen', e);
3281 };
3282
3283 openReq.onsuccess = function (e) {
3284 var idb = e.target.result;
3285
3286 idb.onabort = function (e) {
3287 console.error('Database has a global failure', e.target.error);
3288 delete openDatabases[opts.name];
3289 idb.close();
3290 };
3291
3292 idb.onversionchange = function () {
3293 console.log('Database was made stale, closing handle');
3294 openDatabases[opts.name].versionchanged = true;
3295 idb.close();
3296 };
3297
3298 idb.onclose = function () {
3299 console.log('Database was made stale, closing handle');
3300 if (opts.name in openDatabases) {
3301 openDatabases[opts.name].versionchanged = true;
3302 }
3303 };
3304
3305 var metadata = {id: META_STORE};
3306 var txn = idb.transaction([META_STORE], 'readwrite');
3307
3308 txn.oncomplete = function () {
3309 resolve({idb: idb, metadata: metadata});
3310 };
3311
3312 var metaStore = txn.objectStore(META_STORE);
3313 metaStore.get(META_STORE).onsuccess = function (e) {
3314 metadata = e.target.result || metadata;
3315 var changed = false;
3316
3317 if (!('doc_count' in metadata)) {
3318 changed = true;
3319 metadata.doc_count = 0;
3320 }
3321
3322 if (!('seq' in metadata)) {
3323 changed = true;
3324 metadata.seq = 0;
3325 }
3326
3327 if (!('db_uuid' in metadata)) {
3328 changed = true;
3329 metadata.db_uuid = uuid$1();
3330 }
3331
3332 if (changed) {
3333 metaStore.put(metadata);
3334 }
3335 };
3336 };
3337
3338 openReq.onerror = function (e) {
3339 reject(e.target.error);
3340 };
3341}
3342
3343function setup (openDatabases, api, opts) {
3344 if (!openDatabases[opts.name] || openDatabases[opts.name].versionchanged) {
3345 opts.versionchanged = openDatabases[opts.name] &&
3346 openDatabases[opts.name].versionchanged;
3347
3348 openDatabases[opts.name] = new Promise(function (resolve, reject) {
3349 openDatabase(openDatabases, api, opts, resolve, reject);
3350 });
3351 }
3352
3353 return openDatabases[opts.name];
3354}
3355
3356function info (metadata, callback) {
3357 callback(null, {
3358 doc_count: metadata.doc_count,
3359 update_seq: metadata.seq
3360 });
3361}
3362
3363// We fetch all leaves of the revision tree and sort them based on tree length
3364// and whether they were deleted; undeleted documents with the longest revision
3365// tree (most edits) win.
3366// The final sort algorithm is slightly documented in a sidebar here:
3367// http://guide.couchdb.org/draft/conflicts.html
3368function winningRev(metadata) {
3369 var winningId;
3370 var winningPos;
3371 var winningDeleted;
3372 var toVisit = metadata.rev_tree.slice();
3373 var node;
3374 while ((node = toVisit.pop())) {
3375 var tree = node.ids;
3376 var branches = tree[2];
3377 var pos = node.pos;
3378 if (branches.length) { // non-leaf
3379 for (var i = 0, len = branches.length; i < len; i++) {
3380 toVisit.push({pos: pos + 1, ids: branches[i]});
3381 }
3382 continue;
3383 }
3384 var deleted = !!tree[1].deleted;
3385 var id = tree[0];
3386 // sort by deleted, then pos, then id
3387 if (!winningId || (winningDeleted !== deleted ? winningDeleted :
3388 winningPos !== pos ? winningPos < pos : winningId < id)) {
3389 winningId = id;
3390 winningPos = pos;
3391 winningDeleted = deleted;
3392 }
3393 }
3394
3395 return winningPos + '-' + winningId;
3396}
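// Illustrative sketch (never called): a tiny two-leaf rev tree showing the
// deterministic-winner rule above; the non-deleted leaf beats the deleted one:
function exampleWinningRev() {
  var metadata = {
    rev_tree: [{
      pos: 1,
      ids: ['aaa', {status: 'available'}, [
        ['bbb', {status: 'available'}, []],                // leaf 2-bbb
        ['ccc', {status: 'available', deleted: true}, []]  // deleted leaf 2-ccc
      ]]
    }]
  };
  return winningRev(metadata); // '2-bbb'
}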
3397
3398// Pretty much all below can be combined into a higher order function to
3399// traverse revisions
3400// The return value from the callback will be passed as context to all
3401// children of that node
3402function traverseRevTree(revs, callback) {
3403 var toVisit = revs.slice();
3404
3405 var node;
3406 while ((node = toVisit.pop())) {
3407 var pos = node.pos;
3408 var tree = node.ids;
3409 var branches = tree[2];
3410 var newCtx =
3411 callback(branches.length === 0, pos, tree[0], node.ctx, tree[1]);
3412 for (var i = 0, len = branches.length; i < len; i++) {
3413 toVisit.push({pos: pos + 1, ids: branches[i], ctx: newCtx});
3414 }
3415 }
3416}
3417
3418function sortByPos(a, b) {
3419 return a.pos - b.pos;
3420}
3421
3422function collectLeaves(revs) {
3423 var leaves = [];
3424 traverseRevTree(revs, function (isLeaf, pos, id, acc, opts) {
3425 if (isLeaf) {
3426 leaves.push({rev: pos + "-" + id, pos: pos, opts: opts});
3427 }
3428 });
3429 leaves.sort(sortByPos).reverse();
3430 for (var i = 0, len = leaves.length; i < len; i++) {
3431 delete leaves[i].pos;
3432 }
3433 return leaves;
3434}
3435
3436// returns the revs of all conflicting leaves, i.e. leaves that
3437// 1. are not deleted and
3438// 2. are different from the winning revision
3439function collectConflicts(metadata) {
3440 var win = winningRev(metadata);
3441 var leaves = collectLeaves(metadata.rev_tree);
3442 var conflicts = [];
3443 for (var i = 0, len = leaves.length; i < len; i++) {
3444 var leaf = leaves[i];
3445 if (leaf.rev !== win && !leaf.opts.deleted) {
3446 conflicts.push(leaf.rev);
3447 }
3448 }
3449 return conflicts;
3450}
3451
3452// compact a tree by marking its non-leafs as missing,
3453// and return a list of revs to delete
3454function compactTree(metadata) {
3455 var revs = [];
3456 traverseRevTree(metadata.rev_tree, function (isLeaf, pos,
3457 revHash, ctx, opts) {
3458 if (opts.status === 'available' && !isLeaf) {
3459 revs.push(pos + '-' + revHash);
3460 opts.status = 'missing';
3461 }
3462 });
3463 return revs;
3464}
3465
3466// `findPathToLeaf()` returns an array of revs that goes from the specified
3467
3468// build up a list of all the paths to the leafs in this revision tree
3469function rootToLeaf(revs) {
3470 var paths = [];
3471 var toVisit = revs.slice();
3472 var node;
3473 while ((node = toVisit.pop())) {
3474 var pos = node.pos;
3475 var tree = node.ids;
3476 var id = tree[0];
3477 var opts = tree[1];
3478 var branches = tree[2];
3479 var isLeaf = branches.length === 0;
3480
3481 var history = node.history ? node.history.slice() : [];
3482 history.push({id: id, opts: opts});
3483 if (isLeaf) {
3484 paths.push({pos: (pos + 1 - history.length), ids: history});
3485 }
3486 for (var i = 0, len = branches.length; i < len; i++) {
3487 toVisit.push({pos: pos + 1, ids: branches[i], history: history});
3488 }
3489 }
3490 return paths.reverse();
3491}
3492
3493// for a better overview of what this is doing, read:
3494
3495function sortByPos$1(a, b) {
3496 return a.pos - b.pos;
3497}
3498
3499// classic binary search
3500function binarySearch(arr, item, comparator) {
3501 var low = 0;
3502 var high = arr.length;
3503 var mid;
3504 while (low < high) {
3505 mid = (low + high) >>> 1;
3506 if (comparator(arr[mid], item) < 0) {
3507 low = mid + 1;
3508 } else {
3509 high = mid;
3510 }
3511 }
3512 return low;
3513}
3514
3515// assuming the arr is sorted, insert the item in the proper place
3516function insertSorted(arr, item, comparator) {
3517 var idx = binarySearch(arr, item, comparator);
3518 arr.splice(idx, 0, item);
3519}
3520
3521// Turn a path as a flat array into a tree with a single branch.
3522// If any revisions should be stemmed from the beginning of the array, that
3523// count is passed in as the second argument
3524function pathToTree(path, numStemmed) {
3525 var root;
3526 var leaf;
3527 for (var i = numStemmed, len = path.length; i < len; i++) {
3528 var node = path[i];
3529 var currentLeaf = [node.id, node.opts, []];
3530 if (leaf) {
3531 leaf[2].push(currentLeaf);
3532 leaf = currentLeaf;
3533 } else {
3534 root = leaf = currentLeaf;
3535 }
3536 }
3537 return root;
3538}
3539
3540// compare the IDs of two trees
3541function compareTree(a, b) {
3542 return a[0] < b[0] ? -1 : 1;
3543}
3544
3545// Merge two trees together
3546// The roots of tree1 and tree2 must be the same revision
3547function mergeTree(in_tree1, in_tree2) {
3548 var queue = [{tree1: in_tree1, tree2: in_tree2}];
3549 var conflicts = false;
3550 while (queue.length > 0) {
3551 var item = queue.pop();
3552 var tree1 = item.tree1;
3553 var tree2 = item.tree2;
3554
3555 if (tree1[1].status || tree2[1].status) {
3556 tree1[1].status =
3557 (tree1[1].status === 'available' ||
3558 tree2[1].status === 'available') ? 'available' : 'missing';
3559 }
3560
3561 for (var i = 0; i < tree2[2].length; i++) {
3562 if (!tree1[2][0]) {
3563 conflicts = 'new_leaf';
3564 tree1[2][0] = tree2[2][i];
3565 continue;
3566 }
3567
3568 var merged = false;
3569 for (var j = 0; j < tree1[2].length; j++) {
3570 if (tree1[2][j][0] === tree2[2][i][0]) {
3571 queue.push({tree1: tree1[2][j], tree2: tree2[2][i]});
3572 merged = true;
3573 }
3574 }
3575 if (!merged) {
3576 conflicts = 'new_branch';
3577 insertSorted(tree1[2], tree2[2][i], compareTree);
3578 }
3579 }
3580 }
3581 return {conflicts: conflicts, tree: in_tree1};
3582}
3583
3584function doMerge(tree, path, dontExpand) {
3585 var restree = [];
3586 var conflicts = false;
3587 var merged = false;
3588 var res;
3589
3590 if (!tree.length) {
3591 return {tree: [path], conflicts: 'new_leaf'};
3592 }
3593
3594 for (var i = 0, len = tree.length; i < len; i++) {
3595 var branch = tree[i];
3596 if (branch.pos === path.pos && branch.ids[0] === path.ids[0]) {
3597 // Paths start at the same position and have the same root, so they need
3598      // to be merged
3599 res = mergeTree(branch.ids, path.ids);
3600 restree.push({pos: branch.pos, ids: res.tree});
3601 conflicts = conflicts || res.conflicts;
3602 merged = true;
3603 } else if (dontExpand !== true) {
3604 // The paths start at a different position, take the earliest path and
3605      // traverse up until it is at the same point from root as the path we
3606      // want to merge. If the keys match we return the longer path with the
3607      // other merged in. After stemming we don't want to expand the trees.
3608
3609 var t1 = branch.pos < path.pos ? branch : path;
3610 var t2 = branch.pos < path.pos ? path : branch;
3611 var diff = t2.pos - t1.pos;
3612
3613 var candidateParents = [];
3614
3615 var trees = [];
3616 trees.push({ids: t1.ids, diff: diff, parent: null, parentIdx: null});
3617 while (trees.length > 0) {
3618 var item = trees.pop();
3619 if (item.diff === 0) {
3620 if (item.ids[0] === t2.ids[0]) {
3621 candidateParents.push(item);
3622 }
3623 continue;
3624 }
3625 var elements = item.ids[2];
3626 for (var j = 0, elementsLen = elements.length; j < elementsLen; j++) {
3627 trees.push({
3628 ids: elements[j],
3629 diff: item.diff - 1,
3630 parent: item.ids,
3631 parentIdx: j
3632 });
3633 }
3634 }
3635
3636 var el = candidateParents[0];
3637
3638 if (!el) {
3639 restree.push(branch);
3640 } else {
3641 res = mergeTree(el.ids, t2.ids);
3642 el.parent[2][el.parentIdx] = res.tree;
3643 restree.push({pos: t1.pos, ids: t1.ids});
3644 conflicts = conflicts || res.conflicts;
3645 merged = true;
3646 }
3647 } else {
3648 restree.push(branch);
3649 }
3650 }
3651
3652  // We didn't find an existing branch to merge the path into, so add it as a new tree
3653 if (!merged) {
3654 restree.push(path);
3655 }
3656
3657 restree.sort(sortByPos$1);
3658
3659 return {
3660 tree: restree,
3661 conflicts: conflicts || 'internal_node'
3662 };
3663}
3664
3665// To ensure we don't grow the revision tree infinitely, we stem old revisions
3666function stem(tree, depth) {
3667 // First we break out the tree into a complete list of root to leaf paths
3668 var paths = rootToLeaf(tree);
3669 var stemmedRevs;
3670
3671 var result;
3672 for (var i = 0, len = paths.length; i < len; i++) {
3673 // Then for each path, we cut off the start of the path based on the
3674 // `depth` to stem to, and generate a new set of flat trees
3675 var path = paths[i];
3676 var stemmed = path.ids;
3677 var node;
3678 if (stemmed.length > depth) {
3679 // only do the stemming work if we actually need to stem
3680 if (!stemmedRevs) {
3681 stemmedRevs = {}; // avoid allocating this object unnecessarily
3682 }
3683 var numStemmed = stemmed.length - depth;
3684 node = {
3685 pos: path.pos + numStemmed,
3686 ids: pathToTree(stemmed, numStemmed)
3687 };
3688
3689 for (var s = 0; s < numStemmed; s++) {
3690 var rev = (path.pos + s) + '-' + stemmed[s].id;
3691 stemmedRevs[rev] = true;
3692 }
3693 } else { // no need to actually stem
3694 node = {
3695 pos: path.pos,
3696 ids: pathToTree(stemmed, 0)
3697 };
3698 }
3699
3700    // Then we remerge all those flat trees together, ensuring that we don't
3701 // connect trees that would go beyond the depth limit
3702 if (result) {
3703 result = doMerge(result, node, true).tree;
3704 } else {
3705 result = [node];
3706 }
3707 }
3708
3709 // this is memory-heavy per Chrome profiler, avoid unless we actually stemmed
3710 if (stemmedRevs) {
3711 traverseRevTree(result, function (isLeaf, pos, revHash) {
3712 // some revisions may have been removed in a branch but not in another
3713 delete stemmedRevs[pos + '-' + revHash];
3714 });
3715 }
3716
3717 return {
3718 tree: result,
3719 revs: stemmedRevs ? Object.keys(stemmedRevs) : []
3720 };
3721}
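// Illustrative sketch (never called): stemming a linear three-rev path to a depth
// of 2 drops the oldest rev, re-roots the tree at pos 2 and reports what was removed:
function exampleStem() {
  var tree = [{pos: 1, ids: ['a', {status: 'missing'}, [
    ['b', {status: 'missing'}, [
      ['c', {status: 'available'}, []]
    ]]
  ]]}];
  var stemmed = stem(tree, 2);
  // stemmed.tree is [{pos: 2, ids: ['b', {status: 'missing'}, [['c', {status: 'available'}, []]]]}]
  // stemmed.revs is ['1-a']
  return stemmed;
}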
3722
3723function merge(tree, path, depth) {
3724 var newTree = doMerge(tree, path);
3725 var stemmed = stem(newTree.tree, depth);
3726 return {
3727 tree: stemmed.tree,
3728 stemmedRevs: stemmed.revs,
3729 conflicts: newTree.conflicts
3730 };
3731}
3732
3733// this method removes a leaf from a rev tree, independent of its status.
3734// e.g., removing an available leaf could leave its predecessor as a
3735// missing leaf, corrupting the tree.
3736function removeLeafFromRevTree(tree, leafRev) {
3737 return tree.flatMap((path) => {
3738 path = removeLeafFromPath(path, leafRev);
3739 return path ? [path] : [];
3740 });
3741}
3742
3743function removeLeafFromPath(path, leafRev) {
3744 const tree = clone(path);
3745 const toVisit = [tree];
3746 let node;
3747
3748 while ((node = toVisit.pop())) {
3749 const { pos, ids: [id, , branches], parent } = node;
3750 const isLeaf = branches.length === 0;
3751 const hash = `${pos}-${id}`;
3752
3753 if (isLeaf && hash === leafRev) {
3754 if (!parent) {
3755 // FIXME: we're facing the root, and probably shouldn't just return an empty array (object? null?).
3756 return null;
3757 }
3758
3759 parent.ids[2] = parent.ids[2].filter(function (branchNode) {
3760 return branchNode[0] !== id;
3761 });
3762 return tree;
3763 }
3764
3765 for (let i = 0, len = branches.length; i < len; i++) {
3766 toVisit.push({ pos: pos + 1, ids: branches[i], parent: node });
3767 }
3768 }
3769 return tree;
3770}
3771
3772// return true if a rev exists in the rev tree, false otherwise
3773
3774// returns the current leaf node for a given revision
3775function latest(rev, metadata) {
3776 var toVisit = metadata.rev_tree.slice();
3777 var node;
3778 while ((node = toVisit.pop())) {
3779 var pos = node.pos;
3780 var tree = node.ids;
3781 var id = tree[0];
3782 var opts = tree[1];
3783 var branches = tree[2];
3784 var isLeaf = branches.length === 0;
3785
3786 var history = node.history ? node.history.slice() : [];
3787 history.push({id: id, pos: pos, opts: opts});
3788
3789 if (isLeaf) {
3790 for (var i = 0, len = history.length; i < len; i++) {
3791 var historyNode = history[i];
3792 var historyRev = historyNode.pos + '-' + historyNode.id;
3793
3794 if (historyRev === rev) {
3795 // return the rev of this leaf
3796 return pos + '-' + id;
3797 }
3798 }
3799 }
3800
3801 for (var j = 0, l = branches.length; j < l; j++) {
3802 toVisit.push({pos: pos + 1, ids: branches[j], history: history});
3803 }
3804 }
3805
3806 /* istanbul ignore next */
3807 throw new Error('Unable to resolve latest revision for id ' + metadata.id + ', rev ' + rev);
3808}
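// Illustrative example (revs are made up): latest() walks every branch and,
// for the branch whose history contains the requested rev, returns that
// branch's leaf. If metadata.rev_tree encodes 1-a -> 2-b -> 3-c, then
// latest('1-a', metadata) === '3-c'.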
3809
3810function get (txn, id, opts, callback) {
3811 if (txn.error) {
3812 return callback(txn.error);
3813 }
3814
3815 txn.txn.objectStore(DOC_STORE).get(id).onsuccess = function (e) {
3816 var doc = e.target.result;
3817 var rev;
3818 if (!opts.rev) {
3819 rev = (doc && doc.rev);
3820 } else {
3821 rev = opts.latest ? latest(opts.rev, doc) : opts.rev;
3822 }
3823
3824 if (!doc || (doc.deleted && !opts.rev) || !(rev in doc.revs)) {
3825 callback(createError(MISSING_DOC, 'missing'));
3826 return;
3827 }
3828
3829 var result = doc.revs[rev].data;
3830 result._id = doc.id;
3831 result._rev = rev;
3832
3833 // WARNING: expecting possible old format
3834 // TODO: why are we passing the transaction in the context?
3835 // It's not clear we ever thread these txns usefully
3836 callback(null, {
3837 doc: result,
3838 metadata: doc,
3839 ctx: txn
3840 });
3841 };
3842}
3843
3844function parseAttachment(attachment, opts, cb) {
3845 if (opts.binary) {
3846 return cb(null, attachment);
3847 } else {
3848 readAsBinaryString(attachment, function (binString) {
3849 cb(null, thisBtoa(binString));
3850 });
3851 }
3852}
3853
3854function getAttachment(txn, docId, attachId, _, opts, cb) {
3855 if (txn.error) {
3856 return cb(txn.error);
3857 }
3858
3859 var attachment;
3860
3861 txn.txn.objectStore(DOC_STORE).get(docId).onsuccess = function (e) {
3862 var doc = e.target.result;
3863 var rev = doc.revs[opts.rev || doc.rev].data;
3864 var digest = rev._attachments[attachId].digest;
3865 attachment = doc.attachments[digest].data;
3866 };
3867
3868 txn.txn.oncomplete = function () {
3869 parseAttachment(attachment, opts, cb);
3870 };
3871
3872 txn.txn.onabort = cb;
3873}
3874
3875function toObject(array) {
3876 return array.reduce(function (obj, item) {
3877 obj[item] = true;
3878 return obj;
3879 }, {});
3880}
3881// List of top level reserved words for doc
3882var reservedWords = toObject([
3883 '_id',
3884 '_rev',
3885 '_access',
3886 '_attachments',
3887 '_deleted',
3888 '_revisions',
3889 '_revs_info',
3890 '_conflicts',
3891 '_deleted_conflicts',
3892 '_local_seq',
3893 '_rev_tree',
3894 // replication documents
3895 '_replication_id',
3896 '_replication_state',
3897 '_replication_state_time',
3898 '_replication_state_reason',
3899 '_replication_stats',
3900 // Specific to Couchbase Sync Gateway
3901 '_removed'
3902]);
3903
3904// List of reserved words that should end up in the document
3905var dataWords = toObject([
3906 '_access',
3907 '_attachments',
3908 // replication documents
3909 '_replication_id',
3910 '_replication_state',
3911 '_replication_state_time',
3912 '_replication_state_reason',
3913 '_replication_stats'
3914]);
3915
3916function parseRevisionInfo(rev) {
3917 if (!/^\d+-/.test(rev)) {
3918 return createError(INVALID_REV);
3919 }
3920 var idx = rev.indexOf('-');
3921 var left = rev.substring(0, idx);
3922 var right = rev.substring(idx + 1);
3923 return {
3924 prefix: parseInt(left, 10),
3925 id: right
3926 };
3927}
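// Illustrative example: parseRevisionInfo('3-abc123') returns
// {prefix: 3, id: 'abc123'}, while a malformed rev such as 'abc123' yields
// an INVALID_REV error object instead.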
3928
3929function makeRevTreeFromRevisions(revisions, opts) {
3930 var pos = revisions.start - revisions.ids.length + 1;
3931
3932 var revisionIds = revisions.ids;
3933 var ids = [revisionIds[0], opts, []];
3934
3935 for (var i = 1, len = revisionIds.length; i < len; i++) {
3936 ids = [revisionIds[i], {status: 'missing'}, [ids]];
3937 }
3938
3939 return [{
3940 pos: pos,
3941 ids: ids
3942 }];
3943}
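// Illustrative example (hashes are made up): a CouchDB _revisions object
// lists hashes newest-first, so {start: 3, ids: ['c', 'b', 'a']} becomes a
// single branch rooted at pos 1, with the ancestors marked as missing:
//
//   [{pos: 1, ids: ['a', {status: 'missing'}, [
//     ['b', {status: 'missing'}, [
//       ['c', opts, []]                       // 3-c, the rev being written
//     ]]
//   ]]}]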
3944
3945// Preprocess documents, parse their revisions, assign an id and a
3946// revision for new writes that are missing them, etc
3947function parseDoc(doc, newEdits, dbOpts) {
3948 if (!dbOpts) {
3949 dbOpts = {
3950 deterministic_revs: true
3951 };
3952 }
3953
3954 var nRevNum;
3955 var newRevId;
3956 var revInfo;
3957 var opts = {status: 'available'};
3958 if (doc._deleted) {
3959 opts.deleted = true;
3960 }
3961
3962 if (newEdits) {
3963 if (!doc._id) {
3964 doc._id = uuid$1();
3965 }
3966 newRevId = rev$$1(doc, dbOpts.deterministic_revs);
3967 if (doc._rev) {
3968 revInfo = parseRevisionInfo(doc._rev);
3969 if (revInfo.error) {
3970 return revInfo;
3971 }
3972 doc._rev_tree = [{
3973 pos: revInfo.prefix,
3974 ids: [revInfo.id, {status: 'missing'}, [[newRevId, opts, []]]]
3975 }];
3976 nRevNum = revInfo.prefix + 1;
3977 } else {
3978 doc._rev_tree = [{
3979 pos: 1,
3980 ids : [newRevId, opts, []]
3981 }];
3982 nRevNum = 1;
3983 }
3984 } else {
3985 if (doc._revisions) {
3986 doc._rev_tree = makeRevTreeFromRevisions(doc._revisions, opts);
3987 nRevNum = doc._revisions.start;
3988 newRevId = doc._revisions.ids[0];
3989 }
3990 if (!doc._rev_tree) {
3991 revInfo = parseRevisionInfo(doc._rev);
3992 if (revInfo.error) {
3993 return revInfo;
3994 }
3995 nRevNum = revInfo.prefix;
3996 newRevId = revInfo.id;
3997 doc._rev_tree = [{
3998 pos: nRevNum,
3999 ids: [newRevId, opts, []]
4000 }];
4001 }
4002 }
4003
4004 invalidIdError(doc._id);
4005
4006 doc._rev = nRevNum + '-' + newRevId;
4007
4008 var result = {metadata : {}, data : {}};
4009 for (var key in doc) {
4010 /* istanbul ignore else */
4011 if (Object.prototype.hasOwnProperty.call(doc, key)) {
4012 var specialKey = key[0] === '_';
4013 if (specialKey && !reservedWords[key]) {
4014 var error = createError(DOC_VALIDATION, key);
4015 error.message = DOC_VALIDATION.message + ': ' + key;
4016 throw error;
4017 } else if (specialKey && !dataWords[key]) {
4018 result.metadata[key.slice(1)] = doc[key];
4019 } else {
4020 result.data[key] = doc[key];
4021 }
4022 }
4023 }
4024 return result;
4025}
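// Illustrative example (the generated rev hash is abbreviated): for a new
// write with new_edits, parseDoc splits reserved fields into metadata and
// everything else into data:
//
//   parseDoc({_id: 'mydoc', title: 'x'}, true)
//   // => {
//   //   metadata: {id: 'mydoc', rev: '1-<hash>', rev_tree: [...]},
//   //   data: {title: 'x'}
//   // }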
4026
4027function bulkDocs (api, req, opts, metadata, dbOpts, idbChanges, callback) {
4028
4029 var txn;
4030
4031 // TODO: I would prefer to get rid of these globals
4032 var error;
4033 var results = [];
4034 var docs = [];
4035 var lastWriteIndex;
4036
4037 var revsLimit = dbOpts.revs_limit || 1000;
4038 var rewriteEnabled = dbOpts.name.indexOf("-mrview-") === -1;
4039 const autoCompaction = dbOpts.auto_compaction;
4040
4041 // We only need to track 1 revision for local documents
4042 function docsRevsLimit(doc) {
4043 return /^_local/.test(doc.id) ? 1 : revsLimit;
4044 }
4045
4046 function rootIsMissing(doc) {
4047 return doc.rev_tree[0].ids[1].status === 'missing';
4048 }
4049
4050 function parseBase64(data) {
4051 try {
4052 return atob(data);
4053 } catch (e) {
4054 return {
4055 error: createError(BAD_ARG, 'Attachment is not a valid base64 string')
4056 };
4057 }
4058 }
4059
4060 // Reads the original doc from the store if available
4061 // As in allDocs with the keys option, using multiple get calls is the fastest way
4062 function fetchExistingDocs(txn, docs) {
4063 var fetched = 0;
4064 var oldDocs = {};
4065
4066 function readDone(e) {
4067 if (e.target.result) {
4068 oldDocs[e.target.result.id] = e.target.result;
4069 }
4070 if (++fetched === docs.length) {
4071 processDocs$$1(txn, docs, oldDocs);
4072 }
4073 }
4074
4075 docs.forEach(function (doc) {
4076 txn.objectStore(DOC_STORE).get(doc.id).onsuccess = readDone;
4077 });
4078 }
4079
4080 function revHasAttachment(doc, rev, digest) {
4081 return doc.revs[rev] &&
4082 doc.revs[rev].data._attachments &&
4083 Object.values(doc.revs[rev].data._attachments).find(function (att) {
4084 return att.digest === digest;
4085 });
4086 }
4087
4088 function processDocs$$1(txn, docs, oldDocs) {
4089
4090 docs.forEach(function (doc, i) {
4091 var newDoc;
4092
4093 // The first document write cannot be a deletion
4094 if ('was_delete' in opts && !(Object.prototype.hasOwnProperty.call(oldDocs, doc.id))) {
4095 newDoc = createError(MISSING_DOC, 'deleted');
4096
4097 // The first write of a document cannot specify a revision
4098 } else if (opts.new_edits &&
4099 !Object.prototype.hasOwnProperty.call(oldDocs, doc.id) &&
4100 rootIsMissing(doc)) {
4101 newDoc = createError(REV_CONFLICT);
4102
4103 // Update the existing document
4104 } else if (Object.prototype.hasOwnProperty.call(oldDocs, doc.id)) {
4105 newDoc = update(txn, doc, oldDocs[doc.id]);
4106 // The update can be rejected if it is an update to an existing
4107 // revision; if so, skip it
4108 if (newDoc === false) {
4109 return;
4110 }
4111
4112 // New document
4113 } else {
4114 // Ensure new documents are also stemmed
4115 var merged = merge([], doc.rev_tree[0], docsRevsLimit(doc));
4116 doc.rev_tree = merged.tree;
4117 doc.stemmedRevs = merged.stemmedRevs;
4118 newDoc = doc;
4119 newDoc.isNewDoc = true;
4120 newDoc.wasDeleted = doc.revs[doc.rev].deleted ? 1 : 0;
4121 }
4122
4123 if (newDoc.error) {
4124 results[i] = newDoc;
4125 } else {
4126 oldDocs[newDoc.id] = newDoc;
4127 lastWriteIndex = i;
4128 write(txn, newDoc, i);
4129 }
4130 });
4131 }
4132
4133 // Converts from the format returned by parseDoc into the new format
4134 // we use to store documents
4135 function convertDocFormat(doc) {
4136
4137 var newDoc = {
4138 id: doc.metadata.id,
4139 rev: doc.metadata.rev,
4140 rev_tree: doc.metadata.rev_tree,
4141 revs: doc.metadata.revs || {}
4142 };
4143
4144 newDoc.revs[newDoc.rev] = {
4145 data: doc.data,
4146 deleted: doc.metadata.deleted
4147 };
4148
4149 return newDoc;
4150 }
4151
4152 function update(txn, doc, oldDoc) {
4153
4154 // Ignore updates to existing revisions
4155 if ((doc.rev in oldDoc.revs) && !opts.new_edits) {
4156 return false;
4157 }
4158
4159 var isRoot = /^1-/.test(doc.rev);
4160
4161 // Reattach first writes after a deletion to the last deleted tree
4162 if (oldDoc.deleted && !doc.deleted && opts.new_edits && isRoot) {
4163 var tmp = doc.revs[doc.rev].data;
4164 tmp._rev = oldDoc.rev;
4165 tmp._id = oldDoc.id;
4166 doc = convertDocFormat(parseDoc(tmp, opts.new_edits, dbOpts));
4167 }
4168
4169 var merged = merge(oldDoc.rev_tree, doc.rev_tree[0], docsRevsLimit(doc));
4170 doc.stemmedRevs = merged.stemmedRevs;
4171 doc.rev_tree = merged.tree;
4172
4173 // Merge the old and new rev data
4174 var revs = oldDoc.revs;
4175 revs[doc.rev] = doc.revs[doc.rev];
4176 doc.revs = revs;
4177
4178 doc.attachments = oldDoc.attachments;
4179
4180 var inConflict = opts.new_edits && (((oldDoc.deleted && doc.deleted) ||
4181 (!oldDoc.deleted && merged.conflicts !== 'new_leaf') ||
4182 (oldDoc.deleted && !doc.deleted && merged.conflicts === 'new_branch') ||
4183 (oldDoc.rev === doc.rev)));
4184
4185 if (inConflict) {
4186 return createError(REV_CONFLICT);
4187 }
4188
4189 doc.wasDeleted = oldDoc.deleted;
4190
4191 return doc;
4192 }
4193
4194 function write(txn, doc, i) {
4195
4196 // We copy the data from the winning revision into the root
4197 // of the document so that it can be indexed
4198 var winningRev$$1 = winningRev(doc);
4199 // rev of new doc for attachments and to return it
4200 var writtenRev = doc.rev;
4201 var isLocal = /^_local/.test(doc.id);
4202
4203 var theDoc = doc.revs[winningRev$$1].data;
4204
4205 const isNewDoc = doc.isNewDoc;
4206
4207 if (rewriteEnabled) {
4208 // doc.data is what we index, so we need to clone and rewrite it, and clean
4209 // it up for indexability
4210 var result = rewrite(theDoc);
4211 if (result) {
4212 doc.data = result;
4213 delete doc.data._attachments;
4214 } else {
4215 doc.data = theDoc;
4216 }
4217 } else {
4218 doc.data = theDoc;
4219 }
4220
4221 doc.rev = winningRev$$1;
4222 // .deleted needs to be an int for indexing
4223 doc.deleted = doc.revs[winningRev$$1].deleted ? 1 : 0;
4224
4225 // Bump the seq for every new (non local) revision written
4226 // TODO: index expects a unique seq, not sure if ignoring local will
4227 // work
4228 if (!isLocal) {
4229 doc.seq = ++metadata.seq;
4230
4231 var delta = 0;
4232 // If it's a new document, we won't decrement the count if it's deleted
4233 if (doc.isNewDoc) {
4234 delta = doc.deleted ? 0 : 1;
4235 } else if (doc.wasDeleted !== doc.deleted) {
4236 delta = doc.deleted ? -1 : 1;
4237 }
4238 metadata.doc_count += delta;
4239 }
4240 delete doc.isNewDoc;
4241 delete doc.wasDeleted;
4242
4243 // If there have been revisions stemmed when merging trees,
4244 // delete their data
4245 let revsToDelete = doc.stemmedRevs || [];
4246
4247 if (autoCompaction && !isNewDoc) {
4248 const result = compactTree(doc);
4249 if (result.length) {
4250 revsToDelete = revsToDelete.concat(result);
4251 }
4252 }
4253
4254 if (revsToDelete.length) {
4255 revsToDelete.forEach(function (rev) { delete doc.revs[rev]; });
4256 }
4257
4258 delete doc.stemmedRevs;
4259
4260 if (!('attachments' in doc)) {
4261 doc.attachments = {};
4262 }
4263
4264 if (theDoc._attachments) {
4265 for (var k in theDoc._attachments) {
4266 var attachment = theDoc._attachments[k];
4267 if (attachment.stub) {
4268 if (!(attachment.digest in doc.attachments)) {
4269 error = createError(MISSING_STUB);
4270 // TODO: Not sure how safe this manual abort is, seeing
4271 // console issues
4272 txn.abort();
4273 return;
4274 }
4275
4276 if (revHasAttachment(doc, writtenRev, attachment.digest)) {
4277 doc.attachments[attachment.digest].revs[writtenRev] = true;
4278 }
4279
4280 } else {
4281
4282 doc.attachments[attachment.digest] = attachment;
4283 doc.attachments[attachment.digest].revs = {};
4284 doc.attachments[attachment.digest].revs[writtenRev] = true;
4285
4286 theDoc._attachments[k] = {
4287 stub: true,
4288 digest: attachment.digest,
4289 content_type: attachment.content_type,
4290 length: attachment.length,
4291 revpos: parseInt(writtenRev, 10)
4292 };
4293 }
4294 }
4295 }
4296
4297 // Local documents have different revision handling
4298 if (isLocal && doc.deleted) {
4299 txn.objectStore(DOC_STORE)["delete"](doc.id).onsuccess = function () {
4300 results[i] = {
4301 ok: true,
4302 id: doc.id,
4303 rev: '0-0'
4304 };
4305 };
4306 updateSeq(i);
4307 return;
4308 }
4309
4310 txn.objectStore(DOC_STORE).put(doc).onsuccess = function () {
4311 results[i] = {
4312 ok: true,
4313 id: doc.id,
4314 rev: writtenRev
4315 };
4316 updateSeq(i);
4317 };
4318 }
4319
4320 function updateSeq(i) {
4321 if (i === lastWriteIndex) {
4322 txn.objectStore(META_STORE).put(metadata);
4323 }
4324 }
4325
4326 function preProcessAttachment(attachment) {
4327 if (attachment.stub) {
4328 return Promise.resolve(attachment);
4329 }
4330
4331 var binData;
4332 if (typeof attachment.data === 'string') {
4333 binData = parseBase64(attachment.data);
4334 if (binData.error) {
4335 return Promise.reject(binData.error);
4336 }
4337 attachment.data = binStringToBluffer(binData, attachment.content_type);
4338 } else {
4339 binData = attachment.data;
4340 }
4341
4342 return new Promise(function (resolve) {
4343 binaryMd5(binData, function (result) {
4344 attachment.digest = 'md5-' + result;
4345 attachment.length = binData.size || binData.length || 0;
4346 resolve(attachment);
4347 });
4348 });
4349 }
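// Illustrative sketch (digest value elided): an inline base64 attachment is
// decoded, converted to a Blob, and given an md5 digest and byte length:
//
//   preProcessAttachment({content_type: 'text/plain', data: 'aGVsbG8='})
//     .then(function (att) {
//       // att.data   -> Blob containing 'hello'
//       // att.digest -> 'md5-...'
//       // att.length -> 5
//     });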
4350
4351 function preProcessAttachments() {
4352 var promises = docs.map(function (doc) {
4353 var data = doc.revs[doc.rev].data;
4354 if (!data._attachments) {
4355 return Promise.resolve(data);
4356 }
4357 var attachments = Object.keys(data._attachments).map(function (k) {
4358 data._attachments[k].name = k;
4359 return preProcessAttachment(data._attachments[k]);
4360 });
4361
4362 return Promise.all(attachments).then(function (newAttachments) {
4363 var processed = {};
4364 newAttachments.forEach(function (attachment) {
4365 processed[attachment.name] = attachment;
4366 delete attachment.name;
4367 });
4368 data._attachments = processed;
4369 return data;
4370 });
4371 });
4372 return Promise.all(promises);
4373 }
4374
4375 for (var i = 0, len = req.docs.length; i < len; i++) {
4376 var result;
4377 // TODO: We should get rid of throwing for invalid docs, also not sure
4378 // why this is needed in idb-next and not idb
4379 try {
4380 result = parseDoc(req.docs[i], opts.new_edits, dbOpts);
4381 } catch (err) {
4382 result = err;
4383 }
4384 if (result.error) {
4385 return callback(result);
4386 }
4387
4388 // Ideally parseDoc would return data in this format, but it is currently
4389 // shared so we need to convert
4390 docs.push(convertDocFormat(result));
4391 }
4392
4393 preProcessAttachments().then(function () {
4394 api._openTransactionSafely([DOC_STORE, META_STORE], 'readwrite', function (err, _txn) {
4395 if (err) {
4396 return callback(err);
4397 }
4398
4399 txn = _txn;
4400
4401 txn.onabort = function () {
4402 callback(error || createError(UNKNOWN_ERROR, 'transaction was aborted'));
4403 };
4404 txn.ontimeout = idbError(callback);
4405
4406 txn.oncomplete = function () {
4407 idbChanges.notify(dbOpts.name);
4408 callback(null, results);
4409 };
4410
4411 // We would like to use promises here, but idb sucks
4412 fetchExistingDocs(txn, docs);
4413 });
4414 })["catch"](function (err) {
4415 callback(err);
4416 });
4417}
4418
4419function allDocsKeys(keys, docStore, allDocsInner) {
4420 // Results are not guaranteed to be returned in the right order
4421 var valuesBatch = new Array(keys.length);
4422 var count = 0;
4423 keys.forEach(function (key, index) {
4424 docStore.get(key).onsuccess = function (event) {
4425 if (event.target.result) {
4426 valuesBatch[index] = event.target.result;
4427 } else {
4428 valuesBatch[index] = {key: key, error: 'not_found'};
4429 }
4430 count++;
4431 if (count === keys.length) {
4432 valuesBatch.forEach(function (doc) {
4433 allDocsInner(doc);
4434 });
4435 }
4436 };
4437 });
4438}
4439
4440function createKeyRange(start, end, inclusiveEnd, key, descending) {
4441 try {
4442 if (start && end) {
4443 if (descending) {
4444 return IDBKeyRange.bound(end, start, !inclusiveEnd, false);
4445 } else {
4446 return IDBKeyRange.bound(start, end, false, !inclusiveEnd);
4447 }
4448 } else if (start) {
4449 if (descending) {
4450 return IDBKeyRange.upperBound(start);
4451 } else {
4452 return IDBKeyRange.lowerBound(start);
4453 }
4454 } else if (end) {
4455 if (descending) {
4456 return IDBKeyRange.lowerBound(end, !inclusiveEnd);
4457 } else {
4458 return IDBKeyRange.upperBound(end, !inclusiveEnd);
4459 }
4460 } else if (key) {
4461 return IDBKeyRange.only(key);
4462 }
4463 } catch (e) {
4464 return {error: e};
4465 }
4466 return null;
4467}
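// Illustrative example: an ascending range over doc ids 'a'..'m' that
// excludes the end key maps onto a bound IDBKeyRange:
//
//   createKeyRange('a', 'm', false, null, false)
//   // equivalent to IDBKeyRange.bound('a', 'm', false, true)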
4468
4469function handleKeyRangeError(opts, metadata, err, callback) {
4470 if (err.name === "DataError" && err.code === 0) {
4471 // data error, start is less than end
4472 var returnVal = {
4473 total_rows: metadata.doc_count,
4474 offset: opts.skip,
4475 rows: []
4476 };
4477 /* istanbul ignore if */
4478 if (opts.update_seq) {
4479 returnVal.update_seq = metadata.seq;
4480 }
4481 return callback(null, returnVal);
4482 }
4483 callback(createError(IDB_ERROR, err.name, err.message));
4484}
4485
4486function allDocs (txn, metadata, opts, callback) {
4487 if (txn.error) {
4488 return callback(txn.error);
4489 }
4490
4491 // TODO: Weird hack, I don't like it
4492 if (opts.limit === 0) {
4493 var returnVal = {
4494 total_rows: metadata.doc_count,
4495 offset: opts.skip,
4496 rows: []
4497 };
4498
4499 /* istanbul ignore if */
4500 if (opts.update_seq) {
4501 returnVal.update_seq = metadata.seq;
4502 }
4503 return callback(null, returnVal);
4504 }
4505
4506 var results = [];
4507 var processing = [];
4508
4509 var start = 'startkey' in opts ? opts.startkey : false;
4510 var end = 'endkey' in opts ? opts.endkey : false;
4511 var key = 'key' in opts ? opts.key : false;
4512 var keys = 'keys' in opts ? opts.keys : false;
4513 var skip = opts.skip || 0;
4514 var limit = typeof opts.limit === 'number' ? opts.limit : -1;
4515 var inclusiveEnd = opts.inclusive_end !== false;
4516 var descending = 'descending' in opts && opts.descending ? 'prev' : null;
4517
4518 var keyRange;
4519 if (!keys) {
4520 keyRange = createKeyRange(start, end, inclusiveEnd, key, descending);
4521 if (keyRange && keyRange.error) {
4522 return handleKeyRangeError(opts, metadata, keyRange.error, callback);
4523 }
4524 }
4525
4526 var docStore = txn.txn.objectStore(DOC_STORE);
4527
4528 txn.txn.oncomplete = onTxnComplete;
4529
4530 if (keys) {
4531 return allDocsKeys(opts.keys, docStore, allDocsInner);
4532 }
4533
4534 function include_doc(row, doc) {
4535 var docData = doc.revs[doc.rev].data;
4536
4537 row.doc = docData;
4538 row.doc._id = doc.id;
4539 row.doc._rev = doc.rev;
4540 if (opts.conflicts) {
4541 var conflicts = collectConflicts(doc);
4542 if (conflicts.length) {
4543 row.doc._conflicts = conflicts;
4544 }
4545 }
4546 if (opts.attachments && docData._attachments) {
4547 for (var name in docData._attachments) {
4548 processing.push(processAttachment(name, doc, row.doc, opts.binary));
4549 }
4550 }
4551 }
4552
4553 function allDocsInner(doc) {
4554 if (doc.error && keys) {
4555 // key was not found with "keys" requests
4556 results.push(doc);
4557 return true;
4558 }
4559
4560 var row = {
4561 id: doc.id,
4562 key: doc.id,
4563 value: {
4564 rev: doc.rev
4565 }
4566 };
4567
4568 var deleted = doc.deleted;
4569 if (deleted) {
4570 if (keys) {
4571 results.push(row);
4572 row.value.deleted = true;
4573 row.doc = null;
4574 }
4575 } else if (skip-- <= 0) {
4576 results.push(row);
4577 if (opts.include_docs) {
4578 include_doc(row, doc);
4579 }
4580 if (--limit === 0) {
4581 return false;
4582 }
4583 }
4584 return true;
4585 }
4586
4587 function onTxnComplete() {
4588 Promise.all(processing).then(function () {
4589 var returnVal = {
4590 total_rows: metadata.doc_count,
4591 offset: 0,
4592 rows: results
4593 };
4594
4595 /* istanbul ignore if */
4596 if (opts.update_seq) {
4597 returnVal.update_seq = metadata.seq;
4598 }
4599 callback(null, returnVal);
4600 });
4601 }
4602
4603 var cursor = descending ?
4604 docStore.openCursor(keyRange, descending) :
4605 docStore.openCursor(keyRange);
4606
4607 cursor.onsuccess = function (e) {
4608
4609 var doc = e.target.result && e.target.result.value;
4610
4611 // Happens if opts does not have a limit,
4612 // because the cursor will then end normally
4613 // once all docs have been retrieved.
4614 // Would not be needed if the getAll() optimization was used, as in #6059
4615 if (!doc) { return; }
4616
4617 // Skip local docs
4618 if (/^_local/.test(doc.id)) {
4619 return e.target.result["continue"]();
4620 }
4621
4622 var continueCursor = allDocsInner(doc);
4623 if (continueCursor) {
4624 e.target.result["continue"]();
4625 }
4626 };
4627
4628}
4629
4630function changes (txn, idbChanges, api, dbOpts, opts) {
4631 if (txn.error) {
4632 return opts.complete(txn.error);
4633 }
4634
4635 if (opts.continuous) {
4636 var id = dbOpts.name + ':' + uuid$1();
4637 idbChanges.addListener(dbOpts.name, id, api, opts);
4638 idbChanges.notify(dbOpts.name);
4639 return {
4640 cancel: function () {
4641 idbChanges.removeListener(dbOpts.name, id);
4642 }
4643 };
4644 }
4645
4646 var limit = 'limit' in opts ? opts.limit : -1;
4647 if (limit === 0) {
4648 limit = 1;
4649 }
4650
4651 var store = txn.txn.objectStore(DOC_STORE).index('seq');
4652
4653 var filter = filterChange(opts);
4654 var received = 0;
4655
4656 var lastSeq = opts.since || 0;
4657 var results = [];
4658
4659 var processing = [];
4660
4661 function onReqSuccess(e) {
4662 if (!e.target.result) { return; }
4663 var cursor = e.target.result;
4664 var doc = cursor.value;
4665 // Overwrite doc.data, which may have been rewritten (see rewrite.js),
4666 // with the clean version for that rev
4667 doc.data = doc.revs[doc.rev].data;
4668 doc.data._id = doc.id;
4669 doc.data._rev = doc.rev;
4670 if (doc.deleted) {
4671 doc.data._deleted = true;
4672 }
4673
4674 if (opts.doc_ids && opts.doc_ids.indexOf(doc.id) === -1) {
4675 return cursor["continue"]();
4676 }
4677
4678 // WARNING: expecting possible old format
4679 var change = opts.processChange(doc.data, doc, opts);
4680 change.seq = doc.seq;
4681 lastSeq = doc.seq;
4682 var filtered = filter(change);
4683
4684 // If it's an error
4685 if (typeof filtered === 'object') {
4686 return opts.complete(filtered);
4687 }
4688
4689 if (filtered) {
4690 received++;
4691 if (opts.return_docs) {
4692 results.push(change);
4693 }
4694
4695 if (opts.include_docs && opts.attachments && doc.data._attachments) {
4696 var promises = [];
4697 for (var name in doc.data._attachments) {
4698 var p = processAttachment(name, doc, change.doc, opts.binary);
4699 // We add the processing promise to 2 arrays: one tracks all
4700 // the promises needed before we fire onChange, the other
4701 // ensures we process all attachments before onComplete
4702 promises.push(p);
4703 processing.push(p);
4704 }
4705
4706 Promise.all(promises).then(function () {
4707 opts.onChange(change);
4708 });
4709 } else {
4710 opts.onChange(change);
4711 }
4712 }
4713 if (received !== limit) {
4714 cursor["continue"]();
4715 }
4716 }
4717
4718 function onTxnComplete() {
4719 Promise.all(processing).then(function () {
4720 opts.complete(null, {
4721 results: results,
4722 last_seq: lastSeq
4723 });
4724 });
4725 }
4726
4727 var req;
4728 if (opts.descending) {
4729 req = store.openCursor(null, 'prev');
4730 } else {
4731 req = store.openCursor(IDBKeyRange.lowerBound(opts.since, true));
4732 }
4733
4734 txn.txn.oncomplete = onTxnComplete;
4735 req.onsuccess = onReqSuccess;
4736}
4737
4738function getRevisionTree (txn, id, callback) {
4739 if (txn.error) {
4740 return callback(txn.error);
4741 }
4742
4743 var req = txn.txn.objectStore(DOC_STORE).get(id);
4744 req.onsuccess = function (e) {
4745 if (!e.target.result) {
4746 callback(createError(MISSING_DOC));
4747 } else {
4748 callback(null, e.target.result.rev_tree);
4749 }
4750 };
4751}
4752
4753function doCompaction (txn, id, revs, callback) {
4754 if (txn.error) {
4755 return callback(txn.error);
4756 }
4757
4758 var docStore = txn.txn.objectStore(DOC_STORE);
4759
4760 docStore.get(id).onsuccess = function (e) {
4761 var doc = e.target.result;
4762
4763 traverseRevTree(doc.rev_tree, function (isLeaf, pos, revHash, ctx, opts) {
4764 var rev = pos + '-' + revHash;
4765 if (revs.indexOf(rev) !== -1) {
4766 opts.status = 'missing';
4767 }
4768 });
4769
4770 var attachments = [];
4771
4772 revs.forEach(function (rev) {
4773 if (rev in doc.revs) {
4774 // Make a list of attachments that are used by the revisions being
4775 // deleted
4776 if (doc.revs[rev].data._attachments) {
4777 for (var k in doc.revs[rev].data._attachments) {
4778 attachments.push(doc.revs[rev].data._attachments[k].digest);
4779 }
4780 }
4781 delete doc.revs[rev];
4782 }
4783 });
4784
4785 // Attachments have a list of revisions that are using them; when
4786 // that list becomes empty we can delete the attachment.
4787 attachments.forEach(function (digest) {
4788 revs.forEach(function (rev) {
4789 delete doc.attachments[digest].revs[rev];
4790 });
4791 if (!Object.keys(doc.attachments[digest].revs).length) {
4792 delete doc.attachments[digest];
4793 }
4794 });
4795
4796 docStore.put(doc);
4797 };
4798
4799 txn.txn.oncomplete = function () {
4800 callback();
4801 };
4802}
4803
4804function destroy (dbOpts, openDatabases, idbChanges, callback) {
4805
4806 idbChanges.removeAllListeners(dbOpts.name);
4807
4808 function doDestroy() {
4809 var req = indexedDB.deleteDatabase(dbOpts.name);
4810 req.onsuccess = function () {
4811 delete openDatabases[dbOpts.name];
4812 callback(null, {ok: true});
4813 };
4814 }
4815
4816 // If the database is open we need to close it
4817 if (dbOpts.name in openDatabases) {
4818 openDatabases[dbOpts.name].then(function (res) {
4819 res.idb.close();
4820 doDestroy();
4821 });
4822 } else {
4823 doDestroy();
4824 }
4825
4826}
4827
4828// Adapted from
4829// https://github.com/pouchdb/pouchdb/blob/master/packages/node_modules/pouchdb-find/src/adapters/local/find/query-planner.js#L20-L24
4830// This could change / improve in the future?
4831var COUCH_COLLATE_LO = null;
4832var COUCH_COLLATE_HI = '\uffff'; // actually used as {"\uffff": {}}
4833
4834// Adapted from: https://www.w3.org/TR/IndexedDB/#compare-two-keys
4835// Importantly, *there is no upper bound possible* in idb. The ideal data
4836 // structure would be an infinitely deep array:
4837// var IDB_COLLATE_HI = []; IDB_COLLATE_HI.push(IDB_COLLATE_HI)
4838// But IDBKeyRange is not a fan of shenanigans, so I've just gone with 12 layers
4839// because it looks nice and surely that's enough!
4840var IDB_COLLATE_LO = Number.NEGATIVE_INFINITY;
4841var IDB_COLLATE_HI = [[[[[[[[[[[[]]]]]]]]]]]];
4842
4843//
4844 // TODO: this should be made official somewhere and used by AllDocs / get /
4845// changes etc as well.
4846//
4847function externaliseRecord(idbDoc) {
4848 var doc = idbDoc.revs[idbDoc.rev].data;
4849 doc._id = idbDoc.id;
4850 doc._rev = idbDoc.rev;
4851 if (idbDoc.deleted) {
4852 doc._deleted = true;
4853 }
4854
4855 return doc;
4856}
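// Illustrative example (rev hash made up): externaliseRecord converts the
// internal idb record back into a regular PouchDB document:
//
//   externaliseRecord({
//     id: 'mydoc', rev: '1-abc', deleted: 0,
//     revs: {'1-abc': {data: {title: 'x'}}}
//   })
//   // => {title: 'x', _id: 'mydoc', _rev: '1-abc'}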
4857
4858/**
4859 * Generates a keyrange based on the opts passed to query
4860 *
4861 * The first key is always 0, as that's how we're filtering out deleted entries.
4862 */
4863function generateKeyRange(opts) {
4864 function defined(obj, k) {
4865 return obj[k] !== void 0;
4866 }
4867
4868 // Converts a valid CouchDB key into a valid IndexedDB one
4869 function convert(key, exact) {
4870 // The first item in every native index is doc.deleted, and we always want
4871 // to only search documents that are not deleted.
4872 // "foo" -> [0, "foo"]
4873 var filterDeleted = [0].concat(key);
4874
4875 return filterDeleted.map(function (k) {
4876 // null, true and false are not indexable by indexeddb. When we write
4877 // these values we convert them to these constants, and so when we
4878 // query for them we need to convert the query also.
4879 if (k === null && exact) {
4880 // for non-exact queries we treat null as a collate property
4881 // see `if (!exact)` block below
4882 return IDB_NULL;
4883 } else if (k === true) {
4884 return IDB_TRUE;
4885 } else if (k === false) {
4886 return IDB_FALSE;
4887 }
4888
4889 if (!exact) {
4890 // We get passed CouchDB's collate low and high values, so for non-exact
4891 // ranged queries we're going to convert them to our IDB equivalents
4892 if (k === COUCH_COLLATE_LO) {
4893 return IDB_COLLATE_LO;
4894 } else if (Object.prototype.hasOwnProperty.call(k, COUCH_COLLATE_HI)) {
4895 return IDB_COLLATE_HI;
4896 }
4897 }
4898
4899 return k;
4900 });
4901 }
4902
4903 // CouchDB, and so PouchDB, defaults to true. We need to make this explicit as
4904 // we invert these later for IndexedDB.
4905 if (!defined(opts, 'inclusive_end')) {
4906 opts.inclusive_end = true;
4907 }
4908 if (!defined(opts, 'inclusive_start')) {
4909 opts.inclusive_start = true;
4910 }
4911
4912 if (opts.descending) {
4913 // Flip before generating. We'll check descending again later when performing
4914 // an index request
4915 var realEndkey = opts.startkey,
4916 realInclusiveEnd = opts.inclusive_start;
4917
4918 opts.startkey = opts.endkey;
4919 opts.endkey = realEndkey;
4920 opts.inclusive_start = opts.inclusive_end;
4921 opts.inclusive_end = realInclusiveEnd;
4922 }
4923
4924 try {
4925 if (defined(opts, 'key')) {
4926 return IDBKeyRange.only(convert(opts.key, true));
4927 }
4928
4929 if (defined(opts, 'startkey') && !defined(opts, 'endkey')) {
4930 // lowerBound, but without the deleted docs.
4931 // [1] is the start of the deleted doc range, and we don't want to include them.
4932 return IDBKeyRange.bound(
4933 convert(opts.startkey), [1],
4934 !opts.inclusive_start, true
4935 );
4936 }
4937
4938 if (!defined(opts, 'startkey') && defined(opts, 'endkey')) {
4939 return IDBKeyRange.upperBound(convert(opts.endkey), !opts.inclusive_end);
4940 }
4941
4942 if (defined(opts, 'startkey') && defined(opts, 'endkey')) {
4943 return IDBKeyRange.bound(
4944 convert(opts.startkey), convert(opts.endkey),
4945 !opts.inclusive_start, !opts.inclusive_end
4946 );
4947 }
4948
4949 return IDBKeyRange.only([0]);
4950 } catch (err) {
4951 console.error('Could not generate keyRange', err, opts);
4952 throw Error('Could not generate key range with ' + JSON.stringify(opts));
4953 }
4954}
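// Illustrative example: an ascending range query gets the leading 0 (the
// "not deleted" marker) prepended to both keys:
//
//   generateKeyRange({startkey: ['a'], endkey: ['m']})
//   // equivalent to IDBKeyRange.bound([0, 'a'], [0, 'm'], false, false)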
4955
4956function getIndexHandle(pdb, fields, reject) {
4957 var indexName = naturalIndexName(fields);
4958
4959 return new Promise(function (resolve) {
4960 pdb._openTransactionSafely([DOC_STORE], 'readonly', function (err, txn) {
4961 if (err) {
4962 return idbError(reject)(err);
4963 }
4964
4965 txn.onabort = idbError(reject);
4966 txn.ontimeout = idbError(reject);
4967
4968 var existingIndexNames = Array.from(txn.objectStore(DOC_STORE).indexNames);
4969
4970 if (existingIndexNames.indexOf(indexName) === -1) {
4971 // The index is missing, force a db restart and try again
4972 pdb._freshen()
4973 .then(function () { return getIndexHandle(pdb, fields, reject); })
4974 .then(resolve);
4975 } else {
4976 resolve(txn.objectStore(DOC_STORE).index(indexName));
4977 }
4978 });
4979 });
4980}
4981
4982// In theory we should return something like the doc example below, but find
4983// only needs rows: [{doc: {...}}], so I think we can just not bother for now
4984// {
4985// "offset" : 0,
4986// "rows": [{
4987// "id": "doc3",
4988// "key": "Lisa Says",
4989// "value": null,
4990// "doc": {
4991// "_id": "doc3",
4992// "_rev": "1-z",
4993// "title": "Lisa Says"
4994// }
4995// }],
4996// "total_rows" : 4
4997// }
4998function query(idb, signature, opts, fallback) {
4999 // At this stage, in the current implementation, find has already gone through
5000 // and determined if the index already exists from PouchDB's perspective (eg
5001 // there is a design doc for it).
5002 //
5003 // If we find that the index doesn't exist this means we have to close and
5004 // re-open the DB to correct indexes before proceeding, at which point the
5005 // index should exist.
5006
5007 var pdb = this;
5008
5009 // Assumption: there will be only one '/' between the design document name
5010 // and the view name.
5011 var parts = signature.split('/');
5012
5013 return new Promise(function (resolve, reject) {
5014 pdb.get('_design/' + parts[0]).then(function (ddoc) {
5015 if (isPartialFilterView(ddoc, parts[1])) {
5016 // Fix for #8522
5017 // An IndexedDB index is always over all entries. And there is no way to filter them.
5018 // Therefore the normal findAbstractMapper will be used
5019 // for indexes with partial_filter_selector.
5020 return fallback(signature, opts).then(resolve, reject);
5021 }
5022
5023 var fields = rawIndexFields(ddoc, parts[1]);
5024 if (!fields) {
5025 throw new Error('ddoc ' + ddoc._id +' with view ' + parts[1] +
5026 ' does not have map.options.def.fields defined.');
5027 }
5028
5029 var skip = opts.skip;
5030 var limit = Number.isInteger(opts.limit) && opts.limit;
5031
5032 return getIndexHandle(pdb, fields, reject)
5033 .then(function (indexHandle) {
5034 var keyRange = generateKeyRange(opts);
5035 var req = indexHandle.openCursor(keyRange, opts.descending ? 'prev' : 'next');
5036
5037 var rows = [];
5038 req.onerror = idbError(reject);
5039 req.onsuccess = function (e) {
5040 var cursor = e.target.result;
5041
5042 if (!cursor || limit === 0) {
5043 return resolve({
5044 rows: rows
5045 });
5046 }
5047
5048 if (skip) {
5049 cursor.advance(skip);
5050 skip = false;
5051 return;
5052 }
5053
5054 if (limit) {
5055 limit = limit - 1;
5056 }
5057
5058 rows.push({doc: externaliseRecord(cursor.value)});
5059 cursor["continue"]();
5060 };
5061 });
5062 })[
5063 "catch"](reject);
5064 });
5065
5066}
5067
5068function viewCleanup(idb, fallback) {
5069 // I'm not sure we have to do anything here.
5070 //
5071 // One option is to just close and re-open the DB, which performs the same
5072 // action. The only reason you'd want to call this is if you deleted a bunch
5073 // of indexes and wanted the space back immediately.
5074 //
5075 // Otherwise index cleanup happens when:
5076 // - A DB is opened
5077 // - A find query is performed against an index that doesn't exist but should
5078
5079 // Fix for #8522
5080 // On views with partial_filter_selector the standard find-abstract-mapper is used.
5081 // Its indexes must be cleaned up.
5082 // Fallback is the standard viewCleanup.
5083 return fallback();
5084}
5085
5086function purgeAttachments(doc, revs) {
5087 if (!doc.attachments) {
5088 // If there are no attachments, doc.attachments is an empty object
5089 return {};
5090 }
5091
5092 // Iterate over all attachments and remove the respective revs
5093 for (let key in doc.attachments) {
5094 const attachment = doc.attachments[key];
5095
5096 for (let rev of revs) {
5097 if (attachment.revs[rev]) {
5098 delete attachment.revs[rev];
5099 }
5100 }
5101
5102 if (Object.keys(attachment.revs).length === 0) {
5103 delete doc.attachments[key];
5104 }
5105 }
5106
5107 return doc.attachments;
5108}
5109
5110// `purge()` expects a path of revisions in its revs argument that:
5111// - starts with a leaf rev
5112// - continues sequentially with the remaining revs of that leaf’s branch
5113//
5114// eg. for this rev tree:
5115// 1-9692 ▶ 2-37aa ▶ 3-df22 ▶ 4-6e94 ▶ 5-df4a ▶ 6-6a3a ▶ 7-57e5
5116// ┃ ┗━━━━━━▶ 5-8d8c ▶ 6-65e0
5117// ┗━━━━━━▶ 3-43f6 ▶ 4-a3b4
5118//
5119// …if you wanted to purge '7-57e5', you would provide ['7-57e5', '6-6a3a', '5-df4a']
5120//
5121// The purge adapter implementation in `pouchdb-core` uses the helper function `findPathToLeaf`
5122// from `pouchdb-merge` to construct this array correctly. Since this purge implementation is
5123// only ever called from there, we do no additional checks here as to whether `revs` actually
5124// fulfills the criteria above, since `findPathToLeaf` already does these.
5125function purge(txn, docId, revs, callback) {
5126 if (txn.error) {
5127 return callback(txn.error);
5128 }
5129
5130 const docStore = txn.txn.objectStore(DOC_STORE);
5131 const deletedRevs = [];
5132 let documentWasRemovedCompletely = false;
5133 docStore.get(docId).onsuccess = (e) => {
5134 const doc = e.target.result;
5135
5136 // we could do a dry run here to check if revs is a proper path towards a leaf in the rev tree
5137
5138 for (const rev of revs) {
5139 // purge rev from tree
5140 doc.rev_tree = removeLeafFromRevTree(doc.rev_tree, rev);
5141
5142 // drop the rev's data and record it as purged
5143 delete doc.revs[rev];
5144 deletedRevs.push(rev);
5145 }
5146
5147 if (doc.rev_tree.length === 0) {
5148 // if the rev tree is empty, we can delete the entire document
5149 docStore["delete"](doc.id);
5150 documentWasRemovedCompletely = true;
5151 return;
5152 }
5153
5154 // find new winning rev
5155 doc.rev = winningRev(doc);
5156 doc.data = doc.revs[doc.rev].data;
5157 doc.attachments = purgeAttachments(doc, revs);
5158
5159 // finally, write the purged doc
5160 docStore.put(doc);
5161 };
5162
5163 txn.txn.oncomplete = function () {
5164 callback(null, {
5165 ok: true,
5166 deletedRevs,
5167 documentWasRemovedCompletely
5168 });
5169 };
5170}
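// Illustrative example (revs are made up): purging the only leaf of a
// single-branch document removes the document entirely and reports that via
// the callback:
//
//   purge(txn, 'mydoc', ['2-bbb', '1-aaa'], function (err, res) {
//     // res => {ok: true, deletedRevs: ['2-bbb', '1-aaa'],
//     //         documentWasRemovedCompletely: true}
//   });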
5171
5172var ADAPTER_NAME = 'indexeddb';
5173
5174// TODO: Constructor should be capitalised
5175var idbChanges = new Changes();
5176
5177// A shared list of database handles
5178var openDatabases = {};
5179
5180function IdbPouch(dbOpts, callback) {
5181
5182 if (dbOpts.view_adapter) {
5183 console.log('Please note that the indexeddb adapter manages _find indexes itself, therefore it is not using your specified view_adapter');
5184 }
5185
5186 var api = this;
5187 var metadata = {};
5188
5189 // Wrapper that gives you an active DB handle. You probably want $t.
5190 var $ = function (fun) {
5191 return function () {
5192 var args = Array.prototype.slice.call(arguments);
5193 setup(openDatabases, api, dbOpts).then(function (res) {
5194 metadata = res.metadata;
5195 args.unshift(res.idb);
5196 fun.apply(api, args);
5197 })["catch"](function (err) {
5198 var last = args.pop();
5199 if (typeof last === 'function') {
5200 last(err);
5201 } else {
5202 console.error(err);
5203 }
5204 });
5205 };
5206 };
5207 // the promise version of $
5208 var $p = function (fun) {
5209 return function () {
5210 var args = Array.prototype.slice.call(arguments);
5211
5212 return setup(openDatabases, api, dbOpts).then(function (res) {
5213 metadata = res.metadata;
5214 args.unshift(res.idb);
5215
5216 return fun.apply(api, args);
5217 });
5218 };
5219 };
5220 // Wrapper that gives you a safe transaction handle. It's important to use
5221 // this instead of opening your own transaction from a db handle obtained from $,
5222 // because in the time between getting the db handle and opening the
5223 // transaction it may have been invalidated by index changes.
5224 var $t = function (fun, stores, mode) {
5225 stores = stores || [DOC_STORE];
5226 mode = mode || 'readonly';
5227
5228 return function () {
5229 var args = Array.prototype.slice.call(arguments);
5230 var txn = {};
5231 setup(openDatabases, api, dbOpts).then(function (res) {
5232 metadata = res.metadata;
5233 txn.txn = res.idb.transaction(stores, mode);
5234 })["catch"](function (err) {
5235 console.error('Failed to establish transaction safely');
5236 console.error(err);
5237 txn.error = err;
5238 }).then(function () {
5239 args.unshift(txn);
5240 fun.apply(api, args);
5241 });
5242 };
5243 };
5244
5245 api._openTransactionSafely = function (stores, mode, callback) {
5246 $t(function (txn, callback) {
5247 callback(txn.error, txn.txn);
5248 }, stores, mode)(callback);
5249 };
5250
5251 api._remote = false;
5252 api.type = function () { return ADAPTER_NAME; };
5253
5254 api._id = $(function (_, cb) {
5255 cb(null, metadata.db_uuid);
5256 });
5257
5258 api._info = $(function (_, cb) {
5259 return info(metadata, cb);
5260 });
5261
5262 api._get = $t(get);
5263
5264 api._bulkDocs = $(function (_, req, opts, callback) {
5265 bulkDocs(api, req, opts, metadata, dbOpts, idbChanges, callback);
5266 });
5267
5268 api._allDocs = $t(function (txn, opts, cb) {
5269 allDocs(txn, metadata, opts, cb);
5270 });
5271
5272 api._getAttachment = $t(getAttachment);
5273
5274 api._changes = $t(function (txn, opts) {
5275 changes(txn, idbChanges, api, dbOpts, opts);
5276 });
5277
5278 api._getRevisionTree = $t(getRevisionTree);
5279 api._doCompaction = $t(doCompaction, [DOC_STORE], 'readwrite');
5280
5281 api._customFindAbstractMapper = {
5282 query: $p(query),
5283 viewCleanup: $p(viewCleanup)
5284 };
5285
5286 api._destroy = function (opts, callback) {
5287 return destroy(dbOpts, openDatabases, idbChanges, callback);
5288 };
5289
5290 api._close = $(function (db, cb) {
5291 delete openDatabases[dbOpts.name];
5292 db.close();
5293 cb();
5294 });
5295
5296 // Closing and re-opening the DB re-generates native indexes
5297 api._freshen = function () {
5298 return new Promise(function (resolve) {
5299 api._close(function () {
5300 $(resolve)();
5301 });
5302 });
5303 };
5304
5305 api._purge = $t(purge, [DOC_STORE], 'readwrite');
5306
5307 // TODO: this setTimeout seems nasty; if it's needed let's
5308 // figure out / explain why
5309 setTimeout(function () {
5310 callback(null, api);
5311 });
5312}
5313
5314// TODO: this isn't really valid permanently, just being lazy to start
5315IdbPouch.valid = function () {
5316 return true;
5317};
5318
5319function IndexeddbPouchPlugin (PouchDB) {
5320 PouchDB.adapter(ADAPTER_NAME, IdbPouch, true);
5321}
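// Illustrative usage sketch: when consumed as a module (rather than this
// self-registering dist script) the plugin is registered explicitly, and a
// database opts in via the adapter name:
//
//   PouchDB.plugin(IndexeddbPouchPlugin);
//   var db = new PouchDB('mydb', {adapter: 'indexeddb'});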
5322
5323// this code only runs in the browser, as its own dist/ script
5324
5325if (typeof PouchDB === 'undefined') {
5326 guardedConsole('error', 'indexeddb adapter plugin error: ' +
5327 'Cannot find global "PouchDB" object! ' +
5328 'Did you remember to include pouchdb.js?');
5329} else {
5330 PouchDB.plugin(IndexeddbPouchPlugin);
5331}
5332
5333},{"10":10,"2":2,"3":3,"9":9}]},{},[25]);
5334