UNPKG

376 kB · JavaScript · View Raw
// PouchDB 9.0.0
//
// (c) 2012-2024 Dale Harvey and the PouchDB team
// PouchDB may be freely distributed under the Apache license, version 2.0.
// For all details and documentation:
// http://pouchdb.com
// Browserify UMD prelude (generated, minified): exposes the bundle as CommonJS
// `module.exports`, AMD `define`, or a global `PouchDB` (window/global/self),
// and defines the tiny module loader `r`/`o` that resolves the numbered
// modules below. NOTE(review): generated code — left byte-for-byte as emitted.
(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.PouchDB = f()}})(function(){var define,module,exports;return (function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i<t.length;i++)o(t[i]);return o}return r})()({1:[function(_dereq_,module,exports){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// Capability shims: prefer the native ES5 functions, fall back to the local
// polyfills (defined at the bottom of this module) on very old engines.
var objectCreate = Object.create || objectCreatePolyfill
var objectKeys = Object.keys || objectKeysPolyfill
var bind = Function.prototype.bind || functionBindPolyfill
32
// Minimal EventEmitter (browser shim of Node's `events` module).
// Lazily initialises the per-instance listener table so subclasses that
// never invoke this constructor still work on first addListener/emit.
function EventEmitter() {
  if (!this._events || !Object.prototype.hasOwnProperty.call(this, '_events')) {
    this._events = objectCreate(null); // prototype-less map: event name -> fn | fn[]
    this._eventsCount = 0;             // number of distinct event names registered
  }

  this._maxListeners = this._maxListeners || undefined;
}
module.exports = EventEmitter;

// Backwards-compat with node 0.10.x
EventEmitter.EventEmitter = EventEmitter;

// Prototype defaults let instances skip allocation until first use.
EventEmitter.prototype._events = undefined;
EventEmitter.prototype._maxListeners = undefined;
48
// By default EventEmitters will print a warning if more than 10 listeners are
// added to it. This is a useful default which helps finding memory leaks.
var defaultMaxListeners = 10;

// Feature-detect a working Object.defineProperty (absent or broken on IE8)
// so `EventEmitter.defaultMaxListeners` can be a validated accessor where
// the engine supports it.
var hasDefineProperty;
try {
  var o = {};
  if (Object.defineProperty) Object.defineProperty(o, 'x', { value: 0 });
  hasDefineProperty = o.x === 0;
} catch (err) { hasDefineProperty = false }
if (hasDefineProperty) {
  Object.defineProperty(EventEmitter, 'defaultMaxListeners', {
    enumerable: true,
    get: function() {
      return defaultMaxListeners;
    },
    set: function(arg) {
      // check whether the input is a positive number (whose value is zero or
      // greater and not a NaN).
      if (typeof arg !== 'number' || arg < 0 || arg !== arg)
        throw new TypeError('"defaultMaxListeners" must be a positive number');
      defaultMaxListeners = arg;
    }
  });
} else {
  // No defineProperty: plain data property, assignments are unvalidated.
  EventEmitter.defaultMaxListeners = defaultMaxListeners;
}
76
// Obviously not all Emitters should be limited to 10. This function allows
// that to be increased. Set to zero for unlimited.
EventEmitter.prototype.setMaxListeners = function setMaxListeners(n) {
  var invalid = typeof n !== 'number' || n < 0 || isNaN(n);
  if (invalid) {
    throw new TypeError('"n" argument must be a positive number');
  }
  this._maxListeners = n;
  return this;
};

// Effective limit for one emitter: its own value when set, otherwise the
// class-wide default.
function $getMaxListeners(that) {
  return that._maxListeners === undefined
      ? EventEmitter.defaultMaxListeners
      : that._maxListeners;
}

EventEmitter.prototype.getMaxListeners = function getMaxListeners() {
  return $getMaxListeners(this);
};
95
// Fixed-arity dispatch helpers used by emit(). Because each of these always
// receives the same number of arguments, engines can optimize them, whereas
// a variadic apply() inside emit() itself tends to get deoptimized.
// `handler` is either one function (isFn === true) or an array of listeners;
// the array is snapshotted first so a listener removed mid-emit still runs.
function emitNone(handler, isFn, self) {
  if (isFn) {
    handler.call(self);
    return;
  }
  var snapshot = arrayClone(handler, handler.length);
  for (var i = 0; i < snapshot.length; ++i) {
    snapshot[i].call(self);
  }
}
function emitOne(handler, isFn, self, arg1) {
  if (isFn) {
    handler.call(self, arg1);
    return;
  }
  var snapshot = arrayClone(handler, handler.length);
  for (var i = 0; i < snapshot.length; ++i) {
    snapshot[i].call(self, arg1);
  }
}
function emitTwo(handler, isFn, self, arg1, arg2) {
  if (isFn) {
    handler.call(self, arg1, arg2);
    return;
  }
  var snapshot = arrayClone(handler, handler.length);
  for (var i = 0; i < snapshot.length; ++i) {
    snapshot[i].call(self, arg1, arg2);
  }
}
function emitThree(handler, isFn, self, arg1, arg2, arg3) {
  if (isFn) {
    handler.call(self, arg1, arg2, arg3);
    return;
  }
  var snapshot = arrayClone(handler, handler.length);
  for (var i = 0; i < snapshot.length; ++i) {
    snapshot[i].call(self, arg1, arg2, arg3);
  }
}

// Slow path for 4+ emit arguments: args already collected into an array.
function emitMany(handler, isFn, self, args) {
  if (isFn) {
    handler.apply(self, args);
    return;
  }
  var snapshot = arrayClone(handler, handler.length);
  for (var i = 0; i < snapshot.length; ++i) {
    snapshot[i].apply(self, args);
  }
}
152
// Dispatch an event synchronously. Returns true when the event had
// listeners, false otherwise. Emitting 'error' with no 'error' listener
// throws instead: the Error argument itself when one was given, or a
// wrapper Error (with .context set) otherwise.
EventEmitter.prototype.emit = function emit(type) {
  var er, handler, len, args, i, events;
  var doError = (type === 'error');

  events = this._events;
  if (events)
    doError = (doError && events.error == null);
  else if (!doError)
    return false;

  // If there is no 'error' event listener then throw.
  if (doError) {
    if (arguments.length > 1)
      er = arguments[1];
    if (er instanceof Error) {
      throw er; // Unhandled 'error' event
    } else {
      // At least give some kind of context to the user
      var err = new Error('Unhandled "error" event. (' + er + ')');
      err.context = er;
      throw err;
    }
    // NOTE(review): unreachable — both branches above throw; kept from upstream.
    return false;
  }

  handler = events[type];

  if (!handler)
    return false;

  var isFn = typeof handler === 'function';
  len = arguments.length;
  // Dispatch through the fixed-arity helpers above for 0-3 event args.
  switch (len) {
    // fast cases
    case 1:
      emitNone(handler, isFn, this);
      break;
    case 2:
      emitOne(handler, isFn, this, arguments[1]);
      break;
    case 3:
      emitTwo(handler, isFn, this, arguments[1], arguments[2]);
      break;
    case 4:
      emitThree(handler, isFn, this, arguments[1], arguments[2], arguments[3]);
      break;
    // slower
    default:
      args = new Array(len - 1);
      for (i = 1; i < len; i++)
        args[i - 1] = arguments[i];
      emitMany(handler, isFn, this, args);
  }

  return true;
};
209
// Core registration logic behind addListener/on/prependListener.
// A single listener is stored as the bare function; only a second listener
// upgrades the slot to an array. 'newListener' is emitted BEFORE insertion
// so the new listener does not observe its own registration. A one-time
// leak warning fires when a type's listener count exceeds the max limit.
function _addListener(target, type, listener, prepend) {
  var m;
  var events;
  var existing;

  if (typeof listener !== 'function')
    throw new TypeError('"listener" argument must be a function');

  events = target._events;
  if (!events) {
    events = target._events = objectCreate(null);
    target._eventsCount = 0;
  } else {
    // To avoid recursion in the case that type === "newListener"! Before
    // adding it to the listeners, first emit "newListener".
    if (events.newListener) {
      target.emit('newListener', type,
          listener.listener ? listener.listener : listener);

      // Re-assign `events` because a newListener handler could have caused the
      // this._events to be assigned to a new object
      events = target._events;
    }
    existing = events[type];
  }

  if (!existing) {
    // Optimize the case of one listener. Don't need the extra array object.
    existing = events[type] = listener;
    ++target._eventsCount;
  } else {
    if (typeof existing === 'function') {
      // Adding the second element, need to change to array.
      existing = events[type] =
          prepend ? [listener, existing] : [existing, listener];
    } else {
      // If we've already got an array, just append.
      if (prepend) {
        existing.unshift(listener);
      } else {
        existing.push(listener);
      }
    }

    // Check for listener leak
    if (!existing.warned) {
      m = $getMaxListeners(target);
      if (m && m > 0 && existing.length > m) {
        existing.warned = true; // warn at most once per event name
        var w = new Error('Possible EventEmitter memory leak detected. ' +
            existing.length + ' "' + String(type) + '" listeners ' +
            'added. Use emitter.setMaxListeners() to ' +
            'increase limit.');
        w.name = 'MaxListenersExceededWarning';
        w.emitter = target;
        w.type = type;
        w.count = existing.length;
        if (typeof console === 'object' && console.warn) {
          console.warn('%s: %s', w.name, w.message);
        }
      }
    }
  }

  return target;
}
276
// Append a listener for `type`; returns the emitter for chaining.
EventEmitter.prototype.addListener = function addListener(type, listener) {
  return _addListener(this, type, listener, false);
};

// `on` is the canonical alias for addListener.
EventEmitter.prototype.on = EventEmitter.prototype.addListener;

// Same as addListener but inserts at the FRONT of the listener list.
EventEmitter.prototype.prependListener =
    function prependListener(type, listener) {
      return _addListener(this, type, listener, true);
    };
287
// Invoked in place of a once() listener. State lives on `this` (the object
// created by _onceWrap): on first call it unregisters the wrapper, marks
// itself fired, then forwards the arguments to the real listener. The
// fixed-arity call() paths mirror the emit fast paths.
function onceWrapper() {
  if (this.fired) {
    return;
  }
  this.target.removeListener(this.type, this.wrapFn);
  this.fired = true;
  var argc = arguments.length;
  if (argc === 0) {
    return this.listener.call(this.target);
  }
  if (argc === 1) {
    return this.listener.call(this.target, arguments[0]);
  }
  if (argc === 2) {
    return this.listener.call(this.target, arguments[0], arguments[1]);
  }
  if (argc === 3) {
    return this.listener.call(this.target, arguments[0], arguments[1],
        arguments[2]);
  }
  // 4+ arguments: collect and apply (return value intentionally dropped,
  // matching the historical behavior of this slow path).
  var argv = new Array(argc);
  for (var i = 0; i < argc; ++i) {
    argv[i] = arguments[i];
  }
  this.listener.apply(this.target, argv);
}

// Build the wrapper installed by once()/prependOnceListener(). `wrapFn`
// carries `.listener` so removeListener(type, original) can match it.
function _onceWrap(target, type, listener) {
  var state = {
    fired: false,
    wrapFn: undefined,
    target: target,
    type: type,
    listener: listener
  };
  var wrapFn = bind.call(onceWrapper, state);
  wrapFn.listener = listener;
  state.wrapFn = wrapFn;
  return wrapFn;
}
318
// Register a listener that is removed after its first invocation.
EventEmitter.prototype.once = function once(type, listener) {
  if (typeof listener !== 'function')
    throw new TypeError('"listener" argument must be a function');
  this.on(type, _onceWrap(this, type, listener));
  return this;
};

// once() semantics, but the wrapper is inserted at the front of the list.
EventEmitter.prototype.prependOnceListener =
    function prependOnceListener(type, listener) {
      if (typeof listener !== 'function')
        throw new TypeError('"listener" argument must be a function');
      this.prependListener(type, _onceWrap(this, type, listener));
      return this;
    };
333
// Emits a 'removeListener' event if and only if the listener was removed.
// Matches either the listener itself or a once()-wrapper around it
// (via the wrapper's `.listener` property).
EventEmitter.prototype.removeListener =
    function removeListener(type, listener) {
      var list, events, position, i, originalListener;

      if (typeof listener !== 'function')
        throw new TypeError('"listener" argument must be a function');

      events = this._events;
      if (!events)
        return this;

      list = events[type];
      if (!list)
        return this;

      if (list === listener || list.listener === listener) {
        // Single-function slot. NOTE(review): when this was the emitter's
        // last listener of ANY type, the table is reset wholesale and no
        // 'removeListener' event is emitted — matches the upstream events
        // polyfill this copy derives from.
        if (--this._eventsCount === 0)
          this._events = objectCreate(null);
        else {
          delete events[type];
          if (events.removeListener)
            this.emit('removeListener', type, list.listener || listener);
        }
      } else if (typeof list !== 'function') {
        position = -1;

        // Scan backwards: the most recently added match is removed.
        for (i = list.length - 1; i >= 0; i--) {
          if (list[i] === listener || list[i].listener === listener) {
            originalListener = list[i].listener;
            position = i;
            break;
          }
        }

        if (position < 0)
          return this;

        if (position === 0)
          list.shift();
        else
          spliceOne(list, position);

        // Collapse a one-element array back to the bare-function fast path.
        if (list.length === 1)
          events[type] = list[0];

        if (events.removeListener)
          this.emit('removeListener', type, originalListener || listener);
      }

      return this;
    };
386
// Remove every listener for `type`, or for every event when called with no
// arguments. When someone listens for 'removeListener', removals are routed
// through removeListener() so each one is announced.
EventEmitter.prototype.removeAllListeners =
    function removeAllListeners(type) {
      var listeners, events, i;

      events = this._events;
      if (!events)
        return this;

      // not listening for removeListener, no need to emit
      if (!events.removeListener) {
        if (arguments.length === 0) {
          this._events = objectCreate(null);
          this._eventsCount = 0;
        } else if (events[type]) {
          if (--this._eventsCount === 0)
            this._events = objectCreate(null);
          else
            delete events[type];
        }
        return this;
      }

      // emit removeListener for all listeners on all events
      if (arguments.length === 0) {
        var keys = objectKeys(events);
        var key;
        for (i = 0; i < keys.length; ++i) {
          key = keys[i];
          // 'removeListener' itself is cleared LAST, so earlier removals
          // still notify their observers.
          if (key === 'removeListener') continue;
          this.removeAllListeners(key);
        }
        this.removeAllListeners('removeListener');
        this._events = objectCreate(null);
        this._eventsCount = 0;
        return this;
      }

      listeners = events[type];

      if (typeof listeners === 'function') {
        this.removeListener(type, listeners);
      } else if (listeners) {
        // LIFO order
        for (i = listeners.length - 1; i >= 0; i--) {
          this.removeListener(type, listeners[i]);
        }
      }

      return this;
    };
437
// Shared implementation for listeners()/rawListeners(). Always returns a
// fresh array so callers cannot mutate the internal store. With
// unwrap=true, once()-wrappers are replaced by the callback they wrap.
function _listeners(target, type, unwrap) {
  var events = target._events;

  if (!events)
    return [];

  var evlistener = events[type];
  if (!evlistener)
    return [];

  if (typeof evlistener === 'function')
    return unwrap ? [evlistener.listener || evlistener] : [evlistener];

  return unwrap ? unwrapListeners(evlistener) : arrayClone(evlistener, evlistener.length);
}

// Listener functions as the caller registered them (wrappers unwrapped).
EventEmitter.prototype.listeners = function listeners(type) {
  return _listeners(this, type, true);
};

// Listener functions as stored internally (once()-wrappers included).
EventEmitter.prototype.rawListeners = function rawListeners(type) {
  return _listeners(this, type, false);
};
461
// Static form kept for backwards compatibility with the old
// `require('events').listenerCount(emitter, type)` API. Delegates to the
// emitter's own method when present.
EventEmitter.listenerCount = function(emitter, type) {
  if (typeof emitter.listenerCount === 'function') {
    return emitter.listenerCount(type);
  } else {
    return listenerCount.call(emitter, type);
  }
};

EventEmitter.prototype.listenerCount = listenerCount;
// Number of listeners registered on `this` for `type`: 0 for an unused
// emitter or unknown event, 1 for a bare-function slot, otherwise the
// length of the listener array.
function listenerCount(type) {
  var table = this._events;
  if (!table) {
    return 0;
  }
  var entry = table[type];
  if (!entry) {
    return 0;
  }
  return typeof entry === 'function' ? 1 : entry.length;
}
486
// Keys of all events with at least one listener; [] when none registered.
// NOTE(review): uses Reflect.ownKeys (ES2015+) while the rest of this shim
// targets much older engines — so symbol keys are included where supported.
EventEmitter.prototype.eventNames = function eventNames() {
  return this._eventsCount > 0 ? Reflect.ownKeys(this._events) : [];
};
490
// Remove list[index] in place by shifting the tail left one slot and
// dropping the last element. About 1.5x faster than Array#splice(i, 1).
function spliceOne(list, index) {
  var last = list.length - 1;
  for (var j = index; j < last; ++j) {
    list[j] = list[j + 1];
  }
  list.pop();
}
497
// Shallow-copy the first n elements of arr into a new array (used to
// snapshot listener lists before emitting).
function arrayClone(arr, n) {
  var copy = new Array(n);
  var i = 0;
  while (i < n) {
    copy[i] = arr[i];
    i += 1;
  }
  return copy;
}
504
// Map an internal listener array to caller-visible functions: once()
// wrappers expose the original callback via `.listener`; plain listeners
// pass through unchanged.
function unwrapListeners(arr) {
  var result = new Array(arr.length);
  for (var idx = 0; idx < result.length; ++idx) {
    var entry = arr[idx];
    result[idx] = entry.listener || entry;
  }
  return result;
}
512
// Object.create fallback for pre-ES5 engines: build an object whose
// prototype is `proto` via a throwaway constructor.
function objectCreatePolyfill(proto) {
  function Surrogate() {}
  Surrogate.prototype = proto;
  return new Surrogate();
}
// Object.keys fallback for pre-ES5 engines: collect own enumerable keys.
// BUG FIX: this copy returned the loop variable `k` (i.e. the LAST visited
// key, or undefined for an empty object) instead of the accumulated `keys`
// array, breaking every caller that iterates the result (e.g.
// removeAllListeners' objectKeys(events) loop).
function objectKeysPolyfill(obj) {
  var keys = [];
  for (var k in obj) if (Object.prototype.hasOwnProperty.call(obj, k)) {
    keys.push(k);
  }
  return keys;
}
// Function.prototype.bind fallback: returns a wrapper that forwards all
// call-time arguments to `this` (the target function) with `context` as
// receiver. Note: partial application of bound arguments is not supported;
// this module only uses it as `bind.call(fn, ctx)`.
function functionBindPolyfill(context) {
  var target = this;
  return function () {
    return target.apply(context, arguments);
  };
}
531
532},{}],2:[function(_dereq_,module,exports){
// shim for using process in browser
var process = module.exports = {};

// cached from whatever global is present so that test runners that stub it
// don't break things. But we need to wrap it in a try catch in case it is
// wrapped in strict mode code which doesn't define any globals. It's inside a
// function because try/catches deoptimize in certain engines.

var cachedSetTimeout;
var cachedClearTimeout;
543
// Placeholder timer functions installed when the environment exposes no
// setTimeout/clearTimeout at load time; invoking them is always an error.
// (The misspelled name `defaultSetTimout` is part of the existing interface.)
function defaultSetTimout() {
  var message = 'setTimeout has not been defined';
  throw new Error(message);
}
function defaultClearTimeout() {
  var message = 'clearTimeout has not been defined';
  throw new Error(message);
}
// Capture the host's timer functions once at load time. The try/catch
// guards against sandboxes where referencing the bare identifiers throws.
(function () {
  try {
    if (typeof setTimeout === 'function') {
      cachedSetTimeout = setTimeout;
    } else {
      cachedSetTimeout = defaultSetTimout;
    }
  } catch (e) {
    cachedSetTimeout = defaultSetTimout;
  }
  try {
    if (typeof clearTimeout === 'function') {
      cachedClearTimeout = clearTimeout;
    } else {
      cachedClearTimeout = defaultClearTimeout;
    }
  } catch (e) {
    cachedClearTimeout = defaultClearTimeout;
  }
} ())
// Schedule `fun` on a macrotask (setTimeout 0), falling back through
// progressively more defensive call styles for old-IE environments where
// the cached reference can only be invoked with an explicit receiver.
function runTimeout(fun) {
  if (cachedSetTimeout === setTimeout) {
    // normal environments in sane situations
    return setTimeout(fun, 0);
  }
  // if setTimeout wasn't available at load time but was later defined
  if ((cachedSetTimeout === defaultSetTimout || !cachedSetTimeout) && setTimeout) {
    cachedSetTimeout = setTimeout;
    return setTimeout(fun, 0);
  }
  try {
    // when somebody has screwed with setTimeout but no I.E. madness
    return cachedSetTimeout(fun, 0);
  } catch(e){
    try {
      // When we are in I.E. but the script has been evaled so I.E. doesn't
      // trust the global object when called normally
      return cachedSetTimeout.call(null, fun, 0);
    } catch(e){
      // same as above but when it's a version of I.E. that must have the
      // global object for 'this'; hopefully our context is correct,
      // otherwise it will throw a global error
      return cachedSetTimeout.call(this, fun, 0);
    }
  }
}
// Cancel a timer created by runTimeout, mirroring its cascade of
// fallback invocation styles for old-IE environments.
function runClearTimeout(marker) {
  if (cachedClearTimeout === clearTimeout) {
    // normal environments in sane situations
    return clearTimeout(marker);
  }
  // if clearTimeout wasn't available at load time but was later defined
  if ((cachedClearTimeout === defaultClearTimeout || !cachedClearTimeout) && clearTimeout) {
    cachedClearTimeout = clearTimeout;
    return clearTimeout(marker);
  }
  try {
    // when somebody has screwed with clearTimeout but no I.E. madness
    return cachedClearTimeout(marker);
  } catch (e){
    try {
      // When we are in I.E. but the script has been evaled so I.E. doesn't
      // trust the global object when called normally
      return cachedClearTimeout.call(null, marker);
    } catch (e){
      // same as above but when it's a version of I.E. that must have the
      // global object for 'this'. Some versions of I.E. have different
      // rules for clearTimeout vs setTimeout.
      return cachedClearTimeout.call(this, marker);
    }
  }
}
// nextTick scheduler state.
var queue = [];          // Items waiting to run
var draining = false;    // true while drainQueue is executing
var currentQueue;        // batch currently being drained
var queueIndex = -1;     // cursor into currentQueue
626
// Safety-net timer handler scheduled by drainQueue: if it fires while state
// still says "draining" (e.g. a task threw and unwound drainQueue), restore
// the unprocessed tasks to the front of the queue and resume draining.
function cleanUpNextTick() {
  if (!draining || !currentQueue) {
    return;
  }
  draining = false;
  if (currentQueue.length) {
    queue = currentQueue.concat(queue);
  } else {
    queueIndex = -1;
  }
  if (queue.length) {
    drainQueue();
  }
}
641
// Run all queued nextTick Items. Tasks enqueued while draining land in a
// fresh `queue` array and are picked up by the outer loop, so the drain
// continues until the queue is genuinely empty.
function drainQueue() {
  if (draining) {
    return;
  }
  // Arm the cleanup handler in case a task throws mid-drain.
  var timeout = runTimeout(cleanUpNextTick);
  draining = true;

  var len = queue.length;
  while(len) {
    currentQueue = queue;
    queue = [];
    while (++queueIndex < len) {
      if (currentQueue) {
        currentQueue[queueIndex].run();
      }
    }
    queueIndex = -1;
    len = queue.length;
  }
  currentQueue = null;
  draining = false;
  runClearTimeout(timeout);
}
665
// Browser stand-in for process.nextTick: queue `fun` (with any extra
// arguments) and, if no drain is pending or running, schedule one via
// setTimeout(0). Note: macrotask timing, not a true microtask.
process.nextTick = function (fun) {
  var args = new Array(arguments.length - 1);
  if (arguments.length > 1) {
    for (var i = 1; i < arguments.length; i++) {
      args[i - 1] = arguments[i];
    }
  }
  queue.push(new Item(fun, args));
  if (queue.length === 1 && !draining) {
    runTimeout(drainQueue);
  }
};
678
// v8 likes predictable objects: a fixed-shape record pairing a queued
// callback with its argument array. `.fun`/`.array` names are part of the
// shape relied on by nextTick/drainQueue.
function Item(callback, argv) {
  this.fun = callback;
  this.array = argv;
}
Item.prototype.run = function () {
  var task = this.fun;
  task.apply(null, this.array);
};
// Static stand-ins for Node process metadata in the browser.
process.title = 'browser';
process.browser = true;
process.env = {};
process.argv = [];
process.version = ''; // empty string to avoid regexp issues
process.versions = {};

function noop() {}

// The browser process object emits nothing; all EventEmitter-style
// methods are inert no-ops.
process.on = noop;
process.addListener = noop;
process.once = noop;
process.off = noop;
process.removeListener = noop;
process.removeAllListeners = noop;
process.emit = noop;
process.prependListener = noop;
process.prependOnceListener = noop;

process.listeners = function (name) { return [] }

// Unsupported Node internals fail loudly rather than silently.
process.binding = function (name) {
  throw new Error('process.binding is not supported');
};

process.cwd = function () { return '/' };
process.chdir = function (dir) {
  throw new Error('process.chdir is not supported');
};
process.umask = function() { return 0; };
717
718},{}],3:[function(_dereq_,module,exports){
719(function (factory) {
720 if (typeof exports === 'object') {
721 // Node/CommonJS
722 module.exports = factory();
723 } else if (typeof define === 'function' && define.amd) {
724 // AMD
725 define(factory);
726 } else {
727 // Browser globals (with support for web workers)
728 var glob;
729
730 try {
731 glob = window;
732 } catch (e) {
733 glob = self;
734 }
735
736 glob.SparkMD5 = factory();
737 }
738}(function (undefined) {
739
740 'use strict';
741
742 /*
743 * Fastest md5 implementation around (JKM md5).
744 * Credits: Joseph Myers
745 *
746 * @see http://www.myersdaily.org/joseph/javascript/md5-text.html
747 * @see http://jsperf.com/md5-shootout/7
748 */
749
750 /* this function is much faster,
751 so if possible we use it. Some IEs
752 are the only ones I know of that
753 need the idiotic second function,
754 generated by an if clause. */
    // Fast 32-bit add (relies on JS bitwise ops truncating to int32); a
    // self-test further down swaps in a 16-bit-split version on engines
    // where this produces wrong digests. hex_chr: nibble -> hex digit.
    var add32 = function (a, b) {
        return (a + b) & 0xFFFFFFFF;
    },
    hex_chr = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'];
759
760
    // Generic MD5 step: a = rotl32(a + q + x + t, s) + b, built on the
    // overflow-safe add32 above.
    // NOTE(review): not referenced by the fully-unrolled md5cycle in this
    // chunk — appears to be kept for reference only; confirm before removing.
    function cmn(q, a, b, x, s, t) {
        a = add32(add32(a, q), add32(x, t));
        return add32((a << s) | (a >>> (32 - s)), b);
    }
765
    // One MD5 compression: fold 16 little-endian 32-bit words `k` (one
    // 64-byte block) into the running 4-word state `x`, mutating it in
    // place. Fully unrolled form of the four rounds of RFC 1321; the
    // trailing `| 0` keeps every intermediate in int32 range.
    function md5cycle(x, k) {
        var a = x[0],
            b = x[1],
            c = x[2],
            d = x[3];

        // Round 1: F(b,c,d) = (b & c) | (~b & d); rotations 7/12/17/22.
        a += (b & c | ~b & d) + k[0] - 680876936 | 0;
        a = (a << 7 | a >>> 25) + b | 0;
        d += (a & b | ~a & c) + k[1] - 389564586 | 0;
        d = (d << 12 | d >>> 20) + a | 0;
        c += (d & a | ~d & b) + k[2] + 606105819 | 0;
        c = (c << 17 | c >>> 15) + d | 0;
        b += (c & d | ~c & a) + k[3] - 1044525330 | 0;
        b = (b << 22 | b >>> 10) + c | 0;
        a += (b & c | ~b & d) + k[4] - 176418897 | 0;
        a = (a << 7 | a >>> 25) + b | 0;
        d += (a & b | ~a & c) + k[5] + 1200080426 | 0;
        d = (d << 12 | d >>> 20) + a | 0;
        c += (d & a | ~d & b) + k[6] - 1473231341 | 0;
        c = (c << 17 | c >>> 15) + d | 0;
        b += (c & d | ~c & a) + k[7] - 45705983 | 0;
        b = (b << 22 | b >>> 10) + c | 0;
        a += (b & c | ~b & d) + k[8] + 1770035416 | 0;
        a = (a << 7 | a >>> 25) + b | 0;
        d += (a & b | ~a & c) + k[9] - 1958414417 | 0;
        d = (d << 12 | d >>> 20) + a | 0;
        c += (d & a | ~d & b) + k[10] - 42063 | 0;
        c = (c << 17 | c >>> 15) + d | 0;
        b += (c & d | ~c & a) + k[11] - 1990404162 | 0;
        b = (b << 22 | b >>> 10) + c | 0;
        a += (b & c | ~b & d) + k[12] + 1804603682 | 0;
        a = (a << 7 | a >>> 25) + b | 0;
        d += (a & b | ~a & c) + k[13] - 40341101 | 0;
        d = (d << 12 | d >>> 20) + a | 0;
        c += (d & a | ~d & b) + k[14] - 1502002290 | 0;
        c = (c << 17 | c >>> 15) + d | 0;
        b += (c & d | ~c & a) + k[15] + 1236535329 | 0;
        b = (b << 22 | b >>> 10) + c | 0;

        // Round 2: G(b,c,d) = (b & d) | (c & ~d); rotations 5/9/14/20.
        a += (b & d | c & ~d) + k[1] - 165796510 | 0;
        a = (a << 5 | a >>> 27) + b | 0;
        d += (a & c | b & ~c) + k[6] - 1069501632 | 0;
        d = (d << 9 | d >>> 23) + a | 0;
        c += (d & b | a & ~b) + k[11] + 643717713 | 0;
        c = (c << 14 | c >>> 18) + d | 0;
        b += (c & a | d & ~a) + k[0] - 373897302 | 0;
        b = (b << 20 | b >>> 12) + c | 0;
        a += (b & d | c & ~d) + k[5] - 701558691 | 0;
        a = (a << 5 | a >>> 27) + b | 0;
        d += (a & c | b & ~c) + k[10] + 38016083 | 0;
        d = (d << 9 | d >>> 23) + a | 0;
        c += (d & b | a & ~b) + k[15] - 660478335 | 0;
        c = (c << 14 | c >>> 18) + d | 0;
        b += (c & a | d & ~a) + k[4] - 405537848 | 0;
        b = (b << 20 | b >>> 12) + c | 0;
        a += (b & d | c & ~d) + k[9] + 568446438 | 0;
        a = (a << 5 | a >>> 27) + b | 0;
        d += (a & c | b & ~c) + k[14] - 1019803690 | 0;
        d = (d << 9 | d >>> 23) + a | 0;
        c += (d & b | a & ~b) + k[3] - 187363961 | 0;
        c = (c << 14 | c >>> 18) + d | 0;
        b += (c & a | d & ~a) + k[8] + 1163531501 | 0;
        b = (b << 20 | b >>> 12) + c | 0;
        a += (b & d | c & ~d) + k[13] - 1444681467 | 0;
        a = (a << 5 | a >>> 27) + b | 0;
        d += (a & c | b & ~c) + k[2] - 51403784 | 0;
        d = (d << 9 | d >>> 23) + a | 0;
        c += (d & b | a & ~b) + k[7] + 1735328473 | 0;
        c = (c << 14 | c >>> 18) + d | 0;
        b += (c & a | d & ~a) + k[12] - 1926607734 | 0;
        b = (b << 20 | b >>> 12) + c | 0;

        // Round 3: H(b,c,d) = b ^ c ^ d; rotations 4/11/16/23.
        a += (b ^ c ^ d) + k[5] - 378558 | 0;
        a = (a << 4 | a >>> 28) + b | 0;
        d += (a ^ b ^ c) + k[8] - 2022574463 | 0;
        d = (d << 11 | d >>> 21) + a | 0;
        c += (d ^ a ^ b) + k[11] + 1839030562 | 0;
        c = (c << 16 | c >>> 16) + d | 0;
        b += (c ^ d ^ a) + k[14] - 35309556 | 0;
        b = (b << 23 | b >>> 9) + c | 0;
        a += (b ^ c ^ d) + k[1] - 1530992060 | 0;
        a = (a << 4 | a >>> 28) + b | 0;
        d += (a ^ b ^ c) + k[4] + 1272893353 | 0;
        d = (d << 11 | d >>> 21) + a | 0;
        c += (d ^ a ^ b) + k[7] - 155497632 | 0;
        c = (c << 16 | c >>> 16) + d | 0;
        b += (c ^ d ^ a) + k[10] - 1094730640 | 0;
        b = (b << 23 | b >>> 9) + c | 0;
        a += (b ^ c ^ d) + k[13] + 681279174 | 0;
        a = (a << 4 | a >>> 28) + b | 0;
        d += (a ^ b ^ c) + k[0] - 358537222 | 0;
        d = (d << 11 | d >>> 21) + a | 0;
        c += (d ^ a ^ b) + k[3] - 722521979 | 0;
        c = (c << 16 | c >>> 16) + d | 0;
        b += (c ^ d ^ a) + k[6] + 76029189 | 0;
        b = (b << 23 | b >>> 9) + c | 0;
        a += (b ^ c ^ d) + k[9] - 640364487 | 0;
        a = (a << 4 | a >>> 28) + b | 0;
        d += (a ^ b ^ c) + k[12] - 421815835 | 0;
        d = (d << 11 | d >>> 21) + a | 0;
        c += (d ^ a ^ b) + k[15] + 530742520 | 0;
        c = (c << 16 | c >>> 16) + d | 0;
        b += (c ^ d ^ a) + k[2] - 995338651 | 0;
        b = (b << 23 | b >>> 9) + c | 0;

        // Round 4: I(b,c,d) = c ^ (b | ~d); rotations 6/10/15/21.
        a += (c ^ (b | ~d)) + k[0] - 198630844 | 0;
        a = (a << 6 | a >>> 26) + b | 0;
        d += (b ^ (a | ~c)) + k[7] + 1126891415 | 0;
        d = (d << 10 | d >>> 22) + a | 0;
        c += (a ^ (d | ~b)) + k[14] - 1416354905 | 0;
        c = (c << 15 | c >>> 17) + d | 0;
        b += (d ^ (c | ~a)) + k[5] - 57434055 | 0;
        b = (b << 21 | b >>> 11) + c | 0;
        a += (c ^ (b | ~d)) + k[12] + 1700485571 | 0;
        a = (a << 6 | a >>> 26) + b | 0;
        d += (b ^ (a | ~c)) + k[3] - 1894986606 | 0;
        d = (d << 10 | d >>> 22) + a | 0;
        c += (a ^ (d | ~b)) + k[10] - 1051523 | 0;
        c = (c << 15 | c >>> 17) + d | 0;
        b += (d ^ (c | ~a)) + k[1] - 2054922799 | 0;
        b = (b << 21 | b >>> 11) + c | 0;
        a += (c ^ (b | ~d)) + k[8] + 1873313359 | 0;
        a = (a << 6 | a >>> 26) + b | 0;
        d += (b ^ (a | ~c)) + k[15] - 30611744 | 0;
        d = (d << 10 | d >>> 22) + a | 0;
        c += (a ^ (d | ~b)) + k[6] - 1560198380 | 0;
        c = (c << 15 | c >>> 17) + d | 0;
        b += (d ^ (c | ~a)) + k[13] + 1309151649 | 0;
        b = (b << 21 | b >>> 11) + c | 0;
        a += (c ^ (b | ~d)) + k[4] - 145523070 | 0;
        a = (a << 6 | a >>> 26) + b | 0;
        d += (b ^ (a | ~c)) + k[11] - 1120210379 | 0;
        d = (d << 10 | d >>> 22) + a | 0;
        c += (a ^ (d | ~b)) + k[2] + 718787259 | 0;
        c = (c << 15 | c >>> 17) + d | 0;
        b += (d ^ (c | ~a)) + k[9] - 343485551 | 0;
        b = (b << 21 | b >>> 11) + c | 0;

        // Davies-Meyer feed-forward: add this block's result into the state.
        x[0] = a + x[0] | 0;
        x[1] = b + x[1] | 0;
        x[2] = c + x[2] | 0;
        x[3] = d + x[3] | 0;
    }
909
910 function md5blk(s) {
911 var md5blks = [],
912 i; /* Andy King said do it this way. */
913
914 for (i = 0; i < 64; i += 4) {
915 md5blks[i >> 2] = s.charCodeAt(i) + (s.charCodeAt(i + 1) << 8) + (s.charCodeAt(i + 2) << 16) + (s.charCodeAt(i + 3) << 24);
916 }
917 return md5blks;
918 }
919
920 function md5blk_array(a) {
921 var md5blks = [],
922 i; /* Andy King said do it this way. */
923
924 for (i = 0; i < 64; i += 4) {
925 md5blks[i >> 2] = a[i] + (a[i + 1] << 8) + (a[i + 2] << 16) + (a[i + 3] << 24);
926 }
927 return md5blks;
928 }
929
    // MD5 of a (UTF-8/binary) string: run md5cycle over each full 64-byte
    // block, then pad the remainder per RFC 1321 (0x80 marker + 64-bit
    // bit-length) and return the final 4-word state.
    function md51(s) {
        var n = s.length,
            state = [1732584193, -271733879, -1732584194, 271733878], // MD5 IV
            i,
            length,
            tail,
            tmp,
            lo,
            hi;

        for (i = 64; i <= n; i += 64) {
            md5cycle(state, md5blk(s.substring(i - 64, i)));
        }
        // Remaining < 64 bytes, packed into the padding block.
        s = s.substring(i - 64);
        length = s.length;
        tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
        for (i = 0; i < length; i += 1) {
            tail[i >> 2] |= s.charCodeAt(i) << ((i % 4) << 3);
        }
        // Append the 0x80 terminator; if it lands past byte 55 there is no
        // room for the 64-bit length, so flush an extra block first.
        tail[i >> 2] |= 0x80 << ((i % 4) << 3);
        if (i > 55) {
            md5cycle(state, tail);
            for (i = 0; i < 16; i += 1) {
                tail[i] = 0;
            }
        }

        // Beware that the final length might not fit in 32 bits so we take care of that
        tmp = n * 8;
        tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
        lo = parseInt(tmp[2], 16);
        hi = parseInt(tmp[1], 16) || 0;

        tail[14] = lo;
        tail[15] = hi;

        md5cycle(state, tail);
        return state;
    }
969
    // MD5 of a Uint8Array: same flow as md51() but reads bytes directly
    // instead of charCodeAt.
    function md51_array(a) {
        var n = a.length,
            state = [1732584193, -271733879, -1732584194, 271733878], // MD5 IV
            i,
            length,
            tail,
            tmp,
            lo,
            hi;

        for (i = 64; i <= n; i += 64) {
            md5cycle(state, md5blk_array(a.subarray(i - 64, i)));
        }

        // Not sure if it is a bug, however IE10 will always produce a sub array of length 1
        // containing the last element of the parent array if the sub array specified starts
        // beyond the length of the parent array - weird.
        // https://connect.microsoft.com/IE/feedback/details/771452/typed-array-subarray-issue
        a = (i - 64) < n ? a.subarray(i - 64) : new Uint8Array(0);

        length = a.length;
        tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
        for (i = 0; i < length; i += 1) {
            tail[i >> 2] |= a[i] << ((i % 4) << 3);
        }

        // Append the 0x80 terminator; if it lands past byte 55 there is no
        // room for the 64-bit length, so flush an extra block first.
        tail[i >> 2] |= 0x80 << ((i % 4) << 3);
        if (i > 55) {
            md5cycle(state, tail);
            for (i = 0; i < 16; i += 1) {
                tail[i] = 0;
            }
        }

        // Beware that the final length might not fit in 32 bits so we take care of that
        tmp = n * 8;
        tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
        lo = parseInt(tmp[2], 16);
        hi = parseInt(tmp[1], 16) || 0;

        tail[14] = lo;
        tail[15] = hi;

        md5cycle(state, tail);

        return state;
    }
1017
    // Render one 32-bit state word as 8 lowercase hex digits,
    // least-significant byte first (MD5's little-endian digest order).
    function rhex(n) {
        var s = '',
            j;
        for (j = 0; j < 4; j += 1) {
            s += hex_chr[(n >> (j * 8 + 4)) & 0x0F] + hex_chr[(n >> (j * 8)) & 0x0F];
        }
        return s;
    }
1026
    // Convert a 4-word MD5 state into the 32-character hex digest.
    // NOTE(review): overwrites x's elements with strings before joining
    // (in-place mutation) — kept from upstream; callers pass a fresh state.
    function hex(x) {
        var i;
        for (i = 0; i < x.length; i += 1) {
            x[i] = rhex(x[i]);
        }
        return x.join('');
    }
1034
    // In some cases the fast add32 function cannot be used..
    // Self-test: if hashing 'hello' yields the wrong digest, this engine's
    // 32-bit overflow behavior is unreliable, so swap in an add that
    // combines the 16-bit halves explicitly.
    if (hex(md51('hello')) !== '5d41402abc4b2a76b9719d911017c592') {
        add32 = function (x, y) {
            var lsw = (x & 0xFFFF) + (y & 0xFFFF),
                msw = (x >> 16) + (y >> 16) + (lsw >> 16);
            return (msw << 16) | (lsw & 0xFFFF);
        };
    }
1043
1044 // ---------------------------------------------------
1045
1046 /**
1047 * ArrayBuffer slice polyfill.
1048 *
1049 * @see https://github.com/ttaubert/node-arraybuffer-slice
1050 */
1051
// Installs the polyfill only where ArrayBuffer exists but lacks slice
// (old engines); modern runtimes keep their native implementation.
if (typeof ArrayBuffer !== 'undefined' && !ArrayBuffer.prototype.slice) {
  (function () {
    // Clamps val into [0, length]; negative values count from the end.
    function clamp(val, length) {
      val = (val | 0) || 0;

      if (val < 0) {
        return Math.max(val + length, 0);
      }

      return Math.min(val, length);
    }

    ArrayBuffer.prototype.slice = function (from, to) {
      var length = this.byteLength,
          begin = clamp(from, length),
          end = length,
          num,
          target,
          targetArray,
          sourceArray;

      if (to !== undefined) {
        end = clamp(to, length);
      }

      // Empty result when the range is inverted (mirrors native slice).
      if (begin > end) {
        return new ArrayBuffer(0);
      }

      num = end - begin;
      target = new ArrayBuffer(num);
      targetArray = new Uint8Array(target);

      // Copy bytes [begin, end) of this buffer into the fresh one.
      sourceArray = new Uint8Array(this, begin, num);
      targetArray.set(sourceArray);

      return target;
    };
  })();
}
1092
1093 // ---------------------------------------------------
1094
1095 /**
1096 * Helpers.
1097 */
1098
/**
 * Encodes a JS (UTF-16) string as a binary string of UTF-8 bytes.
 * Pure-ASCII strings are returned unchanged (already one byte per char).
 *
 * @param {String} str The string to encode
 * @return {String} Binary string (one char per UTF-8 byte)
 */
function toUtf8(str) {
  var hasNonAscii = /[\u0080-\uFFFF]/.test(str);
  if (!hasNonAscii) {
    return str;
  }
  // Classic UTF-8 escape trick: percent-encode, then fold each %XX to a char.
  return unescape(encodeURIComponent(str));
}
1106
/**
 * Packs a binary string (one byte per char) into an ArrayBuffer.
 *
 * @param {String} str Binary string to convert
 * @param {Boolean} returnUInt8Array When true, return the Uint8Array view
 *                                   instead of the raw ArrayBuffer
 * @return {ArrayBuffer|Uint8Array}
 */
function utf8Str2ArrayBuffer(str, returnUInt8Array) {
  var size = str.length;
  var view = new Uint8Array(new ArrayBuffer(size));

  for (var pos = 0; pos < size; pos += 1) {
    view[pos] = str.charCodeAt(pos);
  }

  return returnUInt8Array ? view : view.buffer;
}
1119
/**
 * Converts an ArrayBuffer (or typed array) to a binary string, one char
 * per byte.
 *
 * The conversion is chunked because Function.prototype.apply spreads the
 * array into call arguments, and engines cap the argument count per call;
 * a single apply over a large buffer throws a RangeError.
 *
 * @param {ArrayBuffer|Uint8Array} buff The buffer to convert
 * @return {String} The binary string
 */
function arrayBuffer2Utf8Str(buff) {
  var CHUNK_SIZE = 0x8000; // 32k args per apply() call is safe in all engines
  var bytes = new Uint8Array(buff);
  var result = '';

  for (var i = 0; i < bytes.length; i += CHUNK_SIZE) {
    result += String.fromCharCode.apply(null, bytes.subarray(i, i + CHUNK_SIZE));
  }

  return result;
}
1123
/**
 * Concatenates two buffers into one newly-allocated buffer.
 *
 * @param {ArrayBuffer|Uint8Array} first First buffer
 * @param {ArrayBuffer|Uint8Array} second Second buffer
 * @param {Boolean} returnUInt8Array When true, return the Uint8Array view
 *                                   instead of the raw ArrayBuffer
 * @return {ArrayBuffer|Uint8Array}
 */
function concatenateArrayBuffers(first, second, returnUInt8Array) {
  var firstView = new Uint8Array(first);
  var secondView = new Uint8Array(second);
  var joined = new Uint8Array(firstView.byteLength + secondView.byteLength);

  joined.set(firstView, 0);
  joined.set(secondView, firstView.byteLength);

  return returnUInt8Array ? joined : joined.buffer;
}
1132
/**
 * Converts a hex string into the binary string it encodes
 * (two hex digits become one output char; a trailing odd digit is ignored,
 * as in the original).
 *
 * @param {String} hex Hex string
 * @return {String} Binary string
 */
function hexToBinaryString(hex) {
  var codes = [];

  for (var pos = 0; pos + 1 < hex.length; pos += 2) {
    codes.push(parseInt(hex.substr(pos, 2), 16));
  }

  return String.fromCharCode.apply(String, codes);
}
1144
1145 // ---------------------------------------------------
1146
1147 /**
1148 * SparkMD5 OOP implementation.
1149 *
1150 * Use this class to perform an incremental md5, otherwise use the
1151 * static methods instead.
1152 */
1153
function SparkMD5() {
  // call reset to init the instance (_buff, _length, _hash)
  this.reset();
}
1158
1159 /**
1160 * Appends a string.
1161 * A conversion will be applied if an utf8 string is detected.
1162 *
1163 * @param {String} str The string to be appended
1164 *
1165 * @return {SparkMD5} The instance itself
1166 */
/**
 * Appends a (possibly non-ASCII) string; it is UTF-8 encoded before being
 * fed to appendBinary.
 *
 * @param {String} str The string to be appended
 * @return {SparkMD5} The instance itself
 */
SparkMD5.prototype.append = function (str) {
  // appendBinary already returns `this`, so simply forward its result
  return this.appendBinary(toUtf8(str));
};
1174
1175 /**
1176 * Appends a binary string.
1177 *
1178 * @param {String} contents The binary string to be appended
1179 *
1180 * @return {SparkMD5} The instance itself
1181 */
/**
 * Appends a binary string (one byte per char) to the hash.
 *
 * @param {String} contents The binary string to be appended
 * @return {SparkMD5} The instance itself
 */
SparkMD5.prototype.appendBinary = function (contents) {
  this._buff += contents;
  this._length += contents.length;

  var total = this._buff.length;
  var offset = 64;

  // Consume every complete 64-byte block from the pending buffer.
  while (offset <= total) {
    md5cycle(this._hash, md5blk(this._buff.substring(offset - 64, offset)));
    offset += 64;
  }

  // Keep only the unconsumed remainder (< 64 bytes) for the next call.
  this._buff = this._buff.substring(offset - 64);

  return this;
};
1197
1198 /**
1199 * Finishes the incremental computation, reseting the internal state and
1200 * returning the result.
1201 *
1202 * @param {Boolean} raw True to get the raw string, false to get the hex string
1203 *
1204 * @return {String} The result
1205 */
SparkMD5.prototype.end = function (raw) {
  var buff = this._buff,
      length = buff.length,
      i,
      tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
      ret;

  // Pack the leftover (< 64) bytes little-endian into the 16-word tail block.
  for (i = 0; i < length; i += 1) {
    tail[i >> 2] |= buff.charCodeAt(i) << ((i % 4) << 3);
  }

  // Apply MD5 padding plus message length, run the final cycle(s).
  this._finish(tail, length);
  ret = hex(this._hash);

  if (raw) {
    ret = hexToBinaryString(ret);
  }

  // The instance stays reusable after end(); state is re-initialized here.
  this.reset();

  return ret;
};
1228
1229 /**
1230 * Resets the internal state of the computation.
1231 *
1232 * @return {SparkMD5} The instance itself
1233 */
SparkMD5.prototype.reset = function () {
  this._buff = '';  // pending, not-yet-hashed bytes (binary string)
  this._length = 0; // total number of bytes appended so far
  // MD5 initialization vector (A, B, C, D)
  this._hash = [1732584193, -271733879, -1732584194, 271733878];

  return this;
};
1241
1242 /**
1243 * Gets the internal state of the computation.
1244 *
1245 * @return {Object} The state
1246 */
SparkMD5.prototype.getState = function () {
  return {
    buff: this._buff,
    length: this._length,
    hash: this._hash.slice() // copy so further hashing can't mutate the snapshot
  };
};
1254
/**
 * Sets the internal state of the computation (counterpart of getState).
 *
 * @param {Object} state The state
 *
 * @return {SparkMD5} The instance itself
 */
SparkMD5.prototype.setState = function (state) {
  this._buff = state.buff;
  this._length = state.length;
  this._hash = state.hash;

  return this;
};
1269
1270 /**
1271 * Releases memory used by the incremental buffer and other additional
1272 * resources. If you plan to use the instance again, use reset instead.
1273 */
SparkMD5.prototype.destroy = function () {
  // Instance is unusable afterwards until reset() re-creates these fields.
  delete this._hash;
  delete this._buff;
  delete this._length;
};
1279
1280 /**
1281 * Finish the final calculation based on the tail.
1282 *
1283 * @param {Array} tail The tail (will be modified)
1284 * @param {Number} length The length of the remaining buffer
1285 */
SparkMD5.prototype._finish = function (tail, length) {
  var i = length,
      tmp,
      lo,
      hi;

  // Append the mandatory 0x80 padding byte right after the data.
  tail[i >> 2] |= 0x80 << ((i % 4) << 3);
  if (i > 55) {
    // No room left for the 64-bit message length in this block; flush it
    // and continue with an all-zero block.
    md5cycle(this._hash, tail);
    for (i = 0; i < 16; i += 1) {
      tail[i] = 0;
    }
  }

  // Do the final computation based on the tail and length
  // Beware that the final length may not fit in 32 bits so we take care of that
  // (split the hex form of the bit length into low/high 32-bit words)
  tmp = this._length * 8;
  tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
  lo = parseInt(tmp[2], 16);
  hi = parseInt(tmp[1], 16) || 0;

  tail[14] = lo;
  tail[15] = hi;
  md5cycle(this._hash, tail);
};
1311
1312 /**
1313 * Performs the md5 hash on a string.
1314 * A conversion will be applied if utf8 string is detected.
1315 *
1316 * @param {String} str The string
1317 * @param {Boolean} [raw] True to get the raw string, false to get the hex string
1318 *
1319 * @return {String} The result
1320 */
/**
 * Performs the md5 hash on a string (one-shot convenience).
 * A UTF-8 conversion is applied first when non-ASCII chars are present.
 *
 * @param {String} str The string
 * @param {Boolean} [raw] True to get the raw string, false to get the hex string
 * @return {String} The result
 */
SparkMD5.hash = function (str, raw) {
  var binary = toUtf8(str);
  return SparkMD5.hashBinary(binary, raw);
};
1326
1327 /**
1328 * Performs the md5 hash on a binary string.
1329 *
1330 * @param {String} content The binary string
1331 * @param {Boolean} [raw] True to get the raw string, false to get the hex string
1332 *
1333 * @return {String} The result
1334 */
/**
 * Performs the md5 hash on a binary string.
 *
 * @param {String} content The binary string
 * @param {Boolean} [raw] True to get the raw string, false to get the hex string
 * @return {String} The result
 */
SparkMD5.hashBinary = function (content, raw) {
  var digest = hex(md51(content));

  if (raw) {
    return hexToBinaryString(digest);
  }

  return digest;
};
1341
1342 // ---------------------------------------------------
1343
1344 /**
1345 * SparkMD5 OOP implementation for array buffers.
1346 *
1347 * Use this class to perform an incremental md5 ONLY for array buffers.
1348 */
SparkMD5.ArrayBuffer = function () {
  // call reset to init the instance (_buff, _length, _hash)
  this.reset();
};
1353
1354 /**
1355 * Appends an array buffer.
1356 *
1357 * @param {ArrayBuffer} arr The array to be appended
1358 *
1359 * @return {SparkMD5.ArrayBuffer} The instance itself
1360 */
SparkMD5.ArrayBuffer.prototype.append = function (arr) {
  // Join the pending bytes with the new chunk so 64-byte blocks can span
  // append() calls.
  var buff = concatenateArrayBuffers(this._buff.buffer, arr, true),
      length = buff.length,
      i;

  this._length += arr.byteLength;

  // Consume every complete 64-byte block.
  for (i = 64; i <= length; i += 64) {
    md5cycle(this._hash, md5blk_array(buff.subarray(i - 64, i)));
  }

  // Keep the unconsumed remainder (< 64 bytes) for the next call; copied
  // via buffer.slice so the retained view doesn't pin the large buffer.
  this._buff = (i - 64) < length ? new Uint8Array(buff.buffer.slice(i - 64)) : new Uint8Array(0);

  return this;
};
1376
1377 /**
1378 * Finishes the incremental computation, reseting the internal state and
1379 * returning the result.
1380 *
1381 * @param {Boolean} raw True to get the raw string, false to get the hex string
1382 *
1383 * @return {String} The result
1384 */
/**
 * Finishes the incremental computation, resetting the internal state and
 * returning the result.
 *
 * @param {Boolean} raw True to get the raw string, false to get the hex string
 * @return {String} The result
 */
SparkMD5.ArrayBuffer.prototype.end = function (raw) {
  var pending = this._buff;
  var pendingLength = pending.length;
  var tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
  var result;

  // Pack the leftover bytes little-endian into the 16-word tail block.
  for (var idx = 0; idx < pendingLength; idx += 1) {
    tail[idx >> 2] |= pending[idx] << ((idx % 4) << 3);
  }

  this._finish(tail, pendingLength);
  result = hex(this._hash);

  if (raw) {
    result = hexToBinaryString(result);
  }

  this.reset();

  return result;
};
1407
1408 /**
1409 * Resets the internal state of the computation.
1410 *
1411 * @return {SparkMD5.ArrayBuffer} The instance itself
1412 */
SparkMD5.ArrayBuffer.prototype.reset = function () {
  this._buff = new Uint8Array(0); // pending, not-yet-hashed bytes
  this._length = 0;               // total number of bytes appended so far
  // MD5 initialization vector (A, B, C, D)
  this._hash = [1732584193, -271733879, -1732584194, 271733878];

  return this;
};
1420
1421 /**
1422 * Gets the internal state of the computation.
1423 *
1424 * @return {Object} The state
1425 */
SparkMD5.ArrayBuffer.prototype.getState = function () {
  var state = SparkMD5.prototype.getState.call(this);

  // Convert buffer to a string so the state is plain-data/serializable
  state.buff = arrayBuffer2Utf8Str(state.buff);

  return state;
};
1434
/**
 * Sets the internal state of the computation (counterpart of getState).
 *
 * @param {Object} state The state
 *
 * @return {SparkMD5.ArrayBuffer} The instance itself
 */
SparkMD5.ArrayBuffer.prototype.setState = function (state) {
  // Convert the serialized string back to a Uint8Array buffer
  state.buff = utf8Str2ArrayBuffer(state.buff, true);

  return SparkMD5.prototype.setState.call(this, state);
};
1448
1449 SparkMD5.ArrayBuffer.prototype.destroy = SparkMD5.prototype.destroy;
1450
1451 SparkMD5.ArrayBuffer.prototype._finish = SparkMD5.prototype._finish;
1452
1453 /**
1454 * Performs the md5 hash on an array buffer.
1455 *
1456 * @param {ArrayBuffer} arr The array buffer
1457 * @param {Boolean} [raw] True to get the raw string, false to get the hex one
1458 *
1459 * @return {String} The result
1460 */
/**
 * Performs the md5 hash on an array buffer (one-shot convenience).
 *
 * @param {ArrayBuffer} arr The array buffer
 * @param {Boolean} [raw] True to get the raw string, false to get the hex one
 * @return {String} The result
 */
SparkMD5.ArrayBuffer.hash = function (arr, raw) {
  var digest = hex(md51_array(new Uint8Array(arr)));

  if (raw) {
    return hexToBinaryString(digest);
  }

  return digest;
};
1467
1468 return SparkMD5;
1469}));
1470
1471},{}],4:[function(_dereq_,module,exports){
1472"use strict";
1473
1474Object.defineProperty(exports, "__esModule", {
1475 value: true
1476});
1477Object.defineProperty(exports, "v1", {
1478 enumerable: true,
1479 get: function () {
1480 return _v.default;
1481 }
1482});
1483Object.defineProperty(exports, "v3", {
1484 enumerable: true,
1485 get: function () {
1486 return _v2.default;
1487 }
1488});
1489Object.defineProperty(exports, "v4", {
1490 enumerable: true,
1491 get: function () {
1492 return _v3.default;
1493 }
1494});
1495Object.defineProperty(exports, "v5", {
1496 enumerable: true,
1497 get: function () {
1498 return _v4.default;
1499 }
1500});
1501Object.defineProperty(exports, "NIL", {
1502 enumerable: true,
1503 get: function () {
1504 return _nil.default;
1505 }
1506});
1507Object.defineProperty(exports, "version", {
1508 enumerable: true,
1509 get: function () {
1510 return _version.default;
1511 }
1512});
1513Object.defineProperty(exports, "validate", {
1514 enumerable: true,
1515 get: function () {
1516 return _validate.default;
1517 }
1518});
1519Object.defineProperty(exports, "stringify", {
1520 enumerable: true,
1521 get: function () {
1522 return _stringify.default;
1523 }
1524});
1525Object.defineProperty(exports, "parse", {
1526 enumerable: true,
1527 get: function () {
1528 return _parse.default;
1529 }
1530});
1531
1532var _v = _interopRequireDefault(_dereq_("./v1.js"));
1533
1534var _v2 = _interopRequireDefault(_dereq_("./v3.js"));
1535
1536var _v3 = _interopRequireDefault(_dereq_("./v4.js"));
1537
1538var _v4 = _interopRequireDefault(_dereq_("./v5.js"));
1539
1540var _nil = _interopRequireDefault(_dereq_("./nil.js"));
1541
1542var _version = _interopRequireDefault(_dereq_("./version.js"));
1543
1544var _validate = _interopRequireDefault(_dereq_("./validate.js"));
1545
1546var _stringify = _interopRequireDefault(_dereq_("./stringify.js"));
1547
1548var _parse = _interopRequireDefault(_dereq_("./parse.js"));
1549
1550function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
1551},{"./nil.js":6,"./parse.js":7,"./stringify.js":11,"./v1.js":12,"./v3.js":13,"./v4.js":15,"./v5.js":16,"./validate.js":17,"./version.js":18}],5:[function(_dereq_,module,exports){
1552"use strict";
1553
1554Object.defineProperty(exports, "__esModule", {
1555 value: true
1556});
1557exports.default = void 0;
1558
1559/*
1560 * Browser-compatible JavaScript MD5
1561 *
1562 * Modification of JavaScript MD5
1563 * https://github.com/blueimp/JavaScript-MD5
1564 *
1565 * Copyright 2011, Sebastian Tschan
1566 * https://blueimp.net
1567 *
1568 * Licensed under the MIT license:
1569 * https://opensource.org/licenses/MIT
1570 *
1571 * Based on
1572 * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message
1573 * Digest Algorithm, as defined in RFC 1321.
1574 * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009
1575 * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet
1576 * Distributed under the BSD License
1577 * See http://pajhome.org.uk/crypt/md5 for more info.
1578 */
/**
 * Computes the MD5 digest of a string (UTF-8 encoded first) or byte array.
 *
 * @param {String|Uint8Array|Array} bytes Input message
 * @return {Array} Digest as an array of 16 byte values
 */
function md5(bytes) {
  if (typeof bytes === 'string') {
    const encoded = unescape(encodeURIComponent(bytes)); // UTF8 escape
    const buf = new Uint8Array(encoded.length);

    for (let pos = 0; pos < encoded.length; ++pos) {
      buf[pos] = encoded.charCodeAt(pos);
    }

    bytes = buf;
  }

  const words = bytesToWords(bytes);
  const bitLength = bytes.length * 8;

  return md5ToHexEncodedArray(wordsToMd5(words, bitLength));
}
1592/*
1593 * Convert an array of little-endian words to an array of bytes
1594 */
1595
1596
/*
 * Convert an array of little-endian words to an array of bytes
 * (each output entry is the numeric byte value, 0-255).
 */
function md5ToHexEncodedArray(input) {
  const output = [];
  const totalBits = input.length * 32;
  const hexTab = '0123456789abcdef';

  for (let bit = 0; bit < totalBits; bit += 8) {
    const byte = (input[bit >> 5] >>> (bit % 32)) & 0xff;
    const pair = hexTab.charAt((byte >>> 4) & 0x0f) + hexTab.charAt(byte & 0x0f);
    output.push(parseInt(pair, 16));
  }

  return output;
}
1610/**
1611 * Calculate output length with padding and bit length
1612 */
1613
1614
/**
 * Calculate output length (in 32-bit words) with padding and bit length:
 * room for the padded message blocks plus the two trailing length words.
 */
function getOutputLength(inputLength8) {
  const paddedBlocks = inputLength8 + 64 >>> 9;
  return (paddedBlocks << 4) + 14 + 1;
}
1618/*
1619 * Calculate the MD5 of an array of little-endian words, and a bit length.
1620 */
1621
1622
// Calculate the MD5 of an array of little-endian words given a bit length.
// NOTE: mutates `x` (padding byte and length word are written into it).
function wordsToMd5(x, len) {
  /* append padding */
  x[len >> 5] |= 0x80 << len % 32;
  x[getOutputLength(len) - 1] = len;
  // MD5 initialization vector (A, B, C, D)
  let a = 1732584193;
  let b = -271733879;
  let c = -1732584194;
  let d = 271733878;

  // Process the message in 16-word (512-bit) blocks.
  for (let i = 0; i < x.length; i += 16) {
    const olda = a;
    const oldb = b;
    const oldc = c;
    const oldd = d;
    // Round 1 (F)
    a = md5ff(a, b, c, d, x[i], 7, -680876936);
    d = md5ff(d, a, b, c, x[i + 1], 12, -389564586);
    c = md5ff(c, d, a, b, x[i + 2], 17, 606105819);
    b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330);
    a = md5ff(a, b, c, d, x[i + 4], 7, -176418897);
    d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426);
    c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341);
    b = md5ff(b, c, d, a, x[i + 7], 22, -45705983);
    a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416);
    d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417);
    c = md5ff(c, d, a, b, x[i + 10], 17, -42063);
    b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162);
    a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682);
    d = md5ff(d, a, b, c, x[i + 13], 12, -40341101);
    c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290);
    b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329);
    // Round 2 (G)
    a = md5gg(a, b, c, d, x[i + 1], 5, -165796510);
    d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632);
    c = md5gg(c, d, a, b, x[i + 11], 14, 643717713);
    b = md5gg(b, c, d, a, x[i], 20, -373897302);
    a = md5gg(a, b, c, d, x[i + 5], 5, -701558691);
    d = md5gg(d, a, b, c, x[i + 10], 9, 38016083);
    c = md5gg(c, d, a, b, x[i + 15], 14, -660478335);
    b = md5gg(b, c, d, a, x[i + 4], 20, -405537848);
    a = md5gg(a, b, c, d, x[i + 9], 5, 568446438);
    d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690);
    c = md5gg(c, d, a, b, x[i + 3], 14, -187363961);
    b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501);
    a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467);
    d = md5gg(d, a, b, c, x[i + 2], 9, -51403784);
    c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473);
    b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734);
    // Round 3 (H)
    a = md5hh(a, b, c, d, x[i + 5], 4, -378558);
    d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463);
    c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562);
    b = md5hh(b, c, d, a, x[i + 14], 23, -35309556);
    a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060);
    d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353);
    c = md5hh(c, d, a, b, x[i + 7], 16, -155497632);
    b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640);
    a = md5hh(a, b, c, d, x[i + 13], 4, 681279174);
    d = md5hh(d, a, b, c, x[i], 11, -358537222);
    c = md5hh(c, d, a, b, x[i + 3], 16, -722521979);
    b = md5hh(b, c, d, a, x[i + 6], 23, 76029189);
    a = md5hh(a, b, c, d, x[i + 9], 4, -640364487);
    d = md5hh(d, a, b, c, x[i + 12], 11, -421815835);
    c = md5hh(c, d, a, b, x[i + 15], 16, 530742520);
    b = md5hh(b, c, d, a, x[i + 2], 23, -995338651);
    // Round 4 (I)
    a = md5ii(a, b, c, d, x[i], 6, -198630844);
    d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415);
    c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905);
    b = md5ii(b, c, d, a, x[i + 5], 21, -57434055);
    a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571);
    d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606);
    c = md5ii(c, d, a, b, x[i + 10], 15, -1051523);
    b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799);
    a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359);
    d = md5ii(d, a, b, c, x[i + 15], 10, -30611744);
    c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380);
    b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649);
    a = md5ii(a, b, c, d, x[i + 4], 6, -145523070);
    d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379);
    c = md5ii(c, d, a, b, x[i + 2], 15, 718787259);
    b = md5ii(b, c, d, a, x[i + 9], 21, -343485551);
    // Fold this block's result into the running state.
    a = safeAdd(a, olda);
    b = safeAdd(b, oldb);
    c = safeAdd(c, oldc);
    d = safeAdd(d, oldd);
  }

  return [a, b, c, d];
}
1709/*
1710 * Convert an array bytes to an array of little-endian words
1711 * Characters >255 have their high-byte silently ignored.
1712 */
1713
1714
/*
 * Convert an array of bytes to an array of little-endian words.
 * Returns a zero-padded Uint32Array sized for MD5 padding; an empty
 * input yields an empty plain array (matching the original contract).
 */
function bytesToWords(input) {
  if (input.length === 0) {
    return [];
  }

  const bitCount = input.length * 8;
  const words = new Uint32Array(getOutputLength(bitCount));

  for (let bit = 0; bit < bitCount; bit += 8) {
    words[bit >> 5] |= (input[bit / 8] & 0xff) << bit % 32;
  }

  return words;
}
1729/*
1730 * Add integers, wrapping at 2^32. This uses 16-bit operations internally
1731 * to work around bugs in some JS interpreters.
1732 */
1733
1734
/*
 * Add integers, wrapping at 2^32. This uses 16-bit operations internally
 * to work around bugs in some JS interpreters.
 */
function safeAdd(x, y) {
  const low = (x & 0xffff) + (y & 0xffff);
  const high = (x >> 16) + (y >> 16) + (low >> 16);
  return (high << 16) | (low & 0xffff);
}
1740/*
1741 * Bitwise rotate a 32-bit number to the left.
1742 */
1743
1744
/*
 * Bitwise rotate a 32-bit number to the left by cnt bits.
 */
function bitRotateLeft(num, cnt) {
  const shiftedOut = num >>> (32 - cnt);
  return (num << cnt) | shiftedOut;
}
1748/*
1749 * These functions implement the four basic operations the algorithm uses.
1750 */
1751
1752
// Core MD5 step shared by all four round operations:
// rotate-left of (a + roundFn + msgWord + constant), then add b.
function md5cmn(q, a, b, x, s, t) {
  return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b);
}
1756
// Round 1 operation: F(b, c, d) = (b & c) | (~b & d)
function md5ff(a, b, c, d, x, s, t) {
  return md5cmn(b & c | ~b & d, a, b, x, s, t);
}

// Round 2 operation: G(b, c, d) = (b & d) | (c & ~d)
function md5gg(a, b, c, d, x, s, t) {
  return md5cmn(b & d | c & ~d, a, b, x, s, t);
}

// Round 3 operation: H(b, c, d) = b ^ c ^ d
function md5hh(a, b, c, d, x, s, t) {
  return md5cmn(b ^ c ^ d, a, b, x, s, t);
}

// Round 4 operation: I(b, c, d) = c ^ (b | ~d)
function md5ii(a, b, c, d, x, s, t) {
  return md5cmn(c ^ (b | ~d), a, b, x, s, t);
}
1772
1773var _default = md5;
1774exports.default = _default;
1775},{}],6:[function(_dereq_,module,exports){
1776"use strict";
1777
1778Object.defineProperty(exports, "__esModule", {
1779 value: true
1780});
1781exports.default = void 0;
1782var _default = '00000000-0000-0000-0000-000000000000';
1783exports.default = _default;
1784},{}],7:[function(_dereq_,module,exports){
1785"use strict";
1786
1787Object.defineProperty(exports, "__esModule", {
1788 value: true
1789});
1790exports.default = void 0;
1791
1792var _validate = _interopRequireDefault(_dereq_("./validate.js"));
1793
1794function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
1795
/**
 * Parses a canonical UUID string into its 16 raw bytes.
 *
 * @param {String} uuid UUID in xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx form
 * @return {Uint8Array} The 16 bytes, most significant first
 * @throws {TypeError} When the input is not a valid UUID
 */
function parse(uuid) {
  if (!(0, _validate.default)(uuid)) {
    throw TypeError('Invalid UUID');
  }

  let v;
  const arr = new Uint8Array(16); // Parse ########-....-....-....-............

  arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;
  arr[1] = v >>> 16 & 0xff;
  arr[2] = v >>> 8 & 0xff;
  arr[3] = v & 0xff; // Parse ........-####-....-....-............

  arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;
  arr[5] = v & 0xff; // Parse ........-....-####-....-............

  arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;
  arr[7] = v & 0xff; // Parse ........-....-....-####-............

  arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;
  arr[9] = v & 0xff; // Parse ........-....-....-....-############
  // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes)

  arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;
  arr[11] = v / 0x100000000 & 0xff;
  arr[12] = v >>> 24 & 0xff;
  arr[13] = v >>> 16 & 0xff;
  arr[14] = v >>> 8 & 0xff;
  arr[15] = v & 0xff;
  return arr;
}
1827
1828var _default = parse;
1829exports.default = _default;
1830},{"./validate.js":17}],8:[function(_dereq_,module,exports){
1831"use strict";
1832
1833Object.defineProperty(exports, "__esModule", {
1834 value: true
1835});
1836exports.default = void 0;
// RFC 4122 UUID pattern (versions 1-5, variant bits 8/9/a/b), plus the
// special all-zero NIL UUID.
var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;
exports.default = _default;
1839},{}],9:[function(_dereq_,module,exports){
1840"use strict";
1841
1842Object.defineProperty(exports, "__esModule", {
1843 value: true
1844});
1845exports.default = rng;
1846// Unique ID creation requires a high quality random # generator. In the browser we therefore
1847// require the crypto API and do not support built-in fallback to lower quality random number
1848// generators (like Math.random()).
// Lazily-resolved reference to crypto.getRandomValues (see rng() below).
let getRandomValues;
// Reused 16-byte scratch buffer; rng() fills and returns this same array.
const rnds8 = new Uint8Array(16);
1851
function rng() {
  // lazy load so that environments that need to polyfill have a chance to do so
  if (!getRandomValues) {
    // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. Also,
    // find the complete implementation of crypto (msCrypto) on IE11.
    getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto) || typeof msCrypto !== 'undefined' && typeof msCrypto.getRandomValues === 'function' && msCrypto.getRandomValues.bind(msCrypto);

    if (!getRandomValues) {
      throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported');
    }
  }

  // NOTE: fills and returns the shared module-level buffer; callers must
  // consume the bytes before the next rng() call.
  return getRandomValues(rnds8);
}
1866},{}],10:[function(_dereq_,module,exports){
1867"use strict";
1868
1869Object.defineProperty(exports, "__esModule", {
1870 value: true
1871});
1872exports.default = void 0;
1873
1874// Adapted from Chris Veness' SHA1 code at
1875// http://www.movable-type.co.uk/scripts/sha1.html
// SHA-1 round function: picks the mixing operation for round group s.
// s=0 -> Ch, s=2 -> Maj, s=1 and s=3 -> Parity (undefined for other s,
// matching the original switch).
function f(s, x, y, z) {
  switch (s) {
    case 0:
      return x & y ^ ~x & z;

    case 1:
    case 3:
      return x ^ y ^ z;

    case 2:
      return x & y ^ x & z ^ y & z;
  }
}
1891
// Bitwise rotate a 32-bit value left by n bits.
function ROTL(x, n) {
  const hi = x << n;
  const lo = x >>> (32 - n);
  return hi | lo;
}
1895
/**
 * Computes the SHA-1 digest of a string (UTF-8 encoded first) or byte array.
 *
 * @param {String|Array|TypedArray} bytes Input message
 * @return {Array} Digest as an array of 20 byte values
 */
function sha1(bytes) {
  const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; // per-round-group constants
  const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; // initial hash state

  if (typeof bytes === 'string') {
    const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape

    bytes = [];

    for (let i = 0; i < msg.length; ++i) {
      bytes.push(msg.charCodeAt(i));
    }
  } else if (!Array.isArray(bytes)) {
    // Convert Array-like to Array
    bytes = Array.prototype.slice.call(bytes);
  }

  // Padding: 0x80 terminator; zero-fill comes from the Uint32Array blocks.
  bytes.push(0x80);
  const l = bytes.length / 4 + 2;
  const N = Math.ceil(l / 16);
  const M = new Array(N);

  // Split the message into N blocks of sixteen 32-bit big-endian words.
  for (let i = 0; i < N; ++i) {
    const arr = new Uint32Array(16);

    for (let j = 0; j < 16; ++j) {
      arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3];
    }

    M[i] = arr;
  }

  // Store the original bit length in the last two words of the last block
  // (high word computed via division because shifts truncate to 32 bits).
  M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32);
  M[N - 1][14] = Math.floor(M[N - 1][14]);
  M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff;

  for (let i = 0; i < N; ++i) {
    const W = new Uint32Array(80);

    // Message schedule: first 16 words copied, the rest derived by rotation.
    for (let t = 0; t < 16; ++t) {
      W[t] = M[i][t];
    }

    for (let t = 16; t < 80; ++t) {
      W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1);
    }

    let a = H[0];
    let b = H[1];
    let c = H[2];
    let d = H[3];
    let e = H[4];

    // 80 compression rounds; round function/constant switch every 20 rounds.
    for (let t = 0; t < 80; ++t) {
      const s = Math.floor(t / 20);
      const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0;
      e = d;
      d = c;
      c = ROTL(b, 30) >>> 0;
      b = a;
      a = T;
    }

    H[0] = H[0] + a >>> 0;
    H[1] = H[1] + b >>> 0;
    H[2] = H[2] + c >>> 0;
    H[3] = H[3] + d >>> 0;
    H[4] = H[4] + e >>> 0;
  }

  // Unpack the five 32-bit state words into 20 big-endian bytes.
  return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff];
}
1968
1969var _default = sha1;
1970exports.default = _default;
1971},{}],11:[function(_dereq_,module,exports){
1972"use strict";
1973
1974Object.defineProperty(exports, "__esModule", {
1975 value: true
1976});
1977exports.default = void 0;
1978
1979var _validate = _interopRequireDefault(_dereq_("./validate.js"));
1980
1981function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
1982
1983/**
1984 * Convert array of 16 byte values to UUID string format of the form:
1985 * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
1986 */
const byteToHex = [];

for (let i = 0; i < 256; ++i) {
  // (i + 0x100).toString(16) is always 3 hex digits; dropping the leading
  // "1" yields a zero-padded two-digit value ("00".."ff").
  // slice(1) replaces the deprecated String.prototype.substr.
  byteToHex.push((i + 0x100).toString(16).slice(1));
}
1992
/**
 * Converts 16 bytes (starting at `offset`) to canonical UUID string form.
 *
 * @param {Array|Uint8Array} arr Byte source
 * @param {Number} [offset=0] Index of the first of the 16 bytes
 * @return {String} Lower-case UUID string
 * @throws {TypeError} When the resulting string is not a valid UUID
 */
function stringify(arr, offset = 0) {
  // Note: Be careful editing this code! It's been tuned for performance
  // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434
  const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one
  // of the following:
  // - One or more input array values don't map to a hex octet (leading to
  // "undefined" in the uuid)
  // - Invalid input values for the RFC `version` or `variant` fields

  if (!(0, _validate.default)(uuid)) {
    throw TypeError('Stringified UUID is invalid');
  }

  return uuid;
}
2008
2009var _default = stringify;
2010exports.default = _default;
2011},{"./validate.js":17}],12:[function(_dereq_,module,exports){
2012"use strict";
2013
2014Object.defineProperty(exports, "__esModule", {
2015 value: true
2016});
2017exports.default = void 0;
2018
2019var _rng = _interopRequireDefault(_dereq_("./rng.js"));
2020
2021var _stringify = _interopRequireDefault(_dereq_("./stringify.js"));
2022
2023function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
2024
2025// **`v1()` - Generate time-based UUID**
2026//
2027// Inspired by https://github.com/LiosK/UUID.js
2028// and http://docs.python.org/library/uuid.html
// Per-process state for v1 (time-based) UUID generation.
let _nodeId;

let _clockseq;

// Previous uuid creation time
let _lastMSecs = 0;
let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details
2036
/**
 * Generates an RFC 4122 version 1 (time-based) UUID.
 *
 * @param {Object} [options] Overrides: node, clockseq, msecs, nsecs, random, rng
 * @param {Array} [buf] When given, the 16 bytes are written here instead of
 *                      returning a string
 * @param {Number} [offset] Write position within buf
 * @return {String|Array} UUID string, or buf when buf was supplied
 * @throws {Error} When more than 10M uuids/sec are requested
 */
function v1(options, buf, offset) {
  let i = buf && offset || 0;
  const b = buf || new Array(16);
  options = options || {};
  let node = options.node || _nodeId;
  let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not
  // specified. We do this lazily to minimize issues related to insufficient
  // system entropy. See #189

  if (node == null || clockseq == null) {
    const seedBytes = options.random || (options.rng || _rng.default)();

    if (node == null) {
      // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)
      node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];
    }

    if (clockseq == null) {
      // Per 4.2.2, randomize (14 bit) clockseq
      clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;
    }
  } // UUID timestamps are 100 nano-second units since the Gregorian epoch,
  // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so
  // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
  // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.


  let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock
  // cycle to simulate higher resolution clock

  let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)

  const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression

  if (dt < 0 && options.clockseq === undefined) {
    clockseq = clockseq + 1 & 0x3fff;
  } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
  // time interval


  if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {
    nsecs = 0;
  } // Per 4.2.1.2 Throw error if too many uuids are requested


  if (nsecs >= 10000) {
    throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");
  }

  _lastMSecs = msecs;
  _lastNSecs = nsecs;
  _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch

  msecs += 12219292800000; // `time_low`

  const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;
  b[i++] = tl >>> 24 & 0xff;
  b[i++] = tl >>> 16 & 0xff;
  b[i++] = tl >>> 8 & 0xff;
  b[i++] = tl & 0xff; // `time_mid`

  const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;
  b[i++] = tmh >>> 8 & 0xff;
  b[i++] = tmh & 0xff; // `time_high_and_version`

  b[i++] = tmh >>> 24 & 0xf | 0x10; // include version

  b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)

  b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`

  b[i++] = clockseq & 0xff; // `node`

  for (let n = 0; n < 6; ++n) {
    b[i + n] = node[n];
  }

  return buf || (0, _stringify.default)(b);
}
2116
2117var _default = v1;
2118exports.default = _default;
2119},{"./rng.js":9,"./stringify.js":11}],13:[function(_dereq_,module,exports){
2120"use strict";
2121
2122Object.defineProperty(exports, "__esModule", {
2123 value: true
2124});
2125exports.default = void 0;
2126
2127var _v = _interopRequireDefault(_dereq_("./v35.js"));
2128
2129var _md = _interopRequireDefault(_dereq_("./md5.js"));
2130
// Babel interop helper: wrap a CommonJS export so that a `.default`
// property always exists, leaving real ES modules untouched.
function _interopRequireDefault(obj) {
  if (obj && obj.__esModule) {
    return obj;
  }
  return { default: obj };
}
2132
2133const v3 = (0, _v.default)('v3', 0x30, _md.default);
2134var _default = v3;
2135exports.default = _default;
2136},{"./md5.js":5,"./v35.js":14}],14:[function(_dereq_,module,exports){
2137"use strict";
2138
2139Object.defineProperty(exports, "__esModule", {
2140 value: true
2141});
2142exports.default = _default;
2143exports.URL = exports.DNS = void 0;
2144
2145var _stringify = _interopRequireDefault(_dereq_("./stringify.js"));
2146
2147var _parse = _interopRequireDefault(_dereq_("./parse.js"));
2148
// Babel interop helper: ensure a `.default` property on CJS exports.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
2150
// Convert a JS string to an array of its UTF-8 byte values.
function stringToBytes(str) {
  // escape to UTF-8 first, then read each resulting code unit as a byte
  const escaped = unescape(encodeURIComponent(str)); // UTF8 escape
  const bytes = new Array(escaped.length);

  for (let idx = 0; idx < escaped.length; ++idx) {
    bytes[idx] = escaped.charCodeAt(idx);
  }

  return bytes;
}
2162
2163const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';
2164exports.DNS = DNS;
2165const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';
2166exports.URL = URL;
2167
/**
 * Factory for the name-based UUID generators (v3/MD5 and v5/SHA-1).
 * Produces a generator that hashes `namespace + value` with `hashfunc`
 * and stamps the supplied version plus the RFC 4122 variant bits.
 */
function _default(name, version, hashfunc) {
  function generateUUID(value, namespace, buf, offset) {
    // both value and namespace may arrive as strings
    if (typeof value === 'string') {
      value = stringToBytes(value);
    }
    if (typeof namespace === 'string') {
      namespace = (0, _parse.default)(namespace);
    }

    if (namespace.length !== 16) {
      throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');
    }

    // Compute hash of namespace and value, Per 4.3
    // Future: Use spread syntax when supported on all platforms, e.g. `bytes =
    // hashfunc([...namespace, ... value])`
    let bytes = new Uint8Array(16 + value.length);
    bytes.set(namespace);
    bytes.set(value, namespace.length);
    bytes = hashfunc(bytes);

    // stamp the version nibble and the RFC 4122 variant bits
    bytes[6] = bytes[6] & 0x0f | version;
    bytes[8] = bytes[8] & 0x3f | 0x80;

    // binary output requested: copy into the caller's buffer
    if (buf) {
      offset = offset || 0;
      for (let n = 0; n < 16; ++n) {
        buf[offset + n] = bytes[n];
      }
      return buf;
    }

    return (0, _stringify.default)(bytes);
  }

  // Function#name is not settable on some platforms (#270)
  try {
    generateUUID.name = name; // eslint-disable-next-line no-empty
  } catch (err) {}

  // For CommonJS default export support
  generateUUID.DNS = DNS;
  generateUUID.URL = URL;
  return generateUUID;
}
2215},{"./parse.js":7,"./stringify.js":11}],15:[function(_dereq_,module,exports){
2216"use strict";
2217
2218Object.defineProperty(exports, "__esModule", {
2219 value: true
2220});
2221exports.default = void 0;
2222
2223var _rng = _interopRequireDefault(_dereq_("./rng.js"));
2224
2225var _stringify = _interopRequireDefault(_dereq_("./stringify.js"));
2226
// Babel interop helper: ensure a `.default` property on CJS exports.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
2228
/**
 * Generate an RFC 4122 version-4 (random) UUID.
 * @param {object} [options] - may supply `random` (16 bytes) or `rng`.
 * @param {Array|Uint8Array} [buf] - if given, bytes are written here.
 * @param {number} [offset] - write position within `buf`.
 * @returns {string|Array|Uint8Array} UUID string, or `buf` when provided.
 */
function v4(options, buf, offset) {
  options = options || {};

  // 16 random bytes: caller-supplied, custom rng, or the default rng
  const bytes = options.random || (options.rng || _rng.default)();

  // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
  bytes[6] = bytes[6] & 0x0f | 0x40;
  bytes[8] = bytes[8] & 0x3f | 0x80;

  // Copy bytes to buffer, if provided
  if (buf) {
    offset = offset || 0;
    for (let n = 0; n < 16; ++n) {
      buf[offset + n] = bytes[n];
    }
    return buf;
  }

  return (0, _stringify.default)(bytes);
}
2250
2251var _default = v4;
2252exports.default = _default;
2253},{"./rng.js":9,"./stringify.js":11}],16:[function(_dereq_,module,exports){
2254"use strict";
2255
2256Object.defineProperty(exports, "__esModule", {
2257 value: true
2258});
2259exports.default = void 0;
2260
2261var _v = _interopRequireDefault(_dereq_("./v35.js"));
2262
2263var _sha = _interopRequireDefault(_dereq_("./sha1.js"));
2264
// Babel interop helper: ensure a `.default` property on CJS exports.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
2266
2267const v5 = (0, _v.default)('v5', 0x50, _sha.default);
2268var _default = v5;
2269exports.default = _default;
2270},{"./sha1.js":10,"./v35.js":14}],17:[function(_dereq_,module,exports){
2271"use strict";
2272
2273Object.defineProperty(exports, "__esModule", {
2274 value: true
2275});
2276exports.default = void 0;
2277
2278var _regex = _interopRequireDefault(_dereq_("./regex.js"));
2279
// Babel interop helper: ensure a `.default` property on CJS exports.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
2281
// A value is a valid UUID iff it is a string matching the RFC 4122 regex.
function validate(uuid) {
  if (typeof uuid !== 'string') {
    return false;
  }
  return _regex.default.test(uuid);
}
2285
2286var _default = validate;
2287exports.default = _default;
2288},{"./regex.js":8}],18:[function(_dereq_,module,exports){
2289"use strict";
2290
2291Object.defineProperty(exports, "__esModule", {
2292 value: true
2293});
2294exports.default = void 0;
2295
2296var _validate = _interopRequireDefault(_dereq_("./validate.js"));
2297
// Babel interop helper: ensure a `.default` property on CJS exports.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
2299
/**
 * Extract the version number from a UUID string.
 * @param {string} uuid
 * @returns {number} the version (hex digit at index 14, first digit of the
 *   third dash-separated group)
 * @throws {TypeError} if `uuid` is not a valid UUID string
 */
function version(uuid) {
  if (!(0, _validate.default)(uuid)) {
    throw TypeError('Invalid UUID');
  }

  // `String.prototype.substr` is deprecated (Annex B); use charAt instead.
  return parseInt(uuid.charAt(14), 16);
}
2307
2308var _default = version;
2309exports.default = _default;
2310},{"./validate.js":17}],19:[function(_dereq_,module,exports){
2311'use strict';
2312
2313/**
2314 * Stringify/parse functions that don't operate
2315 * recursively, so they avoid call stack exceeded
2316 * errors.
2317 */
2318exports.stringify = function stringify(input) {
2319 var queue = [];
2320 queue.push({obj: input});
2321
2322 var res = '';
2323 var next, obj, prefix, val, i, arrayPrefix, keys, k, key, value, objPrefix;
2324 while ((next = queue.pop())) {
2325 obj = next.obj;
2326 prefix = next.prefix || '';
2327 val = next.val || '';
2328 res += prefix;
2329 if (val) {
2330 res += val;
2331 } else if (typeof obj !== 'object') {
2332 res += typeof obj === 'undefined' ? null : JSON.stringify(obj);
2333 } else if (obj === null) {
2334 res += 'null';
2335 } else if (Array.isArray(obj)) {
2336 queue.push({val: ']'});
2337 for (i = obj.length - 1; i >= 0; i--) {
2338 arrayPrefix = i === 0 ? '' : ',';
2339 queue.push({obj: obj[i], prefix: arrayPrefix});
2340 }
2341 queue.push({val: '['});
2342 } else { // object
2343 keys = [];
2344 for (k in obj) {
2345 if (obj.hasOwnProperty(k)) {
2346 keys.push(k);
2347 }
2348 }
2349 queue.push({val: '}'});
2350 for (i = keys.length - 1; i >= 0; i--) {
2351 key = keys[i];
2352 value = obj[key];
2353 objPrefix = (i > 0 ? ',' : '');
2354 objPrefix += JSON.stringify(key) + ':';
2355 queue.push({obj: value, prefix: objPrefix});
2356 }
2357 queue.push({val: '{'});
2358 }
2359 }
2360 return res;
2361};
2362
// Convenience function for the parse function (basically copied from
// pouchCollate.parseIndexableString): attach a completed value `obj`
// to its parent container, or push it as a pending object key.
function pop(obj, stack, metaStack) {
  var top = metaStack[metaStack.length - 1];
  if (obj === top.element) {
    // popping a meta-element, e.g. an object whose value is another object;
    // its parent becomes the active container
    metaStack.pop();
    top = metaStack[metaStack.length - 1];
  }

  var container = top.element;
  if (Array.isArray(container)) {
    container.push(obj);
    return;
  }

  if (top.index === stack.length - 2) {
    // a key is already waiting on the stack: complete the key/value pair
    var key = stack.pop();
    container[key] = obj;
  } else {
    // no pending key yet: `obj` itself is the key
    stack.push(obj);
  }
}
2384
/**
 * Parse a JSON string without recursion (counterpart to stringify above).
 * Walks the input character by character, keeping an explicit value stack
 * plus a meta-stack of currently open arrays/objects; completed values are
 * attached to their parent via pop().
 */
exports.parse = function (str) {
  var stack = [];
  var metaStack = []; // stack for arrays and objects
  var i = 0;
  var collationIndex,parsedNum,numChar;
  var parsedString,lastCh,numConsecutiveSlashes,ch;
  var arrayElement, objElement;
  while (true) {
    collationIndex = str[i++];
    // container close (or end of input): finalize the value on top of the stack
    if (collationIndex === '}' ||
        collationIndex === ']' ||
        typeof collationIndex === 'undefined') {
      if (stack.length === 1) {
        return stack.pop();
      } else {
        pop(stack.pop(), stack, metaStack);
        continue;
      }
    }
    switch (collationIndex) {
      case ' ':
      case '\t':
      case '\n':
      case ':':
      case ',':
        // structural / whitespace characters carry no value
        break;
      case 'n':
        i += 3; // 'ull'
        pop(null, stack, metaStack);
        break;
      case 't':
        i += 3; // 'rue'
        pop(true, stack, metaStack);
        break;
      case 'f':
        i += 4; // 'alse'
        pop(false, stack, metaStack);
        break;
      case '0':
      case '1':
      case '2':
      case '3':
      case '4':
      case '5':
      case '6':
      case '7':
      case '8':
      case '9':
      case '-':
        // number: back up one character, then consume the full numeric token
        parsedNum = '';
        i--;
        while (true) {
          numChar = str[i++];
          if (/[\d\.\-e\+]/.test(numChar)) {
            parsedNum += numChar;
          } else {
            i--;
            break;
          }
        }
        pop(parseFloat(parsedNum), stack, metaStack);
        break;
      case '"':
        // string: consume until a closing quote that isn't escaped
        // (an even run of backslashes means the quote is NOT escaped)
        parsedString = '';
        lastCh = void 0;
        numConsecutiveSlashes = 0;
        while (true) {
          ch = str[i++];
          if (ch !== '"' || (lastCh === '\\' &&
              numConsecutiveSlashes % 2 === 1)) {
            parsedString += ch;
            lastCh = ch;
            if (lastCh === '\\') {
              numConsecutiveSlashes++;
            } else {
              numConsecutiveSlashes = 0;
            }
          } else {
            break;
          }
        }
        // delegate escape handling to the native parser
        pop(JSON.parse('"' + parsedString + '"'), stack, metaStack);
        break;
      case '[':
        arrayElement = { element: [], index: stack.length };
        stack.push(arrayElement.element);
        metaStack.push(arrayElement);
        break;
      case '{':
        objElement = { element: {}, index: stack.length };
        stack.push(objElement.element);
        metaStack.push(objElement);
        break;
      default:
        throw new Error(
          'unexpectedly reached end of input: ' + collationIndex);
    }
  }
};
2484
2485},{}],20:[function(_dereq_,module,exports){
2486(function (process){(function (){
2487'use strict';
2488
// Rollup interop helper: unwrap a CJS module-namespace object to its
// default export; pass everything else straight through.
function _interopDefault(ex) {
  var looksLikeNamespace = ex && typeof ex === 'object' && 'default' in ex;
  return looksLikeNamespace ? ex['default'] : ex;
}
2490
2491var Md5 = _interopDefault(_dereq_('spark-md5'));
2492var uuid = _dereq_('uuid');
2493var vuvuzela = _interopDefault(_dereq_('vuvuzela'));
2494var EE = _interopDefault(_dereq_('events'));
2495
// True for the binary types PouchDB handles directly: ArrayBuffer or Blob
// (guards the globals so non-browser environments don't throw).
function isBinaryObject(object) {
  if (typeof ArrayBuffer !== 'undefined' && object instanceof ArrayBuffer) {
    return true;
  }
  return typeof Blob !== 'undefined' && object instanceof Blob;
}
2500
2501/**
2502 * @template {ArrayBuffer | Blob} T
2503 * @param {T} object
2504 * @returns {T}
2505 */
2506function cloneBinaryObject(object) {
2507 return object instanceof ArrayBuffer
2508 ? object.slice(0)
2509 : object.slice(0, object.size, object.type);
2510}
2511
// most of this is borrowed from lodash.isPlainObject:
// https://github.com/fis-components/lodash.isplainobject/
// blob/29c358140a74f252aeb08c9eb28bef86f2217d4a/index.js

var funcToString = Function.prototype.toString;
var objectCtorString = funcToString.call(Object);

// A "plain" object is one created via `{}`/`new Object()` (or with a
// null prototype): its prototype's constructor is the built-in Object.
function isPlainObject(value) {
  var proto = Object.getPrototypeOf(value);
  /* istanbul ignore if */
  if (proto === null) { // not sure when this happens, but I guess it can
    return true;
  }
  var Ctor = proto.constructor;
  if (typeof Ctor != 'function') {
    return false;
  }
  return Ctor instanceof Ctor && funcToString.call(Ctor) == objectCtorString;
}
2529
/**
 * Deep-clone a PouchDB document value.
 * Primitives are returned as-is; arrays and plain objects are copied
 * recursively (own enumerable keys, `undefined` values dropped);
 * finite Dates become ISO strings for cross-adapter consistency;
 * ArrayBuffers/Blobs are byte-copied; anything else (class instances,
 * Workers, ...) is returned by reference.
 */
function clone(object) {
  if (!object || typeof object !== 'object') {
    return object;
  }

  if (Array.isArray(object)) {
    return object.map(clone);
  }

  // special case: to avoid inconsistencies between IndexedDB
  // and other backends, we automatically stringify Dates
  if (object instanceof Date && isFinite(object)) {
    return object.toISOString();
  }

  if (isBinaryObject(object)) {
    return cloneBinaryObject(object);
  }

  if (!isPlainObject(object)) {
    return object; // don't clone objects like Workers
  }

  var result = {};
  for (var key in object) {
    /* istanbul ignore else */
    if (Object.prototype.hasOwnProperty.call(object, key)) {
      var copied = clone(object[key]);
      if (typeof copied !== 'undefined') {
        result[key] = copied;
      }
    }
  }
  return result;
}
2573
/**
 * Wrap `fun` so that invoking the wrapper a second time is a hard
 * programming error (a smoke test around callbacks that must fire once).
 */
function once(fun) {
  var alreadyCalled = false;
  return function (...args) {
    /* istanbul ignore if */
    if (alreadyCalled) {
      // this is a smoke test and should never actually happen
      throw new Error('once called more than once');
    }
    alreadyCalled = true;
    fun.apply(this, args);
  };
}
2587
/**
 * Convert a callback-style function into one returning a Promise, while
 * still honoring a trailing callback argument if the caller supplies one
 * (dual promise/callback API). Arguments are cloned before dispatch.
 */
function toPromise(func) {
  // create the function we will be returning
  return function (...args) {
    // Clone arguments
    args = clone(args);
    var context = this;
    // if the last argument is a function, assume it's a callback
    var callerCB = typeof args[args.length - 1] === 'function' ? args.pop() : false;
    var promise = new Promise(function (fulfill, reject) {
      try {
        // create a callback for this invocation;
        // apply the function in the orig context
        args.push(once(function (err, mesg) {
          if (err) {
            reject(err);
          } else {
            fulfill(mesg);
          }
        }));
        var resp = func.apply(context, args);
        if (resp && typeof resp.then === 'function') {
          // func is itself promise-based: adopt its result directly
          fulfill(resp);
        }
      } catch (e) {
        reject(e);
      }
    });
    // if there is a callback, call it back
    if (callerCB) {
      promise.then(function (result) {
        callerCB(null, result);
      }, callerCB);
    }
    return promise;
  };
}
2626
// Emit a 'debug' event on the constructor describing an API call and —
// by wrapping the trailing callback — its eventual response. No-op
// unless someone is actually listening for 'debug'.
function logApiCall(self, name, args) {
  /* istanbul ignore if */
  if (self.constructor.listeners('debug').length) {
    var callArgs = ['api', self.name, name];
    for (var idx = 0; idx < args.length - 1; idx++) {
      callArgs.push(args[idx]);
    }
    self.constructor.emit('debug', callArgs);

    // override the callback itself to log the response
    var origCallback = args[args.length - 1];
    args[args.length - 1] = function (err, res) {
      var responseArgs = ['api', self.name, name].concat(
        err ? ['error', err] : ['success', res]
      );
      self.constructor.emit('debug', responseArgs);
      origCallback(err, res);
    };
  }
}
2648
// Wrap an adapter method: reject when the db is closed/destroyed, log
// the call for 'debug' listeners, and defer execution until the
// adapter's task queue is ready.
function adapterFun(name, callback) {
  return toPromise(function (...args) {
    if (this._closed) {
      return Promise.reject(new Error('database is closed'));
    }
    if (this._destroyed) {
      return Promise.reject(new Error('database is destroyed'));
    }
    var db = this;
    logApiCall(db, name, args);
    if (db.taskqueue.isReady) {
      return callback.apply(this, args);
    }
    // adapter still setting up: queue the call and replay it later
    return new Promise(function (fulfill, reject) {
      db.taskqueue.addTask(function (failed) {
        if (failed) {
          reject(failed);
        } else {
          fulfill(db[name].apply(db, args));
        }
      });
    });
  });
}
2673
// like underscore/lodash _.pick(): copy only the listed properties
// (own or inherited, via `in`) from `obj` into a fresh object.
function pick(obj, arr) {
  var res = {};
  for (var prop of arr) {
    if (prop in obj) {
      res[prop] = obj[prop];
    }
  }
  return res;
}
2685
// Most browsers throttle concurrent requests at 6, so it's silly
// to shim _bulk_get by trying to launch potentially hundreds of requests
// and then letting the majority time out. We can handle this ourselves.
var MAX_NUM_CONCURRENT_REQUESTS = 6;

// Identity: used both as a pass-through result formatter and as a
// truthiness predicate for Array#filter in bulkGet below.
function identityFunction(x) {
  return x;
}
2694
// Wrap a winning-rev `get` result so it matches the shape of an
// open_revs response: a one-element list of `{ok: doc}` rows.
function formatResultForOpenRevsGet(result) {
  const row = { ok: result };
  return [row];
}
2700
// shim for P/CouchDB adapters that don't directly implement _bulk_get
/**
 * Emulate the _bulk_get API with individual db.get() calls, keeping at
 * most MAX_NUM_CONCURRENT_REQUESTS in flight at once.
 * @param {object} db - the database to read from
 * @param {object} opts - must contain `docs` ({id, rev?} requests); may
 *   carry global options (revs, attachments, binary, ajax, latest)
 * @param {Function} callback - invoked with (null, {results}) when done
 */
function bulkGet(db, opts, callback) {
  var requests = opts.docs;

  // consolidate into one request per doc if possible
  var requestsById = new Map();
  requests.forEach(function (request) {
    if (requestsById.has(request.id)) {
      requestsById.get(request.id).push(request);
    } else {
      requestsById.set(request.id, [request]);
    }
  });

  var numDocs = requestsById.size;
  var numDone = 0;
  var perDocResults = new Array(numDocs);

  // flatten the per-doc results into one row per returned doc info
  function collapseResultsAndFinish() {
    var results = [];
    perDocResults.forEach(function (res) {
      res.docs.forEach(function (info) {
        results.push({
          id: res.id,
          docs: [info]
        });
      });
    });
    callback(null, {results});
  }

  function checkDone() {
    if (++numDone === numDocs) {
      collapseResultsAndFinish();
    }
  }

  // record one doc's result at its original position (keeps output ordered)
  function gotResult(docIndex, id, docs) {
    perDocResults[docIndex] = {id, docs};
    checkDone();
  }

  var allRequests = [];
  requestsById.forEach(function (value, key) {
    allRequests.push(key);
  });

  var i = 0;

  // launch the next window of up to MAX_NUM_CONCURRENT_REQUESTS gets
  function nextBatch() {

    if (i >= allRequests.length) {
      return;
    }

    var upTo = Math.min(i + MAX_NUM_CONCURRENT_REQUESTS, allRequests.length);
    var batch = allRequests.slice(i, upTo);
    processBatch(batch, i);
    i += batch.length;
  }

  function processBatch(batch, offset) {
    batch.forEach(function (docId, j) {
      var docIdx = offset + j;
      var docRequests = requestsById.get(docId);

      // just use the first request as the "template"
      // TODO: The _bulk_get API allows for more subtle use cases than this,
      // but for now it is unlikely that there will be a mix of different
      // "atts_since" or "attachments" in the same request, since it's just
      // replicate.js that is using this for the moment.
      // Also, atts_since is aspirational, since we don't support it yet.
      var docOpts = pick(docRequests[0], ['atts_since', 'attachments']);
      docOpts.open_revs = docRequests.map(function (request) {
        // rev is optional, open_revs disallowed
        return request.rev;
      });

      // remove falsey / undefined revisions
      docOpts.open_revs = docOpts.open_revs.filter(identityFunction);

      var formatResult = identityFunction;

      if (docOpts.open_revs.length === 0) {
        delete docOpts.open_revs;

        // when fetching only the "winning" leaf,
        // transform the result so it looks like an open_revs
        // request
        formatResult = formatResultForOpenRevsGet;
      }

      // globally-supplied options
      ['revs', 'attachments', 'binary', 'ajax', 'latest'].forEach(function (param) {
        if (param in opts) {
          docOpts[param] = opts[param];
        }
      });
      db.get(docId, docOpts, function (err, res) {
        var result;
        /* istanbul ignore if */
        if (err) {
          result = [{error: err}];
        } else {
          result = formatResult(res);
        }
        gotResult(docIdx, docId, result);
        // a slot freed up: schedule more work if any remains
        nextBatch();
      });
    });
  }

  nextBatch();

}
2816
// Feature-detect localStorage once at module load; merely touching it can
// throw (e.g. private browsing mode, or non-browser environments).
var hasLocal;

try {
  localStorage.setItem('_pouch_check_localstorage', 1);
  hasLocal = !!localStorage.getItem('_pouch_check_localstorage');
} catch (e) {
  hasLocal = false;
}

// Report whether localStorage is usable in this environment.
function hasLocalStorage() {
  return hasLocal;
}
2829
// Microtask scheduler: prefer native queueMicrotask, falling back to a
// resolved-promise trampoline on older runtimes.
const nextTick = typeof queueMicrotask === "function"
  ? queueMicrotask
  : function nextTick(fn) {
    Promise.resolve().then(fn);
  };
2835
/**
 * Cross-listener (and, via localStorage, cross-window) change notifier.
 * Emits an event named after a database whenever it changes; registered
 * listeners re-run a changes request for that db and forward new rows to
 * opts.onChange.
 */
class Changes extends EE {
  constructor() {
    super();

    // eventFunction per listener id (see addListener)
    this._listeners = {};

    if (hasLocalStorage()) {
      // another window wrote to localStorage: re-emit the db name it used
      addEventListener("storage", (e) => {
        this.emit(e.key);
      });
    }
  }

  // Start relaying `dbName` change notifications (keyed by `id`) into
  // opts.onChange, resuming from opts.since each time.
  addListener(dbName, id, db, opts) {
    if (this._listeners[id]) {
      return;
    }
    // false = idle, true = request running, 'waiting' = run again when done
    var inprogress = false;
    var self = this;
    function eventFunction() {
      if (!self._listeners[id]) {
        return;
      }
      if (inprogress) {
        // a request is already in flight; remember to re-run afterwards
        inprogress = 'waiting';
        return;
      }
      inprogress = true;
      var changesOpts = pick(opts, [
        'style', 'include_docs', 'attachments', 'conflicts', 'filter',
        'doc_ids', 'view', 'since', 'query_params', 'binary', 'return_docs'
      ]);

      function onError() {
        inprogress = false;
      }

      db.changes(changesOpts).on('change', function (c) {
        if (c.seq > opts.since && !opts.cancelled) {
          opts.since = c.seq;
          opts.onChange(c);
        }
      }).on('complete', function () {
        if (inprogress === 'waiting') {
          // a notification arrived mid-request: run again for the new seq
          nextTick(eventFunction);
        }
        inprogress = false;
      }).on('error', onError);
    }
    this._listeners[id] = eventFunction;
    this.on(dbName, eventFunction);
  }

  removeListener(dbName, id) {
    if (!(id in this._listeners)) {
      return;
    }
    super.removeListener(dbName, this._listeners[id]);
    delete this._listeners[id];
  }

  notifyLocalWindows(dbName) {
    //do a useless change on a storage thing
    //in order to get other windows's listeners to activate
    if (hasLocalStorage()) {
      localStorage[dbName] = (localStorage[dbName] === "a") ? "b" : "a";
    }
  }

  // Notify listeners in this window (emit) and in other windows (storage).
  notify(dbName) {
    this.emit(dbName);
    this.notifyLocalWindows(dbName);
  }
}
2910
/**
 * Call console[method](...args) only when a console with that method
 * exists (guards against environments with no or partial console).
 * Uses rest parameters instead of the legacy `arguments` object.
 */
function guardedConsole(method, ...args) {
  /* istanbul ignore else */
  if (typeof console !== 'undefined' && typeof console[method] === 'function') {
    console[method].apply(console, args);
  }
}
2918
/**
 * Random integer used for replication back-off delays.
 * A missing/invalid upper bound doubles the lower one (or starts at 2);
 * results are capped so they never exceed 10 minutes.
 */
function randomNumber(min, max) {
  var maxTimeout = 600000; // Hard-coded default of 10 minutes
  min = parseInt(min, 10) || 0;
  max = parseInt(max, 10);
  if (Number.isNaN(max) || max <= min) {
    // no usable upper bound: double the lower one (doubling back-off)
    max = (min || 1) << 1;
  } else {
    max += 1;
  }
  // In order to not exceed maxTimeout, pick a random value between
  // half of maxTimeout and maxTimeout
  if (max > maxTimeout) {
    min = maxTimeout >> 1; // divide by two
    max = maxTimeout;
  }
  // ~~ coerces to an int, but fast.
  return ~~(min + Math.random() * (max - min));
}
2938
// Default replication back-off: the first delay is random in [0, 2000);
// subsequent delays double the previous one (randomNumber handles that
// when max is 0).
function defaultBackOff(min) {
  var max = min ? 0 : 2000;
  return randomNumber(min, max);
}
2946
// designed to give info to browser users, who are disturbed
// when they see http errors in the console
function explainError(status, str) {
  var message = 'The above ' + status + ' is totally normal. ' + str;
  guardedConsole('info', message);
}
2952
/**
 * Base class for PouchDB errors: `status` is the HTTP-style code,
 * `error` the short CouchDB error name, `reason` the human message.
 */
class PouchError extends Error {
  constructor(status, error, reason) {
    super();
    this.status = status;
    this.name = error;
    this.message = reason;
    this.error = true;
  }

  toString() {
    // NOTE: `this.reason` is only set on instances produced by
    // createError(); on plain PouchErrors it serializes as undefined.
    const payload = {
      status: this.status,
      name: this.name,
      message: this.message,
      reason: this.reason
    };
    return JSON.stringify(payload);
  }
}
2971
// Canonical PouchDB/CouchDB error singletons. These shared instances are
// templates: use createError() below to get a fresh, safely-mutable copy.
var UNAUTHORIZED = new PouchError(401, 'unauthorized', "Name or password is incorrect.");
var MISSING_BULK_DOCS = new PouchError(400, 'bad_request', "Missing JSON list of 'docs'");
var MISSING_DOC = new PouchError(404, 'not_found', 'missing');
var REV_CONFLICT = new PouchError(409, 'conflict', 'Document update conflict');
var INVALID_ID = new PouchError(400, 'bad_request', '_id field must contain a string');
var MISSING_ID = new PouchError(412, 'missing_id', '_id is required for puts');
var RESERVED_ID = new PouchError(400, 'bad_request', 'Only reserved document ids may start with underscore.');
var NOT_OPEN = new PouchError(412, 'precondition_failed', 'Database not open');
var UNKNOWN_ERROR = new PouchError(500, 'unknown_error', 'Database encountered an unknown error');
var BAD_ARG = new PouchError(500, 'badarg', 'Some query argument is invalid');
var INVALID_REQUEST = new PouchError(400, 'invalid_request', 'Request was invalid');
var QUERY_PARSE_ERROR = new PouchError(400, 'query_parse_error', 'Some query parameter is invalid');
var DOC_VALIDATION = new PouchError(500, 'doc_validation', 'Bad special document member');
var BAD_REQUEST = new PouchError(400, 'bad_request', 'Something wrong with the request');
var NOT_AN_OBJECT = new PouchError(400, 'bad_request', 'Document must be a JSON object');
var DB_MISSING = new PouchError(404, 'not_found', 'Database not found');
var IDB_ERROR = new PouchError(500, 'indexed_db_went_bad', 'unknown');
var WSQ_ERROR = new PouchError(500, 'web_sql_went_bad', 'unknown');
var LDB_ERROR = new PouchError(500, 'levelDB_went_went_bad', 'unknown');
var FORBIDDEN = new PouchError(403, 'forbidden', 'Forbidden by design doc validate_doc_update function');
var INVALID_REV = new PouchError(400, 'bad_request', 'Invalid rev format');
var FILE_EXISTS = new PouchError(412, 'file_exists', 'The database could not be created, the file already exists.');
var MISSING_STUB = new PouchError(412, 'missing_stub', 'A pre-existing attachment stub wasn\'t found');
var INVALID_URL = new PouchError(413, 'invalid_url', 'Provided URL is invalid');
2996
/**
 * Build a fresh error instance from one of the PouchError singletons
 * above, optionally overriding its `reason`. Data properties are copied
 * manually (instead of relying on the prototype chain) so the result
 * survives JSON serialization.
 * @param {PouchError} error - template singleton to copy from
 * @param {string} [reason] - override for the human-readable reason
 * @returns {Error} a new error with PouchError's prototype
 */
function createError(error, reason) {
  function CustomPouchError(reason) {
    // inherit error properties from our parent error manually
    // so as to allow proper JSON parsing.
    var names = Object.getOwnPropertyNames(error);
    for (var i = 0, len = names.length; i < len; i++) {
      if (typeof error[names[i]] !== 'function') {
        this[names[i]] = error[names[i]];
      }
    }

    // capture a stack trace here if the template didn't carry one
    if (this.stack === undefined) {
      this.stack = (new Error()).stack;
    }

    if (reason !== undefined) {
      this.reason = reason;
    }
  }
  CustomPouchError.prototype = PouchError.prototype;
  return new CustomPouchError(reason);
}
3019
/**
 * Normalize an arbitrary error-ish HTTP response into the PouchDB error
 * shape, guaranteeing `name`, `status`, `message` and `stack` fields.
 * Mutates and returns the input object; non-object input is attached as
 * `.data` on the shared UNKNOWN_ERROR singleton.
 */
function generateErrorFromResponse(err) {
  if (typeof err !== 'object') {
    // e.g. a raw string body: fall back to the generic 500 error
    var data = err;
    err = UNKNOWN_ERROR;
    err.data = data;
  }

  // CouchDB reports conflicts via `error`; promote to name/status
  if ('error' in err && err.error === 'conflict') {
    err.name = 'conflict';
    err.status = 409;
  }

  var has = function (key) { return key in err; };

  if (!has('name')) {
    err.name = err.error || 'unknown';
  }
  if (!has('status')) {
    err.status = 500;
  }
  if (!has('message')) {
    err.message = err.message || err.reason;
  }
  if (!has('stack')) {
    err.stack = (new Error()).stack;
  }

  return err;
}
3051
// Run a user-supplied changes filter defensively: return the NEGATED
// filter result, or a 400 error object if the filter itself throws.
function tryFilter(filter, doc, req) {
  try {
    return !filter(doc, req);
  } catch (err) {
    var msg = 'Filter function threw: ' + err.toString();
    return createError(BAD_REQUEST, msg);
  }
}
3060
/**
 * Build a predicate for the changes feed: decides whether a change row
 * passes the user's filter, and normalizes `change.doc` per the options
 * (dropping it when include_docs is off, or stubbing attachment bodies).
 * Returns an error object (not a boolean) if the filter itself threw.
 */
function filterChange(opts) {
  var req = { query: opts.query_params };
  var hasFilter = opts.filter && typeof opts.filter === 'function';

  return function filter(change) {
    if (!change.doc) {
      // CSG sends events on the changes feed that don't have documents,
      // this hack makes a whole lot of existing code robust.
      change.doc = {};
    }

    var filterReturn = hasFilter && tryFilter(opts.filter, change.doc, req);

    if (typeof filterReturn === 'object') {
      // the filter function threw: propagate the error object as-is
      return filterReturn;
    }

    if (filterReturn) {
      // filter rejected this change
      return false;
    }

    if (!opts.include_docs) {
      delete change.doc;
    } else if (!opts.attachments) {
      // keep the doc but replace attachment bodies with stubs
      for (var att in change.doc._attachments) {
        /* istanbul ignore else */
        if (Object.prototype.hasOwnProperty.call(change.doc._attachments, att)) {
          change.doc._attachments[att].stub = true;
        }
      }
    }
    return true;
  };
}
3096
3097// shim for Function.prototype.name,
3098
// Determine whether an ID is valid:
// - invalid IDs begin with an underscore that does not begin '_design' or
//   '_local'
// - any other non-empty string value is a valid id
// Throws the specific error object for each failure case; returns
// undefined for valid ids.
function invalidIdError(id) {
  if (!id) {
    throw createError(MISSING_ID);
  }
  if (typeof id !== 'string') {
    throw createError(INVALID_ID);
  }
  if (/^_/.test(id) && !(/^_(design|local)/).test(id)) {
    throw createError(RESERVED_ID);
  }
}
3117
// Checks if a PouchDB object is "remote" or not, preferring the explicit
// `_remote` flag over the deprecated `db.type()` probe.
function isRemote(db) {
  if (typeof db._remote === 'boolean') {
    return db._remote;
  }
  /* istanbul ignore next */
  if (typeof db.type !== 'function') {
    return false;
  }
  /* istanbul ignore next */
  guardedConsole('warn',
    'db.type() is deprecated and will be removed in ' +
    'a future version of PouchDB');
  /* istanbul ignore next */
  return db.type() === 'http';
}
3134
// Count listeners for `type`, supporting both the modern instance
// method and the legacy static EE.listenerCount API.
function listenerCount(ee, type) {
  if ('listenerCount' in ee) {
    return ee.listenerCount(type);
  }
  return EE.listenerCount(ee, type);
}
3139
// Split "ddoc/view" into [ddoc, view]; a bare "name" maps to
// [name, name]; anything else (empty input, too many slashes) is null.
function parseDesignDocFunctionName(s) {
  if (!s) {
    return null;
  }
  var parts = s.split('/');
  switch (parts.length) {
    case 2:
      return parts;
    case 1:
      return [s, s];
    default:
      return null;
  }
}
3153
// Canonicalize a design-doc function name to the "ddoc/name" form
// (null when the input cannot be parsed).
function normalizeDesignDocFunctionName(s) {
  var parsed = parseDesignDocFunctionName(s);
  if (!parsed) {
    return null;
  }
  return parsed.join('/');
}
3158
// originally parseUri 1.2.2, now patched by us
// (c) Steven Levithan <stevenlevithan.com>
// MIT License
var keys = ["source", "protocol", "authority", "userInfo", "user", "password",
  "host", "port", "relative", "path", "directory", "file", "query", "anchor"];
var qName = "queryKey";
var qParser = /(?:^|&)([^&=]*)=?([^&]*)/g;

// use the "loose" parser
/* eslint no-useless-escape: 0 */
var parser = /^(?:(?![^:@]+:[^:@\/]*@)([^:\/?#.]+):)?(?:\/\/)?((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?)(((\/(?:[^?#](?![^?#\/]*\.[^?#\/.]+(?:[?#]|$)))*\/?)?([^?#\/]*))(?:\?([^#]*))?(?:#(.*))?)/;

// Break a URL string into its components (protocol, host, port, path,
// query, anchor, ...) plus a `queryKey` map of query-string parameters.
// The user and password fields are URI-decoded.
function parseUri(str) {
  var match = parser.exec(str);
  var uri = {};

  for (var idx = 13; idx >= 0; idx--) {
    var field = keys[idx];
    var raw = match[idx] || "";
    var needsDecode = field === 'user' || field === 'password';
    uri[field] = needsDecode ? decodeURIComponent(raw) : raw;
  }

  uri[qName] = {};
  uri[keys[12]].replace(qParser, function ($0, $1, $2) {
    if ($1) {
      uri[qName][$1] = $2;
    }
  });

  return uri;
}
3192
3193// Based on https://github.com/alexdavid/scope-eval v0.0.3
3194// (source: https://unpkg.com/scope-eval@0.0.3/scope_eval.js)
3195// This is basically just a wrapper around new Function()
3196
// Evaluate `source` as a function body with every own enumerable property
// of `scope` visible as a local variable. Thin wrapper around
// new Function(); based on scope-eval v0.0.3.
function scopeEval(source, scope) {
  var names = [];
  var values = [];
  Object.keys(scope).forEach(function (key) {
    names.push(key);
    values.push(scope[key]);
  });
  // the final "argument" to Function() is the function body itself
  names.push(source);
  return Function.apply(null, names).apply(null, values);
}
3209
3210// this is essentially the "update sugar" function from daleharvey/pouchdb#1388
3211// the diffFun tells us what delta to apply to the doc. it either returns
3212// the doc, or false if it doesn't need to do an update after all
// "update sugar" (see daleharvey/pouchdb#1388): fetch `docId` (treating a
// 404 as an empty doc), apply `diffFun`, and write the result back via
// tryAndPut. A falsy return from `diffFun` skips the write entirely.
function upsert(db, docId, diffFun) {
  return db.get(docId)
    .catch(function (err) {
      /* istanbul ignore next */
      if (err.status !== 404) {
        throw err;
      }
      // missing doc: start from scratch
      return {};
    })
    .then(function (doc) {
      // the user might change the _rev, so save it for posterity
      var originalRev = doc._rev;
      var updatedDoc = diffFun(doc);

      if (!updatedDoc) {
        // diffFun declined the update; short-circuit as an optimization
        return {updated: false, rev: originalRev};
      }

      // users aren't allowed to modify these values, so reset them here
      updatedDoc._id = docId;
      updatedDoc._rev = originalRev;
      return tryAndPut(db, updatedDoc, diffFun);
    });
}
3240
// Attempt to write `doc`; on a 409 conflict, loop back through upsert so
// the diff is re-applied against the latest revision.
function tryAndPut(db, doc, diffFun) {
  function onSuccess(res) {
    return {
      updated: true,
      rev: res.rev
    };
  }
  function onError(err) {
    /* istanbul ignore next */
    if (err.status !== 409) {
      throw err;
    }
    // conflict: retry against the current revision
    return upsert(db, doc._id, diffFun);
  }
  return db.put(doc).then(onSuccess, onError);
}
3255
// Decode base64 → binary string; indirection over the platform atob().
var thisAtob = (str) => atob(str);
3259
// Encode binary string → base64; indirection over the platform btoa().
var thisBtoa = (str) => btoa(str);
3263
3264// Abstracts constructing a Blob object, so it also works in older
3265// browsers that don't support the native Blob constructor (e.g.
3266// old QtWebKit versions, Android < 4.4).
// Construct a Blob from `parts`, falling back to the various vendor
// BlobBuilder APIs for engines whose Blob constructor throws a TypeError
// (e.g. old QtWebKit versions, Android < 4.4).
function createBlob(parts, properties) {
  /* global BlobBuilder,MSBlobBuilder,MozBlobBuilder,WebKitBlobBuilder */
  if (!parts) {
    parts = [];
  }
  if (!properties) {
    properties = {};
  }
  try {
    return new Blob(parts, properties);
  } catch (e) {
    // only a TypeError means "constructor unsupported"; anything else is real
    if (e.name !== "TypeError") {
      throw e;
    }
    var Builder;
    if (typeof BlobBuilder !== 'undefined') {
      Builder = BlobBuilder;
    } else if (typeof MSBlobBuilder !== 'undefined') {
      Builder = MSBlobBuilder;
    } else if (typeof MozBlobBuilder !== 'undefined') {
      Builder = MozBlobBuilder;
    } else {
      Builder = WebKitBlobBuilder;
    }
    var builder = new Builder();
    parts.forEach(function (part) {
      builder.append(part);
    });
    return builder.getBlob(properties.type);
  }
}
3288
3289// From http://stackoverflow.com/questions/14967647/ (continues on next line)
3290// encode-decode-image-with-base64-breaks-image (2013-04-21)
// Convert a binary string (one char per byte) into an ArrayBuffer.
// From http://stackoverflow.com/questions/14967647/
function binaryStringToArrayBuffer(bin) {
  var len = bin.length;
  var bytes = new Uint8Array(len);
  for (var i = len - 1; i >= 0; i--) {
    bytes[i] = bin.charCodeAt(i);
  }
  return bytes.buffer;
}
3300
// Wrap a binary string into a Blob of the given MIME type.
function binStringToBluffer(binString, type) {
  var buffer = binaryStringToArrayBuffer(binString);
  return createBlob([buffer], {type});
}
3304
// Decode base64 data into a Blob of the given MIME type.
function b64ToBluffer(b64, type) {
  var binary = thisAtob(b64);
  return binStringToBluffer(binary, type);
}
3308
3309//Can't find original post, but this is close
3310//http://stackoverflow.com/questions/6965107/ (continues on next line)
3311//converting-between-strings-and-arraybuffers
// Convert an ArrayBuffer into a binary string (one char per byte).
function arrayBufferToBinaryString(buffer) {
  var bytes = new Uint8Array(buffer);
  var chars = [];
  for (var i = 0, len = bytes.byteLength; i < len; i++) {
    chars.push(String.fromCharCode(bytes[i]));
  }
  return chars.join('');
}
3321
3322// shim for browsers that don't support it
// Read `blob` and pass its contents to `callback` as a binary string,
// shimming readAsBinaryString via readAsArrayBuffer where unsupported.
function readAsBinaryString(blob, callback) {
  var reader = new FileReader();
  var nativeSupport = typeof reader.readAsBinaryString === 'function';
  reader.onloadend = function (e) {
    var result = e.target.result || '';
    if (nativeSupport) {
      callback(result);
    } else {
      // shimmed path delivered an ArrayBuffer; convert it ourselves
      callback(arrayBufferToBinaryString(result));
    }
  };
  if (nativeSupport) {
    reader.readAsBinaryString(blob);
  } else {
    reader.readAsArrayBuffer(blob);
  }
}
3339
// Read a Blob (or buffer) and hand its contents to `callback` as a
// binary string.
function blobToBinaryString(blobOrBuffer, callback) {
  readAsBinaryString(blobOrBuffer, function (binary) {
    callback(binary);
  });
}
3345
// Read a Blob (or buffer) and hand its contents to `callback`
// base64-encoded.
function blobToBase64(blobOrBuffer, callback) {
  blobToBinaryString(blobOrBuffer, function (binary) {
    callback(thisBtoa(binary));
  });
}
3351
3352// simplified API. universal browser support is assumed
// simplified API. universal browser support is assumed
function readAsArrayBuffer(blob, callback) {
  var reader = new FileReader();
  reader.onloadend = function (e) {
    // normalize a missing result to an empty buffer
    callback(e.target.result || new ArrayBuffer(0));
  };
  reader.readAsArrayBuffer(blob);
}
3361
3362// this is not used in the browser
3363
// Prefer the global setImmediate where available, else fall back to
// setTimeout (both read off the global `self`).
var setImmediateShim = self.setImmediate || self.setTimeout;
// Hash input is processed in 32 KiB slices so large inputs don't block
// the event loop in a single synchronous pass.
var MD5_CHUNK_SIZE = 32768;
3366
// Base64-encode a raw binary string.
function rawToBase64(raw) {
  var encoded = thisBtoa(raw);
  return encoded;
}
3370
// Append the [start, end) range of `blob` to the MD5 accumulator, then
// invoke `callback`.
function appendBlob(buffer, blob, start, end, callback) {
  var needsSlice = start > 0 || end < blob.size;
  if (needsSlice) {
    // only slice blob if we really need to
    blob = blob.slice(start, end);
  }
  readAsArrayBuffer(blob, function (arrayBuffer) {
    buffer.append(arrayBuffer);
    callback();
  });
}
3381
// Append the [start, end) range of `string` to the MD5 accumulator, then
// invoke `callback`.
function appendString(buffer, string, start, end, callback) {
  // only create a substring if we really need to
  var chunk = (start > 0 || end < string.length) ?
    string.substring(start, end) : string;
  buffer.appendBinary(chunk);
  callback();
}
3390
// Asynchronously MD5-hash `data` (a string or a Blob), feeding it to the
// incremental hasher in MD5_CHUNK_SIZE slices so the event loop isn't
// blocked, then invoke `callback` with the base64-encoded digest.
function binaryMd5(data, callback) {
  var inputIsString = typeof data === 'string';
  var len = inputIsString ? data.length : data.size;
  var chunkSize = Math.min(MD5_CHUNK_SIZE, len);
  var chunks = Math.ceil(len / chunkSize);
  var currentChunk = 0;
  // strings use the plain hasher; blobs use the ArrayBuffer variant
  var buffer = inputIsString ? new Md5() : new Md5.ArrayBuffer();

  var append = inputIsString ? appendString : appendBlob;

  function next() {
    // yield to the event loop between chunks
    setImmediateShim(loadNextChunk);
  }

  function done() {
    var raw = buffer.end(true);
    var base64 = rawToBase64(raw);
    callback(base64);
    buffer.destroy();
  }

  function loadNextChunk() {
    var start = currentChunk * chunkSize;
    var end = start + chunkSize;
    currentChunk++;
    if (currentChunk < chunks) {
      append(buffer, data, start, end, next);
    } else {
      // last chunk: finalize instead of scheduling another round
      append(buffer, data, start, end, done);
    }
  }
  loadNextChunk();
}
3424
// Synchronously MD5-hash a (binary) string.
function stringMd5(string) {
  var digest = Md5.hash(string);
  return digest;
}
3428
3429/**
3430 * Creates a new revision string that does NOT include the revision height
3431 * For example '56649f1b0506c6ca9fda0746eb0cacdf'
3432 */
3433function rev(doc, deterministic_revs) {
3434 if (!deterministic_revs) {
3435 return uuid.v4().replace(/-/g, '').toLowerCase();
3436 }
3437
3438 var mutateableDoc = Object.assign({}, doc);
3439 delete mutateableDoc._rev_tree;
3440 return stringMd5(JSON.stringify(mutateableDoc));
3441}
3442
// alias kept from the pre-bundled module layout
var uuid$1 = uuid.v4; // mimic old import, only v4 is ever used elsewhere
3444
3445// We fetch all leafs of the revision tree, and sort them based on tree length
3446// and whether they were deleted, undeleted documents with the longest revision
3447// tree (most edits) win
3448// The final sort algorithm is slightly documented in a sidebar here:
3449// http://guide.couchdb.org/draft/conflicts.html
// Pick the winning revision of `metadata.rev_tree`: among the leaves,
// undeleted beats deleted, then greater position, then greater rev id
// (the same tie-break described in the CouchDB conflicts guide).
function winningRev(metadata) {
  var winningId;
  var winningPos;
  var winningDeleted;
  var toVisit = metadata.rev_tree.slice();
  var node;
  while ((node = toVisit.pop())) {
    var tree = node.ids;
    var branches = tree[2];
    var pos = node.pos;
    if (branches.length) { // non-leaf: queue children and move on
      for (var i = 0, len = branches.length; i < len; i++) {
        toVisit.push({pos: pos + 1, ids: branches[i]});
      }
      continue;
    }
    var deleted = !!tree[1].deleted;
    var id = tree[0];
    // sort by deleted, then pos, then id
    var beatsCurrent;
    if (!winningId) {
      beatsCurrent = true;
    } else if (winningDeleted !== deleted) {
      beatsCurrent = winningDeleted;
    } else if (winningPos !== pos) {
      beatsCurrent = winningPos < pos;
    } else {
      beatsCurrent = winningId < id;
    }
    if (beatsCurrent) {
      winningId = id;
      winningPos = pos;
      winningDeleted = deleted;
    }
  }

  return winningPos + '-' + winningId;
}
3479
3480// Pretty much all below can be combined into a higher order function to
3481// traverse revisions
3482// The return value from the callback will be passed as context to all
3483// children of that node
// Depth-first walk over a rev tree. `callback(isLeaf, pos, id, ctx, opts)`
// is invoked per node; its return value is threaded to that node's
// children as `ctx`.
function traverseRevTree(revs, callback) {
  var stack = revs.slice();
  var node;
  while ((node = stack.pop())) {
    var pos = node.pos;
    var tree = node.ids;
    var branches = tree[2];
    var isLeaf = branches.length === 0;
    var childCtx = callback(isLeaf, pos, tree[0], node.ctx, tree[1]);
    for (var i = 0, len = branches.length; i < len; i++) {
      stack.push({pos: pos + 1, ids: branches[i], ctx: childCtx});
    }
  }
}
3499
// Ascending comparator on the `pos` field.
function sortByPos(a, b) {
  var delta = a.pos - b.pos;
  return delta;
}
3503
// Collect every leaf of the rev tree as {rev, opts}, ordered by
// descending position.
function collectLeaves(revs) {
  var leaves = [];
  traverseRevTree(revs, function (isLeaf, pos, id, acc, opts) {
    if (!isLeaf) {
      return;
    }
    leaves.push({rev: pos + "-" + id, pos, opts});
  });
  leaves.sort(sortByPos).reverse();
  // pos was only needed for sorting; strip it from the result
  leaves.forEach(function (leaf) {
    delete leaf.pos;
  });
  return leaves;
}
3517
// returns revs of all conflicts that is leaves such that
// 1. are not deleted and
// 2. are different than winning revision
function collectConflicts(metadata) {
  var win = winningRev(metadata);
  var conflicts = [];
  collectLeaves(metadata.rev_tree).forEach(function (leaf) {
    if (leaf.rev !== win && !leaf.opts.deleted) {
      conflicts.push(leaf.rev);
    }
  });
  return conflicts;
}
3533
// compact a tree by marking its non-leafs as missing,
// and return a list of revs to delete
function compactTree(metadata) {
  var revsToDelete = [];
  traverseRevTree(metadata.rev_tree, function (isLeaf, pos, revHash, ctx, opts) {
    if (!isLeaf && opts.status === 'available') {
      revsToDelete.push(pos + '-' + revHash);
      opts.status = 'missing';
    }
  });
  return revsToDelete;
}
3547
3548// `findPathToLeaf()` returns an array of revs that goes from the specified
3549// leaf rev to the root of that leaf’s branch.
3550//
3551// eg. for this rev tree:
3552// 1-9692 ▶ 2-37aa ▶ 3-df22 ▶ 4-6e94 ▶ 5-df4a ▶ 6-6a3a ▶ 7-57e5
3553// ┃ ┗━━━━━━▶ 5-8d8c ▶ 6-65e0
3554// ┗━━━━━━▶ 3-43f6 ▶ 4-a3b4
3555//
3556// For a `targetRev` of '7-57e5', `findPathToLeaf()` would return ['7-57e5', '6-6a3a', '5-df4a']
3557// The `revs` argument has the same structure as what `revs_tree` has on e.g.
3558// the IndexedDB representation of the rev tree datastructure. Please refer to
3559// tests/unit/test.purge.js for examples of what these look like.
3560//
3561// This function will throw an error if:
3562// - The requested revision does not exist
3563// - The requested revision is not a leaf
function findPathToLeaf(revs, targetRev) {
  let path = [];
  const toVisit = revs.slice();

  let node;
  while ((node = toVisit.pop())) {
    const { pos, ids: tree } = node;
    const rev = `${pos}-${tree[0]}`;
    const branches = tree[2];

    // just assuming we're already working on the path up towards our desired leaf.
    path.push(rev);

    // we've reached the leaf of our dreams, so return the computed path.
    if (rev === targetRev) {
      //…unleeeeess
      if (branches.length !== 0) {
        throw new Error('The requested revision is not a leaf');
      }
      // path was accumulated root→leaf; callers expect leaf→root order
      return path.reverse();
    }

    // this is based on the assumption that after we have a leaf (`branches.length == 0`), we handle the next
    // branch. this is true for all branches other than the path leading to the winning rev (which is 7-57e5 in
    // the example above. i've added a reset condition for branching nodes (`branches.length > 1`) as well.
    if (branches.length === 0 || branches.length > 1) {
      path = [];
    }

    // as a next step, we push the branches of this node to `toVisit` for visiting it during the next iteration
    for (let i = 0, len = branches.length; i < len; i++) {
      toVisit.push({ pos: pos + 1, ids: branches[i] });
    }
  }
  if (path.length === 0) {
    throw new Error('The requested revision does not exist');
  }
  // NOTE(review): reaching here with a non-empty path means traversal ended
  // without ever matching `targetRev`; presumably unreachable for
  // well-formed trees — confirm before relying on this return value.
  return path.reverse();
}
3603
// build up a list of all the paths to the leafs in this revision tree
function rootToLeaf(revs) {
  var paths = [];
  var stack = revs.slice();
  var node;
  while ((node = stack.pop())) {
    var pos = node.pos;
    var tree = node.ids;
    var id = tree[0];
    var opts = tree[1];
    var branches = tree[2];

    // copy the history so sibling branches don't share one array
    var history = node.history ? node.history.slice() : [];
    history.push({id, opts});
    if (branches.length === 0) {
      // leaf: record the full root→leaf path with its starting position
      paths.push({pos: (pos + 1 - history.length), ids: history});
    }
    for (var i = 0, len = branches.length; i < len; i++) {
      stack.push({pos: pos + 1, ids: branches[i], history});
    }
  }
  return paths.reverse();
}
3628
// for a better overview of what this merge logic is doing, read the
// CouchDB documentation on replication and conflicts (revision trees).
3630
// Ascending comparator on `pos` (duplicate of sortByPos, kept separate
// by the bundler).
function sortByPos$1(a, b) {
  var delta = a.pos - b.pos;
  return delta;
}
3634
// classic binary search: returns the lowest index at which `item` could
// be inserted into sorted `arr` while keeping it sorted
function binarySearch(arr, item, comparator) {
  var lo = 0;
  var hi = arr.length;
  while (lo < hi) {
    var mid = (lo + hi) >>> 1;
    if (comparator(arr[mid], item) < 0) {
      lo = mid + 1;
    } else {
      hi = mid;
    }
  }
  return lo;
}
3650
// assuming the arr is sorted, insert the item in the proper place
function insertSorted(arr, item, comparator) {
  arr.splice(binarySearch(arr, item, comparator), 0, item);
}
3656
// Turn a path as a flat array into a tree with a single branch.
// If any should be stemmed from the beginning of the array, that's passed
// in as the second argument
function pathToTree(path, numStemmed) {
  var root;
  var parent;
  for (var i = numStemmed, len = path.length; i < len; i++) {
    var entry = path[i];
    var treeNode = [entry.id, entry.opts, []];
    if (parent) {
      parent[2].push(treeNode);
    } else {
      root = treeNode;
    }
    parent = treeNode;
  }
  return root;
}
3675
// compare the IDs of two trees (never returns 0; equal ids sort as 1)
function compareTree(a, b) {
  if (a[0] < b[0]) {
    return -1;
  }
  return 1;
}
3680
// Merge two trees together
// The roots of tree1 and tree2 must be the same revision
// Mutates tree1 in place; returns {conflicts, tree} where conflicts is
// false, 'new_leaf' or 'new_branch'.
function mergeTree(in_tree1, in_tree2) {
  var queue = [{tree1: in_tree1, tree2: in_tree2}];
  var conflicts = false;
  while (queue.length > 0) {
    var item = queue.pop();
    var tree1 = item.tree1;
    var tree2 = item.tree2;

    // an 'available' status on either side wins for the merged node
    if (tree1[1].status || tree2[1].status) {
      tree1[1].status =
        (tree1[1].status === 'available' ||
        tree2[1].status === 'available') ? 'available' : 'missing';
    }

    for (var i = 0; i < tree2[2].length; i++) {
      if (!tree1[2][0]) {
        // tree1 has no children here: adopt tree2's child wholesale
        conflicts = 'new_leaf';
        tree1[2][0] = tree2[2][i];
        continue;
      }

      var merged = false;
      for (var j = 0; j < tree1[2].length; j++) {
        if (tree1[2][j][0] === tree2[2][i][0]) {
          // same child rev id on both sides: recurse into the pair
          queue.push({tree1: tree1[2][j], tree2: tree2[2][i]});
          merged = true;
        }
      }
      if (!merged) {
        // child only exists in tree2: splice it in as a new branch
        conflicts = 'new_branch';
        insertSorted(tree1[2], tree2[2][i], compareTree);
      }
    }
  }
  return {conflicts, tree: in_tree1};
}
3719
// Merge the single-branch `path` into the forest `tree`. When `dontExpand`
// is true (used after stemming) paths starting at different positions are
// kept separate rather than aligned and merged.
function doMerge(tree, path, dontExpand) {
  var restree = [];
  var conflicts = false;
  var merged = false;
  var res;

  if (!tree.length) {
    return {tree: [path], conflicts: 'new_leaf'};
  }

  for (var i = 0, len = tree.length; i < len; i++) {
    var branch = tree[i];
    if (branch.pos === path.pos && branch.ids[0] === path.ids[0]) {
      // Paths start at the same position and have the same root, so they need
      // merged
      res = mergeTree(branch.ids, path.ids);
      restree.push({pos: branch.pos, ids: res.tree});
      conflicts = conflicts || res.conflicts;
      merged = true;
    } else if (dontExpand !== true) {
      // The paths start at a different position, take the earliest path and
      // traverse up until it as at the same point from root as the path we
      // want to merge. If the keys match we return the longer path with the
      // other merged After stemming we don't want to expand the trees

      var t1 = branch.pos < path.pos ? branch : path;
      var t2 = branch.pos < path.pos ? path : branch;
      // how many generations deeper t2 starts relative to t1
      var diff = t2.pos - t1.pos;

      var candidateParents = [];

      // walk t1 down `diff` generations looking for a node whose rev id
      // matches t2's root
      var trees = [];
      trees.push({ids: t1.ids, diff, parent: null, parentIdx: null});
      while (trees.length > 0) {
        var item = trees.pop();
        if (item.diff === 0) {
          if (item.ids[0] === t2.ids[0]) {
            candidateParents.push(item);
          }
          continue;
        }
        var elements = item.ids[2];
        for (var j = 0, elementsLen = elements.length; j < elementsLen; j++) {
          trees.push({
            ids: elements[j],
            diff: item.diff - 1,
            parent: item.ids,
            parentIdx: j
          });
        }
      }

      var el = candidateParents[0];

      if (!el) {
        // no alignment point found: keep the branch untouched
        restree.push(branch);
      } else {
        // graft the merged subtree back into t1 at the alignment point
        res = mergeTree(el.ids, t2.ids);
        el.parent[2][el.parentIdx] = res.tree;
        restree.push({pos: t1.pos, ids: t1.ids});
        conflicts = conflicts || res.conflicts;
        merged = true;
      }
    } else {
      restree.push(branch);
    }
  }

  // We didnt find
  if (!merged) {
    restree.push(path);
  }

  restree.sort(sortByPos$1);

  return {
    tree: restree,
    conflicts: conflicts || 'internal_node'
  };
}
3800
// To ensure we don't grow the revision tree infinitely, we stem old revisions
// Returns {tree, revs} where `revs` lists revisions stemmed away from
// every branch (and therefore safe to delete).
function stem(tree, depth) {
  // First we break out the tree into a complete list of root to leaf paths
  var paths = rootToLeaf(tree);
  var stemmedRevs;

  var result;
  for (var i = 0, len = paths.length; i < len; i++) {
    // Then for each path, we cut off the start of the path based on the
    // `depth` to stem to, and generate a new set of flat trees
    var path = paths[i];
    var stemmed = path.ids;
    var node;
    if (stemmed.length > depth) {
      // only do the stemming work if we actually need to stem
      if (!stemmedRevs) {
        stemmedRevs = {}; // avoid allocating this object unnecessarily
      }
      var numStemmed = stemmed.length - depth;
      node = {
        pos: path.pos + numStemmed,
        ids: pathToTree(stemmed, numStemmed)
      };

      // remember every revision we cut off this path
      for (var s = 0; s < numStemmed; s++) {
        var rev = (path.pos + s) + '-' + stemmed[s].id;
        stemmedRevs[rev] = true;
      }
    } else { // no need to actually stem
      node = {
        pos: path.pos,
        ids: pathToTree(stemmed, 0)
      };
    }

    // Then we remerge all those flat trees together, ensuring that we don't
    // connect trees that would go beyond the depth limit
    if (result) {
      result = doMerge(result, node, true).tree;
    } else {
      result = [node];
    }
  }

  // this is memory-heavy per Chrome profiler, avoid unless we actually stemmed
  if (stemmedRevs) {
    traverseRevTree(result, function (isLeaf, pos, revHash) {
      // some revisions may have been removed in a branch but not in another
      delete stemmedRevs[pos + '-' + revHash];
    });
  }

  return {
    tree: result,
    revs: stemmedRevs ? Object.keys(stemmedRevs) : []
  };
}
3858
// Merge `path` into `tree`, then stem the result to `depth` generations.
// Returns the new tree, the revs that were stemmed away, and the
// conflict marker produced by the merge.
function merge(tree, path, depth) {
  var mergedResult = doMerge(tree, path);
  var stemmedResult = stem(mergedResult.tree, depth);
  return {
    tree: stemmedResult.tree,
    stemmedRevs: stemmedResult.revs,
    conflicts: mergedResult.conflicts
  };
}
3868
// return true if a rev exists in the rev tree, false otherwise
function revExists(revs, rev) {
  var parts = rev.split('-');
  var targetPos = parseInt(parts[0], 10);
  var targetId = parts[1];

  var stack = revs.slice();
  var node;
  while ((node = stack.pop())) {
    if (node.pos === targetPos && node.ids[0] === targetId) {
      return true;
    }
    var branches = node.ids[2];
    for (var i = 0, len = branches.length; i < len; i++) {
      stack.push({pos: node.pos + 1, ids: branches[i]});
    }
  }
  return false;
}
3888
// Extract the `ids` tree from a rev-tree node.
function getTrees(node) {
  var tree = node.ids;
  return tree;
}

// check if a specific revision of a doc has been deleted
// - metadata: the metadata object from the doc store
// - rev: (optional) the revision to check. defaults to winning revision
// Returns undefined when the revision isn't found in the tree.
function isDeleted(metadata, rev) {
  if (!rev) {
    rev = winningRev(metadata);
  }
  var targetId = rev.substring(rev.indexOf('-') + 1);
  var pending = metadata.rev_tree.map(getTrees);

  var tree;
  while ((tree = pending.pop())) {
    if (tree[0] === targetId) {
      return !!tree[1].deleted;
    }
    pending = pending.concat(tree[2]);
  }
}
3911
// A "local" (non-replicating) doc has a string _id under '_local/'.
function isLocalId(id) {
  if (typeof id !== 'string') {
    return false;
  }
  return id.startsWith('_local/');
}
3915
// returns the current leaf node for a given revision
function latest(rev, metadata) {
  var stack = metadata.rev_tree.slice();
  var node;
  while ((node = stack.pop())) {
    var pos = node.pos;
    var tree = node.ids;
    var id = tree[0];
    var opts = tree[1];
    var branches = tree[2];

    // copy so sibling branches don't share one history array
    var history = node.history ? node.history.slice() : [];
    history.push({id, pos, opts});

    if (branches.length === 0) {
      // leaf: if `rev` appears anywhere on this leaf's ancestry,
      // this leaf is its latest descendant
      for (var i = 0, len = history.length; i < len; i++) {
        var ancestor = history[i];
        if (ancestor.pos + '-' + ancestor.id === rev) {
          return pos + '-' + id;
        }
      }
    }

    for (var j = 0, l = branches.length; j < l; j++) {
      stack.push({pos: pos + 1, ids: branches[j], history});
    }
  }

  /* istanbul ignore next */
  throw new Error('Unable to resolve latest revision for id ' + metadata.id + ', rev ' + rev);
}
3951
// Emit a 'change' event, keeping the try/catch in its own small function
// so V8 doesn't deoptimize the (hot) caller.
function tryCatchInChangeListener(self, change, pending, lastSeq) {
  try {
    self.emit('change', change, pending, lastSeq);
  } catch (err) {
    guardedConsole('error', 'Error in .on("change", function):', err);
  }
}
3960
// Build a single _changes feed entry for `doc`, optionally listing all
// leaf revisions (style: 'all_docs') and conflicting revisions.
function processChange(doc, metadata, opts) {
  var changes;
  if (opts.style === 'all_docs') {
    changes = collectLeaves(metadata.rev_tree)
      .map(function (x) { return {rev: x.rev}; });
  } else {
    changes = [{rev: doc._rev}];
  }
  var change = {
    id: metadata.id,
    changes,
    doc
  };

  if (isDeleted(metadata, doc._rev)) {
    change.deleted = true;
  }
  if (opts.conflicts) {
    // only attach _conflicts when there is at least one
    var conflicts = collectConflicts(metadata);
    if (conflicts.length) {
      change.doc._conflicts = conflicts;
    }
  }
  return change;
}
3984
// Event-emitting, promise-like handle for a changes feed. Wires the
// adapter's _changes() into 'change'/'complete'/'error' events, supports
// cancellation, and cleans itself up when the database is destroyed.
class Changes$1 extends EE {
  constructor(db, opts, callback) {
    super();
    this.db = db;
    opts = opts ? clone(opts) : {};
    // once()-wrapped terminal handler: emits exactly one of
    // 'error'/'complete', then detaches all listeners
    var complete = opts.complete = once((err, resp) => {
      if (err) {
        if (listenerCount(this, 'error') > 0) {
          this.emit('error', err);
        }
      } else {
        this.emit('complete', resp);
      }
      this.removeAllListeners();
      db.removeListener('destroyed', onDestroy);
    });
    // node-style callback support, layered on the events above
    if (callback) {
      this.on('complete', function (resp) {
        callback(null, resp);
      });
      this.on('error', callback);
    }
    const onDestroy = () => {
      this.cancel();
    };
    db.once('destroyed', onDestroy);

    opts.onChange = (change, pending, lastSeq) => {
      /* istanbul ignore if */
      if (this.isCancelled) {
        return;
      }
      tryCatchInChangeListener(this, change, pending, lastSeq);
    };

    // replace opts.complete with the promise's resolvers; the original
    // `complete` above is re-attached via this.then() below
    var promise = new Promise(function (fulfill, reject) {
      opts.complete = function (err, res) {
        if (err) {
          reject(err);
        } else {
          fulfill(res);
        }
      };
    });
    this.once('cancel', function () {
      db.removeListener('destroyed', onDestroy);
      opts.complete(null, {status: 'cancelled'});
    });
    // make this object thenable by delegating to the internal promise
    this.then = promise.then.bind(promise);
    this['catch'] = promise['catch'].bind(promise);
    this.then(function (result) {
      complete(null, result);
    }, complete);



    // defer starting until the adapter's task queue is ready
    if (!db.taskqueue.isReady) {
      db.taskqueue.addTask((failed) => {
        if (failed) {
          opts.complete(failed);
        } else if (this.isCancelled) {
          this.emit('cancel');
        } else {
          this.validateChanges(opts);
        }
      });
    } else {
      this.validateChanges(opts);
    }
  }

  // Mark the feed cancelled; 'cancel' is emitted immediately only when the
  // task queue is ready (otherwise the queued task emits it).
  cancel() {
    this.isCancelled = true;
    if (this.db.taskqueue.isReady) {
      this.emit('cancel');
    }
  }

  // Let the changes-filter plugin (if installed) validate opts before the
  // feed starts; errors are routed to the completion callback.
  validateChanges(opts) {
    var callback = opts.complete;

    /* istanbul ignore else */
    if (PouchDB._changesFilterPlugin) {
      PouchDB._changesFilterPlugin.validate(opts, (err) => {
        if (err) {
          return callback(err);
        }
        this.doChanges(opts);
      });
    } else {
      this.doChanges(opts);
    }
  }

  // Normalize opts and hand off to the adapter's _changes(); resolves
  // since:'now' via db.info() and defers filtering to the plugin.
  doChanges(opts) {
    var callback = opts.complete;

    opts = clone(opts);
    if ('live' in opts && !('continuous' in opts)) {
      opts.continuous = opts.live;
    }
    opts.processChange = processChange;

    if (opts.since === 'latest') {
      opts.since = 'now';
    }
    if (!opts.since) {
      opts.since = 0;
    }
    if (opts.since === 'now') {
      // translate 'now' into the db's current update_seq, then re-enter
      this.db.info().then((info) => {
        /* istanbul ignore if */
        if (this.isCancelled) {
          callback(null, {status: 'cancelled'});
          return;
        }
        opts.since = info.update_seq;
        this.doChanges(opts);
      }, callback);
      return;
    }

    /* istanbul ignore else */
    if (PouchDB._changesFilterPlugin) {
      PouchDB._changesFilterPlugin.normalize(opts);
      if (PouchDB._changesFilterPlugin.shouldFilter(this, opts)) {
        return PouchDB._changesFilterPlugin.filter(this, opts);
      }
    } else {
      // warn about filter-related options that will be silently ignored
      ['doc_ids', 'filter', 'selector', 'view'].forEach(function (key) {
        if (key in opts) {
          guardedConsole('warn',
            'The "' + key + '" option was passed in to changes/replicate, ' +
            'but pouchdb-changes-filter plugin is not installed, so it ' +
            'was ignored. Please install the plugin to enable filtering.'
          );
        }
      });
    }

    if (!('descending' in opts)) {
      opts.descending = false;
    }

    // 0 and 1 should return 1 document
    opts.limit = opts.limit === 0 ? 1 : opts.limit;
    opts.complete = callback;
    var newPromise = this.db._changes(opts);
    /* istanbul ignore else */
    if (newPromise && typeof newPromise.cancel === 'function') {
      // chain the adapter's own cancel into ours
      const cancel = this.cancel;
      this.cancel = (...args) => {
        newPromise.cancel();
        cancel.apply(this, args);
      };
    }
  }
}
4143
4144/*
4145 * A generic pouch adapter
4146 */
4147
// Wrapper for functions that call the bulkdocs api with a single doc,
// if the first result is an error, return an error
function yankError(callback, docId) {
  return function (err, results) {
    var failure = err || (results[0] && results[0].error ? results[0] : null);
    if (failure) {
      // tag the failure with the doc it belongs to
      failure.docId = docId;
      callback(failure);
    } else {
      callback(null, results.length ? results[0] : results);
    }
  };
}
4161
// clean docs given to us by the user (mutates docs in place)
function cleanDocs(docs) {
  docs.forEach(function (doc) {
    if (doc._deleted) {
      delete doc._attachments; // ignore atts for deleted docs
      return;
    }
    if (!doc._attachments) {
      return;
    }
    // filter out extraneous keys from _attachments
    Object.keys(doc._attachments).forEach(function (att) {
      doc._attachments[att] = pick(doc._attachments[att],
        ['data', 'digest', 'content_type', 'length', 'revpos', 'stub']);
    });
  });
}
4179
// compare two docs, first by _id then by _rev
// (the rev comparison uses the numeric _revisions.start generation)
function compareByIdThenRev(a, b) {
  if (a._id !== b._id) {
    return a._id < b._id ? -1 : 1;
  }
  const aStart = a._revisions ? a._revisions.start : 0;
  const bStart = b._revisions ? b._revisions.start : 0;
  return aStart - bStart;
}
4189
// for every node in a revision tree computes its distance from the closest
// leaf
function computeHeight(revs) {
  var height = {};
  var edges = [];
  traverseRevTree(revs, function (isLeaf, pos, id, prnt) {
    var rev$$1 = pos + "-" + id;
    if (isLeaf) {
      height[rev$$1] = 0;
    }
    if (prnt !== undefined) {
      edges.push({from: prnt, to: rev$$1});
    }
    return rev$$1;
  });

  // process edges child-first so each parent sees its children's heights
  edges.reverse();
  edges.forEach(function (edge) {
    var viaChild = 1 + height[edge.to];
    if (height[edge.from] === undefined) {
      height[edge.from] = viaChild;
    } else {
      height[edge.from] = Math.min(height[edge.from], viaChild);
    }
  });
  return height;
}
4216
// Rewrite `opts` (in place) so a keys-based allDocs query no longer needs
// native skip/limit support: the slice is applied to `keys` up front and
// skip/limit/descending are neutralized.
function allDocsKeysParse(opts) {
  var keys;
  if ('limit' in opts) {
    keys = opts.keys.slice(opts.skip, opts.limit + opts.skip);
  } else if (opts.skip > 0) {
    keys = opts.keys.slice(opts.skip);
  } else {
    keys = opts.keys;
  }
  opts.keys = keys;
  opts.skip = 0;
  delete opts.limit;
  if (opts.descending) {
    keys.reverse();
    opts.descending = false;
  }
}
4229
// all compaction is done in a queue, to avoid attaching
// too many listeners at once
// Runs the task at the head of self._compactionQueue, then dequeues it and
// recurses while more tasks remain. (`self` here is the database object.)
function doNextCompaction(self) {
  var task = self._compactionQueue[0];
  var opts = task.opts;
  var callback = task.callback;
  // resume from the last recorded compaction checkpoint, if any
  self.get('_local/compaction').catch(function () {
    return false;
  }).then(function (doc) {
    if (doc && doc.last_seq) {
      opts.last_seq = doc.last_seq;
    }
    self._compact(opts, function (err, res) {
      /* istanbul ignore if */
      if (err) {
        callback(err);
      } else {
        callback(null, res);
      }
      // dequeue after the callback has run, then kick off the next task
      nextTick(function () {
        self._compactionQueue.shift();
        if (self._compactionQueue.length) {
          doNextCompaction(self);
        }
      });
    });
  });
}
4258
// Record a purge of `docId`@`rev$$1` in the '_local/purges' bookkeeping
// doc: bump purgeSeq, append the purge info, and trim the history to
// `db.purged_infos_limit` entries. Creates the doc on first purge (404).
// Returns the db.put() promise.
function appendPurgeSeq(db, docId, rev$$1) {
  return db.get('_local/purges').then(function (doc) {
    const purgeSeq = doc.purgeSeq + 1;
    doc.purges.push({
      docId,
      rev: rev$$1,
      purgeSeq,
    });
    // Fix: this previously read `self.purged_infos_limit`, where `self` is
    // the global object, so the limit was always undefined and the purge
    // history grew without bound; the limit lives on the database object.
    if (doc.purges.length > db.purged_infos_limit) {
      doc.purges.splice(0, doc.purges.length - db.purged_infos_limit);
    }
    doc.purgeSeq = purgeSeq;
    return doc;
  }).catch(function (err) {
    if (err.status !== 404) {
      throw err;
    }
    // first purge ever: create the bookkeeping doc
    return {
      _id: '_local/purges',
      purges: [{
        docId,
        rev: rev$$1,
        purgeSeq: 0,
      }],
      purgeSeq: 0,
    };
  }).then(function (doc) {
    return db.put(doc);
  });
}
4289
// Validate an attachment name: returns an error string for reserved names
// (leading underscore), or false when the name is acceptable.
function attachmentNameError(name) {
  if (name.charAt(0) !== '_') {
    return false;
  }
  return name + ' is not a valid attachment name, attachment ' +
    'names cannot start with \'_\'';
}
4297
// True when `doc` cannot be treated as a single document object:
// null, an array (bulk payload), or any non-object value.
function isNotSingleDoc(doc) {
  if (doc === null || Array.isArray(doc)) {
    return true;
  }
  return typeof doc !== 'object';
}
4301
// A well-formed revision string looks like "<generation>-<hash>", e.g. "1-abc".
const validRevRegex = /^\d+-[^-]*$/;
function isValidRev(rev$$1) {
  if (typeof rev$$1 !== 'string') {
    return false;
  }
  return validRevRegex.test(rev$$1);
}
4306
// Abstract base for all PouchDB adapters. Concrete adapters supply the
// storage primitives (_get, _put, _bulkDocs, _allDocs, _getRevisionTree,
// _doCompaction, ...); this class wraps them into the public callback- and
// promise-based API via adapterFun().
class AbstractPouchDB extends EE {
  _setup() {
    // post: create a document (bulkDocs generates an _id when absent).
    this.post = adapterFun('post', function (doc, opts, callback) {
      if (typeof opts === 'function') {
        callback = opts;
        opts = {};
      }
      if (isNotSingleDoc(doc)) {
        return callback(createError(NOT_AN_OBJECT));
      }
      this.bulkDocs({docs: [doc]}, opts, yankError(callback, doc._id));
    }).bind(this);

    // put: create or update a single document with a known _id.
    this.put = adapterFun('put', function (doc, opts, cb) {
      if (typeof opts === 'function') {
        cb = opts;
        opts = {};
      }
      if (isNotSingleDoc(doc)) {
        return cb(createError(NOT_AN_OBJECT));
      }
      invalidIdError(doc._id);
      if ('_rev' in doc && !isValidRev(doc._rev)) {
        return cb(createError(INVALID_REV));
      }
      // _local docs bypass the revision tree when the adapter supports it.
      if (isLocalId(doc._id) && typeof this._putLocal === 'function') {
        if (doc._deleted) {
          return this._removeLocal(doc, cb);
        } else {
          return this._putLocal(doc, cb);
        }
      }

      const putDoc = (next) => {
        if (typeof this._put === 'function' && opts.new_edits !== false) {
          this._put(doc, opts, next);
        } else {
          this.bulkDocs({docs: [doc]}, opts, yankError(next, doc._id));
        }
      };

      // force:true writes the doc as a forced child revision of doc._rev
      // using new_edits:false semantics, so a conflict is never reported.
      if (opts.force && doc._rev) {
        transformForceOptionToNewEditsOption();
        putDoc(function (err) {
          var result = err ? null : {ok: true, id: doc._id, rev: doc._rev};
          cb(err, result);
        });
      } else {
        putDoc(cb);
      }

      // Rewrites doc._rev/_revisions to a new generation and switches the
      // request to new_edits:false (mutates `doc` and `opts` in place).
      function transformForceOptionToNewEditsOption() {
        var parts = doc._rev.split('-');
        var oldRevId = parts[1];
        var oldRevNum = parseInt(parts[0], 10);

        var newRevNum = oldRevNum + 1;
        var newRevId = rev();

        doc._revisions = {
          start: newRevNum,
          ids: [newRevId, oldRevId]
        };
        doc._rev = newRevNum + '-' + newRevId;
        opts.new_edits = false;
      }
    }).bind(this);

    // putAttachment: attach a blob to a doc, creating the doc when missing.
    this.putAttachment = adapterFun('putAttachment', function (docId, attachmentId, rev$$1, blob, type) {
      var api = this;
      // (docId, attachmentId, blob, type) call form: shift arguments left.
      if (typeof type === 'function') {
        type = blob;
        blob = rev$$1;
        rev$$1 = null;
      }
      // Lets fix in https://github.com/pouchdb/pouchdb/issues/3267
      /* istanbul ignore if */
      if (typeof type === 'undefined') {
        type = blob;
        blob = rev$$1;
        rev$$1 = null;
      }
      if (!type) {
        guardedConsole('warn', 'Attachment', attachmentId, 'on document', docId, 'is missing content_type');
      }

      function createAttachment(doc) {
        // parseInt extracts the generation number from "N-hash".
        var prevrevpos = '_rev' in doc ? parseInt(doc._rev, 10) : 0;
        doc._attachments = doc._attachments || {};
        doc._attachments[attachmentId] = {
          content_type: type,
          data: blob,
          revpos: ++prevrevpos
        };
        return api.put(doc);
      }

      return api.get(docId).then(function (doc) {
        if (doc._rev !== rev$$1) {
          throw createError(REV_CONFLICT);
        }

        return createAttachment(doc);
      }, function (err) {
        // create new doc
        /* istanbul ignore else */
        if (err.reason === MISSING_DOC.message) {
          return createAttachment({_id: docId});
        } else {
          throw err;
        }
      });
    }).bind(this);

    // removeAttachment: delete one attachment; rev must match the current rev.
    this.removeAttachment = adapterFun('removeAttachment', function (docId, attachmentId, rev$$1, callback) {
      this.get(docId, (err, obj) => {
        /* istanbul ignore if */
        if (err) {
          callback(err);
          return;
        }
        if (obj._rev !== rev$$1) {
          callback(createError(REV_CONFLICT));
          return;
        }
        /* istanbul ignore if */
        if (!obj._attachments) {
          return callback();
        }
        delete obj._attachments[attachmentId];
        // Drop the empty _attachments container entirely.
        if (Object.keys(obj._attachments).length === 0) {
          delete obj._attachments;
        }
        this.put(obj, callback);
      });
    }).bind(this);

    // remove: delete a doc; accepts (doc, opts, cb) or (id, rev, opts, cb).
    this.remove = adapterFun('remove', function (docOrId, optsOrRev, opts, callback) {
      var doc;
      if (typeof optsOrRev === 'string') {
        // id, rev, opts, callback style
        doc = {
          _id: docOrId,
          _rev: optsOrRev
        };
        if (typeof opts === 'function') {
          callback = opts;
          opts = {};
        }
      } else {
        // doc, opts, callback style
        doc = docOrId;
        if (typeof optsOrRev === 'function') {
          callback = optsOrRev;
          opts = {};
        } else {
          callback = opts;
          opts = optsOrRev;
        }
      }
      opts = opts || {};
      opts.was_delete = true;
      // Deletion writes a tombstone revision rather than erasing the doc.
      var newDoc = {_id: doc._id, _rev: (doc._rev || opts.rev)};
      newDoc._deleted = true;
      if (isLocalId(newDoc._id) && typeof this._removeLocal === 'function') {
        return this._removeLocal(doc, callback);
      }
      this.bulkDocs({docs: [newDoc]}, opts, yankError(callback, newDoc._id));
    }).bind(this);

    // revsDiff: given {id: [revs...]}, report which revs this db is missing.
    this.revsDiff = adapterFun('revsDiff', function (req, opts, callback) {
      if (typeof opts === 'function') {
        callback = opts;
        opts = {};
      }
      var ids = Object.keys(req);

      if (!ids.length) {
        return callback(null, {});
      }

      var count = 0;
      var missing = new Map();

      function addToMissing(id, revId) {
        if (!missing.has(id)) {
          missing.set(id, {missing: []});
        }
        missing.get(id).missing.push(revId);
      }

      // Walk one doc's rev tree, collecting requested revs that are either
      // absent from the tree or present but not 'available'.
      function processDoc(id, rev_tree) {
        // Is this fast enough? Maybe we should switch to a set simulated by a map
        var missingForId = req[id].slice(0);
        traverseRevTree(rev_tree, function (isLeaf, pos, revHash, ctx,
          opts) {
            var rev$$1 = pos + '-' + revHash;
            var idx = missingForId.indexOf(rev$$1);
            if (idx === -1) {
              return;
            }

            missingForId.splice(idx, 1);
            /* istanbul ignore if */
            if (opts.status !== 'available') {
              addToMissing(id, rev$$1);
            }
          });

        // Traversing the tree is synchronous, so now `missingForId` contains
        // revisions that were not found in the tree
        missingForId.forEach(function (rev$$1) {
          addToMissing(id, rev$$1);
        });
      }

      ids.forEach(function (id) {
        this._getRevisionTree(id, function (err, rev_tree) {
          if (err && err.status === 404 && err.message === 'missing') {
            missing.set(id, {missing: req[id]});
          } else if (err) {
            /* istanbul ignore next */
            return callback(err);
          } else {
            processDoc(id, rev_tree);
          }

          if (++count === ids.length) {
            // convert LazyMap to object
            var missingObj = {};
            missing.forEach(function (value, key) {
              missingObj[key] = value;
            });
            return callback(null, missingObj);
          }
        });
      }, this);
    }).bind(this);

    // _bulk_get API for faster replication, as described in
    // https://github.com/apache/couchdb-chttpd/pull/33
    // At the "abstract" level, it will just run multiple get()s in
    // parallel, because this isn't much of a performance cost
    // for local databases (except the cost of multiple transactions, which is
    // small). The http adapter overrides this in order
    // to do a more efficient single HTTP request.
    this.bulkGet = adapterFun('bulkGet', function (opts, callback) {
      bulkGet(this, opts, callback);
    }).bind(this);

    // compact one document and fire callback
    // by compacting we mean removing all revisions which
    // are further from the leaf in revision tree than max_height
    this.compactDocument = adapterFun('compactDocument', function (docId, maxHeight, callback) {
      this._getRevisionTree(docId, (err, revTree) => {
        /* istanbul ignore if */
        if (err) {
          return callback(err);
        }
        var height = computeHeight(revTree);
        var candidates = [];
        var revs = [];
        Object.keys(height).forEach(function (rev$$1) {
          if (height[rev$$1] > maxHeight) {
            candidates.push(rev$$1);
          }
        });

        // Only candidates whose bodies are still 'available' need removal.
        traverseRevTree(revTree, function (isLeaf, pos, revHash, ctx, opts) {
          var rev$$1 = pos + '-' + revHash;
          if (opts.status === 'available' && candidates.indexOf(rev$$1) !== -1) {
            revs.push(rev$$1);
          }
        });
        this._doCompaction(docId, revs, callback);
      });
    }).bind(this);

    // compact the whole database using single document
    // compaction
    this.compact = adapterFun('compact', function (opts, callback) {
      if (typeof opts === 'function') {
        callback = opts;
        opts = {};
      }

      opts = opts || {};

      // Queue compactions; only start the worker when the queue was empty
      // (doNextCompaction chains the rest).
      this._compactionQueue = this._compactionQueue || [];
      this._compactionQueue.push({opts, callback});
      if (this._compactionQueue.length === 1) {
        doNextCompaction(this);
      }
    }).bind(this);

    /* Begin api wrappers. Specific functionality to storage belongs in the _[method] */
    // get: fetch one doc, with optional revs/conflicts/attachments/open_revs.
    this.get = adapterFun('get', function (id, opts, cb) {
      if (typeof opts === 'function') {
        cb = opts;
        opts = {};
      }
      opts = opts || {};
      if (typeof id !== 'string') {
        return cb(createError(INVALID_ID));
      }
      if (isLocalId(id) && typeof this._getLocal === 'function') {
        return this._getLocal(id, cb);
      }
      var leaves = [];

      // Fetch every rev in `leaves` and assemble the open_revs-style result
      // ([{ok: doc} | {missing: rev}, ...]).
      const finishOpenRevs = () => {
        var result = [];
        var count = leaves.length;
        /* istanbul ignore if */
        if (!count) {
          return cb(null, result);
        }

        // order with open_revs is unspecified
        leaves.forEach((leaf) => {
          this.get(id, {
            rev: leaf,
            revs: opts.revs,
            latest: opts.latest,
            attachments: opts.attachments,
            binary: opts.binary
          }, function (err, doc) {
            if (!err) {
              // using latest=true can produce duplicates
              var existing;
              for (var i = 0, l = result.length; i < l; i++) {
                if (result[i].ok && result[i].ok._rev === doc._rev) {
                  existing = true;
                  break;
                }
              }
              if (!existing) {
                result.push({ok: doc});
              }
            } else {
              result.push({missing: leaf});
            }
            count--;
            if (!count) {
              cb(null, result);
            }
          });
        });
      };

      if (opts.open_revs) {
        if (opts.open_revs === "all") {
          this._getRevisionTree(id, function (err, rev_tree) {
            /* istanbul ignore if */
            if (err) {
              return cb(err);
            }
            leaves = collectLeaves(rev_tree).map(function (leaf) {
              return leaf.rev;
            });
            finishOpenRevs();
          });
        } else {
          if (Array.isArray(opts.open_revs)) {
            leaves = opts.open_revs;
            for (var i = 0; i < leaves.length; i++) {
              var l = leaves[i];
              // looks like it's the only thing couchdb checks
              if (!isValidRev(l)) {
                return cb(createError(INVALID_REV));
              }
            }
            finishOpenRevs();
          } else {
            return cb(createError(UNKNOWN_ERROR, 'function_clause'));
          }
        }
        return; // open_revs does not like other options
      }

      return this._get(id, opts, (err, result) => {
        if (err) {
          err.docId = id;
          return cb(err);
        }

        var doc = result.doc;
        var metadata = result.metadata;
        var ctx = result.ctx;

        if (opts.conflicts) {
          var conflicts = collectConflicts(metadata);
          if (conflicts.length) {
            doc._conflicts = conflicts;
          }
        }

        if (isDeleted(metadata, doc._rev)) {
          doc._deleted = true;
        }

        if (opts.revs || opts.revs_info) {
          var splittedRev = doc._rev.split('-');
          var revNo = parseInt(splittedRev[0], 10);
          var revHash = splittedRev[1];

          var paths = rootToLeaf(metadata.rev_tree);
          var path = null;

          // Pick the root-to-leaf path containing this rev, preferring one
          // where the rev sits at its expected generation position.
          for (var i = 0; i < paths.length; i++) {
            var currentPath = paths[i];
            const hashIndex = currentPath.ids.findIndex(x => x.id === revHash);
            var hashFoundAtRevPos = hashIndex === (revNo - 1);

            if (hashFoundAtRevPos || (!path && hashIndex !== -1)) {
              path = currentPath;
            }
          }

          /* istanbul ignore if */
          if (!path) {
            err = new Error('invalid rev tree');
            err.docId = id;
            return cb(err);
          }

          // Trim the path to ancestors of this rev, newest first.
          const pathId = doc._rev.split('-')[1];
          const indexOfRev = path.ids.findIndex(x => x.id === pathId) + 1;
          var howMany = path.ids.length - indexOfRev;
          path.ids.splice(indexOfRev, howMany);
          path.ids.reverse();

          if (opts.revs) {
            doc._revisions = {
              start: (path.pos + path.ids.length) - 1,
              ids: path.ids.map(function (rev$$1) {
                return rev$$1.id;
              })
            };
          }
          if (opts.revs_info) {
            var pos = path.pos + path.ids.length;
            doc._revs_info = path.ids.map(function (rev$$1) {
              pos--;
              return {
                rev: pos + '-' + rev$$1.id,
                status: rev$$1.opts.status
              };
            });
          }
        }

        if (opts.attachments && doc._attachments) {
          // Inline the attachment bodies before returning the doc.
          var attachments = doc._attachments;
          var count = Object.keys(attachments).length;
          if (count === 0) {
            return cb(null, doc);
          }
          Object.keys(attachments).forEach((key) => {
            this._getAttachment(doc._id, key, attachments[key], {
              binary: opts.binary,
              metadata,
              ctx
            }, function (err, data) {
              var att = doc._attachments[key];
              att.data = data;
              delete att.stub;
              delete att.length;
              if (!--count) {
                cb(null, doc);
              }
            });
          });
        } else {
          // Without opts.attachments, mark each attachment as a stub.
          if (doc._attachments) {
            for (var key in doc._attachments) {
              /* istanbul ignore else */
              if (Object.prototype.hasOwnProperty.call(doc._attachments, key)) {
                doc._attachments[key].stub = true;
              }
            }
          }
          cb(null, doc);
        }
      });
    }).bind(this);

    // TODO: I don't like this, it forces an extra read for every
    // attachment read and enforces a confusing api between
    // adapter.js and the adapter implementation
    this.getAttachment = adapterFun('getAttachment', function (docId, attachmentId, opts, callback) {
      if (opts instanceof Function) {
        callback = opts;
        opts = {};
      }
      this._get(docId, opts, (err, res) => {
        if (err) {
          return callback(err);
        }
        if (res.doc._attachments && res.doc._attachments[attachmentId]) {
          opts.ctx = res.ctx;
          opts.binary = true;
          opts.metadata = res.metadata;
          this._getAttachment(docId, attachmentId,
            res.doc._attachments[attachmentId], opts, callback);
        } else {
          return callback(createError(MISSING_DOC));
        }
      });
    }).bind(this);

    // allDocs: list docs by id range or explicit key list.
    this.allDocs = adapterFun('allDocs', function (opts, callback) {
      if (typeof opts === 'function') {
        callback = opts;
        opts = {};
      }
      opts.skip = typeof opts.skip !== 'undefined' ? opts.skip : 0;
      // Accept CouchDB-style snake_case aliases.
      if (opts.start_key) {
        opts.startkey = opts.start_key;
      }
      if (opts.end_key) {
        opts.endkey = opts.end_key;
      }
      if ('keys' in opts) {
        if (!Array.isArray(opts.keys)) {
          return callback(new TypeError('options.keys must be an array'));
        }
        var incompatibleOpt =
          ['startkey', 'endkey', 'key'].filter(function (incompatibleOpt) {
            return incompatibleOpt in opts;
          })[0];
        if (incompatibleOpt) {
          callback(createError(QUERY_PARSE_ERROR,
            'Query parameter `' + incompatibleOpt +
            '` is not compatible with multi-get'
          ));
          return;
        }
        // Local adapters get skip/limit/descending pre-applied to the keys.
        if (!isRemote(this)) {
          allDocsKeysParse(opts);
          if (opts.keys.length === 0) {
            return this._allDocs({limit: 0}, callback);
          }
        }
      }

      return this._allDocs(opts, callback);
    }).bind(this);

    // close: mark the db closed and delegate cleanup to the adapter.
    this.close = adapterFun('close', function (callback) {
      this._closed = true;
      this.emit('closed');
      return this._close(callback);
    }).bind(this);

    // info: adapter info plus normalized db_name/auto_compaction/adapter.
    this.info = adapterFun('info', function (callback) {
      this._info((err, info) => {
        if (err) {
          return callback(err);
        }
        // assume we know better than the adapter, unless it informs us
        info.db_name = info.db_name || this.name;
        info.auto_compaction = !!(this.auto_compaction && !isRemote(this));
        info.adapter = this.adapter;
        callback(null, info);
      });
    }).bind(this);

    // id: the adapter-specific unique identifier for this database.
    this.id = adapterFun('id', function (callback) {
      return this._id(callback);
    }).bind(this);

    // bulkDocs: validate and write a batch of docs in one operation.
    this.bulkDocs = adapterFun('bulkDocs', function (req, opts, callback) {
      if (typeof opts === 'function') {
        callback = opts;
        opts = {};
      }

      opts = opts || {};

      // Accept a bare array as shorthand for {docs: [...]}.
      if (Array.isArray(req)) {
        req = {
          docs: req
        };
      }

      if (!req || !req.docs || !Array.isArray(req.docs)) {
        return callback(createError(MISSING_BULK_DOCS));
      }

      for (var i = 0; i < req.docs.length; ++i) {
        const doc = req.docs[i];
        if (isNotSingleDoc(doc)) {
          return callback(createError(NOT_AN_OBJECT));
        }
        if ('_rev' in doc && !isValidRev(doc._rev)) {
          return callback(createError(INVALID_REV));
        }
      }

      var attachmentError;
      req.docs.forEach(function (doc) {
        if (doc._attachments) {
          Object.keys(doc._attachments).forEach(function (name) {
            attachmentError = attachmentError || attachmentNameError(name);
            if (!doc._attachments[name].content_type) {
              guardedConsole('warn', 'Attachment', name, 'on document', doc._id, 'is missing content_type');
            }
          });
        }
      });

      if (attachmentError) {
        return callback(createError(BAD_REQUEST, attachmentError));
      }

      if (!('new_edits' in opts)) {
        if ('new_edits' in req) {
          opts.new_edits = req.new_edits;
        } else {
          opts.new_edits = true;
        }
      }

      var adapter = this;
      if (!opts.new_edits && !isRemote(adapter)) {
        // ensure revisions of the same doc are sorted, so that
        // the local adapter processes them correctly (#2935)
        req.docs.sort(compareByIdThenRev);
      }

      cleanDocs(req.docs);

      // in the case of conflicts, we want to return the _ids to the user
      // however, the underlying adapter may destroy the docs array, so
      // create a copy here
      var ids = req.docs.map(function (doc) {
        return doc._id;
      });

      this._bulkDocs(req, opts, function (err, res) {
        if (err) {
          return callback(err);
        }
        if (!opts.new_edits) {
          // this is what couch does when new_edits is false
          res = res.filter(function (x) {
            return x.error;
          });
        }
        // add ids for error/conflict responses (not required for CouchDB)
        if (!isRemote(adapter)) {
          for (var i = 0, l = res.length; i < l; i++) {
            res[i].id = res[i].id || ids[i];
          }
        }

        callback(null, res);
      });
    }).bind(this);

    // registerDependentDatabase: create a secondary db (e.g. a view index)
    // and record it in '_local/_pouch_dependentDbs' so destroy() can clean
    // it up later.
    this.registerDependentDatabase = adapterFun('registerDependentDatabase', function (dependentDb, callback) {
      var dbOptions = clone(this.__opts);
      if (this.__opts.view_adapter) {
        dbOptions.adapter = this.__opts.view_adapter;
      }

      var depDB = new this.constructor(dependentDb, dbOptions);

      function diffFun(doc) {
        doc.dependentDbs = doc.dependentDbs || {};
        if (doc.dependentDbs[dependentDb]) {
          return false; // no update required
        }
        doc.dependentDbs[dependentDb] = true;
        return doc;
      }
      upsert(this, '_local/_pouch_dependentDbs', diffFun).then(function () {
        callback(null, {db: depDB});
      }).catch(callback);
    }).bind(this);

    // destroy: delete the database (and, for local dbs, every dependent db
    // previously recorded via registerDependentDatabase).
    this.destroy = adapterFun('destroy', function (opts, callback) {

      if (typeof opts === 'function') {
        callback = opts;
        opts = {};
      }

      var usePrefix = 'use_prefix' in this ? this.use_prefix : true;

      const destroyDb = () => {
        // call destroy method of the particular adaptor
        this._destroy(opts, (err, resp) => {
          if (err) {
            return callback(err);
          }
          this._destroyed = true;
          this.emit('destroyed');
          callback(null, resp || { 'ok': true });
        });
      };

      if (isRemote(this)) {
        // no need to check for dependent DBs if it's a remote DB
        return destroyDb();
      }

      this.get('_local/_pouch_dependentDbs', (err, localDoc) => {
        if (err) {
          /* istanbul ignore if */
          if (err.status !== 404) {
            return callback(err);
          } else { // no dependencies
            return destroyDb();
          }
        }
        var dependentDbs = localDoc.dependentDbs;
        var PouchDB = this.constructor;
        var deletedMap = Object.keys(dependentDbs).map((name) => {
          // use_prefix is only false in the browser
          /* istanbul ignore next */
          var trueName = usePrefix ?
            name.replace(new RegExp('^' + PouchDB.prefix), '') : name;
          return new PouchDB(trueName, this.__opts).destroy();
        });
        Promise.all(deletedMap).then(destroyDb, callback);
      });
    }).bind(this);
  }

  // Walk the changes feed since the last recorded compaction seq, compacting
  // each changed doc, then persist the new last_seq in '_local/compaction'.
  // Progress is reported through this.activeTasks.
  _compact(opts, callback) {
    var changesOpts = {
      return_docs: false,
      last_seq: opts.last_seq || 0,
      since: opts.last_seq || 0
    };
    var promises = [];

    var taskId;
    var compactedDocs = 0;

    const onChange = (row) => {
      this.activeTasks.update(taskId, {
        completed_items: ++compactedDocs
      });
      promises.push(this.compactDocument(row.id, 0));
    };
    const onError = (err) => {
      this.activeTasks.remove(taskId, err);
      callback(err);
    };
    const onComplete = (resp) => {
      var lastSeq = resp.last_seq;
      Promise.all(promises).then(() => {
        return upsert(this, '_local/compaction', (doc) => {
          if (!doc.last_seq || doc.last_seq < lastSeq) {
            doc.last_seq = lastSeq;
            return doc;
          }
          return false; // somebody else got here first, don't update
        });
      }).then(() => {
        this.activeTasks.remove(taskId);
        callback(null, {ok: true});
      }).catch(onError);
    };

    this.info().then((info) => {
      taskId = this.activeTasks.add({
        name: 'database_compaction',
        total_items: info.update_seq - changesOpts.last_seq,
      });

      this.changes(changesOpts)
        .on('change', onChange)
        .on('complete', onComplete)
        .on('error', onError);
    });
  }

  // changes: open a changes feed over this database.
  changes(opts, callback) {
    if (typeof opts === 'function') {
      callback = opts;
      opts = {};
    }

    opts = opts || {};

    // By default set return_docs to false if the caller has opts.live = true,
    // this will prevent us from collecting the set of changes indefinitely
    // resulting in growing memory
    opts.return_docs = ('return_docs' in opts) ? opts.return_docs : !opts.live;

    return new Changes$1(this, opts, callback);
  }

  // type: adapter-reported type, falling back to the adapter name.
  type() {
    return (typeof this._type === 'function') ? this._type() : this.adapter;
  }
}
5108
// The abstract purge implementation expects a doc id and the rev of a leaf node in that doc.
// It will return errors if the rev doesn’t exist or isn’t a leaf.
AbstractPouchDB.prototype.purge = adapterFun('_purge', function (docId, rev$$1, callback) {
  // Purge is optional for adapters; bail out cleanly when unsupported.
  if (typeof this._purge === 'undefined') {
    return callback(createError(UNKNOWN_ERROR, 'Purge is not implemented in the ' + this.adapter + ' adapter.'));
  }
  var self = this;

  self._getRevisionTree(docId, (error, revs) => {
    if (error) {
      return callback(error);
    }
    if (!revs) {
      return callback(createError(MISSING_DOC));
    }
    let path;
    try {
      // findPathToLeaf throws when rev$$1 is absent or is not a leaf.
      path = findPathToLeaf(revs, rev$$1);
    } catch (error) {
      return callback(error.message || error);
    }
    self._purge(docId, path, (error, result) => {
      if (error) {
        return callback(error);
      } else {
        // Record the purge in '_local/purges' before reporting success.
        appendPurgeSeq(self, docId, rev$$1).then(function () {
          return callback(null, result);
        });
      }
    });
  });
});
5141
// Buffers operations issued before the adapter has finished initializing.
// Once ready() or fail() fires, queued callbacks are drained; after a
// failure, every queued (and subsequently added) callback receives the
// stored error.
class TaskQueue {
  constructor() {
    this.isReady = false;
    this.failed = false;
    this.queue = [];
  }

  // Drain the queue, passing the stored failure (if any) to each callback.
  execute() {
    if (this.failed) {
      for (let fun = this.queue.shift(); fun; fun = this.queue.shift()) {
        fun(this.failed);
      }
    } else {
      for (let fun = this.queue.shift(); fun; fun = this.queue.shift()) {
        fun();
      }
    }
  }

  fail(err) {
    this.failed = err;
    this.execute();
  }

  ready(db) {
    this.isReady = true;
    this.db = db;
    this.execute();
  }

  addTask(fun) {
    this.queue.push(fun);
    // After a failure, new tasks are rejected immediately.
    if (this.failed) {
      this.execute();
    }
  }
}
5180
// Resolve the backing adapter and the (possibly prefixed) internal name for a
// database. URL-style names ("http://...", "memory://...") select their
// scheme as the adapter; otherwise opts.adapter or the first viable entry in
// PouchDB.preferredAdapters is used.
function parseAdapter(name, opts) {
  const match = name.match(/([a-z-]*):\/\/(.*)/);
  if (match) {
    const scheme = match[1];
    // the http adapter expects the fully qualified name
    const fullName = /https?/.test(scheme) ? scheme + '://' + match[2] : match[2];
    return { name: fullName, adapter: scheme };
  }

  const adapters = PouchDB.adapters;
  const preferredAdapters = PouchDB.preferredAdapters;
  const prefix = PouchDB.prefix;
  let adapterName = opts.adapter;

  if (!adapterName) { // automatically determine adapter
    for (let i = 0; i < preferredAdapters.length; ++i) {
      adapterName = preferredAdapters[i];
      // check for browsers that have been upgraded from websql-only to websql+idb
      /* istanbul ignore if */
      if (adapterName === 'idb' && 'websql' in adapters &&
          hasLocalStorage() && localStorage['_pouch__websqldb_' + prefix + name]) {
        // log it, because this can be confusing during development
        guardedConsole('log', 'PouchDB is downgrading "' + name + '" to WebSQL to' +
          ' avoid data loss, because it was already opened with WebSQL.');
        continue; // keep using websql to avoid user data loss
      }
      break;
    }
  }

  const adapter = adapters[adapterName];

  // if adapter is invalid, then an error will be thrown later
  const usePrefix = (adapter && 'use_prefix' in adapter) ? adapter.use_prefix : true;

  return {
    name: usePrefix ? prefix + name : name,
    adapter: adapterName
  };
}
5223
// Classic prototypal inheritance: make A.prototype delegate to B.prototype
// while keeping A registered as the constructor.
function inherits(A, B) {
  const proto = Object.create(B.prototype, {
    constructor: { value: A }
  });
  A.prototype = proto;
}
5229
// Build a constructor extending `parent` that runs `init` on new instances;
// calling it without `new` transparently constructs one anyway.
function createClass(parent, init) {
  const klass = function (...args) {
    if (this instanceof klass) {
      init.apply(this, args);
    } else {
      return new klass(...args);
    }
  };
  inherits(klass, parent);
  return klass;
}
5240
// Consider two PouchDB instances opened with the same name, and destroy()
// called on one of them. Both objects must emit 'destroyed', as well as the
// PouchDB constructor itself. The instance destroy() was called on emits
// first; the constructor then re-broadcasts to every other db that shares
// the name. This wires one db into that ref/unref/destroyed protocol.
function prepareForDestruction(self) {
  const onDestroyed = function (from_constructor) {
    self.removeListener('closed', onClosed);
    if (!from_constructor) {
      // this instance initiated the destroy; notify the constructor so it
      // can broadcast to other dbs with the same name
      self.constructor.emit('destroyed', self.name);
    }
  };

  const onClosed = function () {
    self.removeListener('destroyed', onDestroyed);
    self.constructor.emit('unref', self);
  };

  self.once('destroyed', onDestroyed);
  self.once('closed', onClosed);
  self.constructor.emit('ref', self);
}
5269
// Concrete PouchDB class: resolves options and wires the selected adapter
// implementation onto the abstract API at construction time.
class PouchInternal extends AbstractPouchDB {
  constructor(name, opts) {
    super();
    this._setup(name, opts);
  }

  _setup(name, opts) {
    super._setup();
    opts = opts || {};

    // Support the `new PouchDB({name: 'foo', ...})` call form.
    if (name && typeof name === 'object') {
      opts = name;
      name = opts.name;
      delete opts.name;
    }

    if (opts.deterministic_revs === undefined) {
      opts.deterministic_revs = true;
    }

    this.__opts = opts = clone(opts);

    this.auto_compaction = opts.auto_compaction;
    this.purged_infos_limit = opts.purged_infos_limit || 1000;
    this.prefix = PouchDB.prefix;

    if (typeof name !== 'string') {
      throw new Error('Missing/invalid DB name');
    }

    const prefixedName = (opts.prefix || '') + name;
    const backend = parseAdapter(prefixedName, opts);

    opts.name = backend.name;
    opts.adapter = opts.adapter || backend.adapter;

    this.name = name;
    this._adapter = opts.adapter;
    PouchDB.emit('debug', ['adapter', 'Picked adapter: ', opts.adapter]);

    const isUsableAdapter = (id) =>
      PouchDB.adapters[id] && PouchDB.adapters[id].valid();

    if (!isUsableAdapter(opts.adapter)) {
      throw new Error('Invalid Adapter: ' + opts.adapter);
    }

    if (opts.view_adapter && !isUsableAdapter(opts.view_adapter)) {
      throw new Error('Invalid View Adapter: ' + opts.view_adapter);
    }

    this.taskqueue = new TaskQueue();

    this.adapter = opts.adapter;

    // Hand control to the adapter; it reports readiness (or failure)
    // asynchronously through this callback.
    PouchDB.adapters[opts.adapter].call(this, opts, (err) => {
      if (err) {
        return this.taskqueue.fail(err);
      }
      prepareForDestruction(this);

      this.emit('created', this);
      PouchDB.emit('created', this.name);
      this.taskqueue.ready(this);
    });
  }
}
5338
// The public PouchDB constructor: PouchInternal's behavior plus support for
// calling without `new` (provided by createClass).
const PouchDB = createClass(PouchInternal, function (name, opts) {
  PouchInternal.prototype._setup.call(this, name, opts);
});

// Aliases for the global Fetch API; f$1 backs PouchDB.fetch below.
var f$1 = fetch;
var h = Headers;
5345
// Registry of long-running internal jobs (e.g. database compaction),
// mirroring CouchDB's /_active_tasks bookkeeping.
class ActiveTasks {
  constructor() {
    this.tasks = {};
  }

  list() {
    return Object.values(this.tasks);
  }

  // Register a task and return its generated id.
  add(task) {
    const id = uuid.v4();
    this.tasks[id] = {
      id,
      name: task.name,
      total_items: task.total_items,
      created_at: new Date().toJSON()
    };
    return id;
  }

  get(id) {
    return this.tasks[id];
  }

  /* eslint-disable no-unused-vars */
  remove(id, reason) {
    delete this.tasks[id];
    return this.tasks;
  }

  // Merge progress counters into an existing task, stamping updated_at.
  // Unknown ids are ignored.
  update(id, updatedTask) {
    const task = this.tasks[id];
    if (typeof task === 'undefined') {
      return this.tasks;
    }
    this.tasks[id] = {
      id: task.id,
      name: task.name,
      created_at: task.created_at,
      total_items: updatedTask.total_items || task.total_items,
      completed_items: updatedTask.completed_items || task.completed_items,
      updated_at: new Date().toJSON()
    };
    return this.tasks;
  }
}
5392
// Registry of storage adapters, and the order they are tried when the caller
// does not specify one.
PouchDB.adapters = {};
PouchDB.preferredAdapters = [];

// Prefix prepended to database names by adapters that honor use_prefix.
PouchDB.prefix = '_pouch_';

// Shared emitter backing the static PouchDB.on/emit/... methods installed
// by setUpEventEmitter below.
var eventEmitter = new EE();
5399
// Graft EventEmitter behavior onto the PouchDB constructor itself, and wire
// up the ref/unref/destroyed bookkeeping that lets every db instance sharing
// a name hear about that name's destruction.
//
// Fix: the 'unref' handler previously deleted the listener list whenever its
// length dropped to 1 (`if (dbList.length > 1)` kept it), which orphaned the
// single remaining db — it would never receive the 'destroyed' broadcast.
// The entry is now removed only when the list is actually empty.
function setUpEventEmitter(Pouch) {
  // Proxy every EventEmitter method through a shared emitter instance.
  Object.keys(EE.prototype).forEach(function (key) {
    if (typeof EE.prototype[key] === 'function') {
      Pouch[key] = eventEmitter[key].bind(eventEmitter);
    }
  });

  // these are created in constructor.js, and allow us to notify each DB with
  // the same name that it was destroyed, via the constructor object
  var destructListeners = Pouch._destructionListeners = new Map();

  Pouch.on('ref', function onConstructorRef(db) {
    if (!destructListeners.has(db.name)) {
      destructListeners.set(db.name, []);
    }
    destructListeners.get(db.name).push(db);
  });

  Pouch.on('unref', function onConstructorUnref(db) {
    if (!destructListeners.has(db.name)) {
      return;
    }
    var dbList = destructListeners.get(db.name);
    var pos = dbList.indexOf(db);
    if (pos < 0) {
      /* istanbul ignore next */
      return;
    }
    dbList.splice(pos, 1);
    if (dbList.length === 0) {
      // nobody is listening under this name any more
      destructListeners.delete(db.name);
    }
    // otherwise the list (mutated in place above) stays registered, so the
    // remaining dbs still receive the 'destroyed' broadcast
  });

  Pouch.on('destroyed', function onConstructorDestroyed(name) {
    if (!destructListeners.has(name)) {
      return;
    }
    var dbList = destructListeners.get(name);
    destructListeners.delete(name);
    dbList.forEach(function (db) {
      db.emit('destroyed',true);
    });
  });
}
5448
// Install the static event-emitter API onto the exported constructor.
setUpEventEmitter(PouchDB);
5450
// Register a storage adapter under `id`; optionally append it to the list of
// adapters tried by default. Adapters that report themselves invalid for the
// current environment are silently ignored.
PouchDB.adapter = function (id, obj, addToPreferredAdapters) {
  /* istanbul ignore else */
  if (!obj.valid()) {
    return;
  }
  PouchDB.adapters[id] = obj;
  if (addToPreferredAdapters) {
    PouchDB.preferredAdapters.push(id);
  }
};
5460
// Install a plugin. A function-style plugin receives the constructor itself;
// an object-style plugin has each key copied onto PouchDB.prototype.
// Anything else (including an empty object) is rejected.
PouchDB.plugin = function (obj) {
  if (typeof obj === 'function') { // function style for plugins
    obj(PouchDB);
  } else if (typeof obj !== 'object' || Object.keys(obj).length === 0) {
    throw new Error('Invalid plugin: got "' + obj + '", expected an object or a function');
  } else {
    // object style for plugins
    for (const id of Object.keys(obj)) {
      PouchDB.prototype[id] = obj[id];
    }
  }
  // propagate defaults when plugging into a .defaults()-derived subclass
  if (this.__defaults) {
    PouchDB.__defaults = Object.assign({}, this.__defaults);
  }
  return PouchDB;
};
5476
// Create a subclass of PouchDB whose constructor merges `defaultOpts` into
// the options of every database it opens. Static properties and preferred
// adapters are copied over, and defaults are transitive across repeated
// .defaults() calls.
PouchDB.defaults = function (defaultOpts) {
  const PouchWithDefaults = createClass(PouchDB, function (name, opts) {
    opts = opts || {};

    // support the single-object calling convention: new Pouch({name: ...})
    if (name && typeof name === 'object') {
      opts = name;
      name = opts.name;
      delete opts.name;
    }

    opts = Object.assign({}, PouchWithDefaults.__defaults, opts);
    PouchDB.call(this, name, opts);
  });

  PouchWithDefaults.preferredAdapters = PouchDB.preferredAdapters.slice();
  for (const key of Object.keys(PouchDB)) {
    if (!(key in PouchWithDefaults)) {
      PouchWithDefaults[key] = PouchDB[key];
    }
  }

  // make default options transitive
  // https://github.com/pouchdb/pouchdb/issues/5922
  PouchWithDefaults.__defaults = Object.assign({}, this.__defaults, defaultOpts);

  return PouchWithDefaults;
};
5504
// Expose the bundled fetch implementation so consumers and adapters can issue
// HTTP requests through the same code path PouchDB itself uses.
PouchDB.fetch = function (url, opts) {
  return f$1(url, opts);
};

// Shared ActiveTasks registry, reachable both statically and per-instance.
PouchDB.prototype.activeTasks = PouchDB.activeTasks = new ActiveTasks();

// managed automatically by set-version.js
var version = "9.0.0";
5513
// this would just be "return doc[field]", but fields
// can be "deep" due to dot notation
function getFieldFromDoc(doc, parsedField) {
  let value = doc;
  for (const key of parsedField) {
    value = value[key];
    // stop descending once we reach a falsy intermediate value
    if (!value) {
      break;
    }
  }
  return value;
}
5527
// Three-way comparison: -1, 0, or 1 depending on how `left` orders
// against `right` under the native < and > operators.
function compare(left, right) {
  if (left < right) {
    return -1;
  }
  return left > right ? 1 : 0;
}
5531
// Converts a string in dot notation to an array of its components, with
// backslash escaping: a backslash before '.' or '$' makes that character
// literal (e.g. "foo\.bar" is the single field "foo.bar").
function parseField(fieldName) {
  const fields = [];
  let current = '';
  for (let i = 0; i < fieldName.length; i++) {
    const ch = fieldName[i];
    const escaped = i > 0 && fieldName[i - 1] === '\\' && (ch === '$' || ch === '.');
    if (escaped) {
      // drop the backslash appended on the previous iteration, keep the char
      current = current.slice(0, -1) + ch;
    } else if (ch === '.') {
      // an unescaped '.' delimits fields
      fields.push(current);
      current = '';
    } else {
      // normal character
      current += ch;
    }
  }
  fields.push(current);
  return fields;
}
5553
// Selector keys that combine sub-selectors rather than matching a doc field.
var combinationFields = ['$or', '$nor', '$not'];

// True when `field` is one of the combinational operators above.
function isCombinationalField(field) {
  return combinationFields.includes(field);
}
5558
// Return the first (and, for mango operator objects, only) key of `obj`.
function getKey(obj) {
  const [key] = Object.keys(obj);
  return key;
}
5562
// Return the value stored under the first key of `obj`.
function getValue(obj) {
  const key = Object.keys(obj)[0];
  return obj[key];
}
5566
5567
// flatten an array of selectors joined by an $and operator
function mergeAndedSelectors(selectors) {

  // sort to ensure that e.g. if the user specified
  // $and: [{$gt: 'a'}, {$gt: 'b'}], then it's collapsed into
  // just {$gt: 'b'}
  var res = {};
  // tracks the first occurrence of $or/$nor: the first array is copied
  // verbatim, subsequent ones are cross-merged against it below
  var first = {$or: true, $nor: true};

  selectors.forEach(function (selector) {
    Object.keys(selector).forEach(function (field) {
      var matcher = selector[field];
      // bare values are shorthand for {$eq: value}
      if (typeof matcher !== 'object') {
        matcher = {$eq: matcher};
      }

      if (isCombinationalField(field)) {
        // or, nor
        if (matcher instanceof Array) {
          if (first[field]) {
            first[field] = false;
            res[field] = matcher;
            return;
          }

          // cross-product merge: AND every existing branch with every new one
          var entries = [];
          res[field].forEach(function (existing) {
            Object.keys(matcher).forEach(function (key) {
              var m = matcher[key];
              var longest = Math.max(Object.keys(existing).length, Object.keys(m).length);
              var merged = mergeAndedSelectors([existing, m]);
              if (Object.keys(merged).length <= longest) {
                // we have a situation like: (a :{$eq :1} || ...) && (a {$eq: 2} || ...)
                // merging would produce a $eq 2 when actually we shouldn't ever match against these merged conditions
                // merged should always contain more values to be valid
                return;
              }
              entries.push(merged);
            });
          });
          res[field] = entries;
        } else {
          // not
          res[field] = mergeAndedSelectors([matcher]);
        }
      } else {
        // plain field: collapse each operator into the accumulated matchers
        var fieldMatchers = res[field] = res[field] || {};
        Object.keys(matcher).forEach(function (operator) {
          var value = matcher[operator];

          if (operator === '$gt' || operator === '$gte') {
            return mergeGtGte(operator, value, fieldMatchers);
          } else if (operator === '$lt' || operator === '$lte') {
            return mergeLtLte(operator, value, fieldMatchers);
          } else if (operator === '$ne') {
            return mergeNe(value, fieldMatchers);
          } else if (operator === '$eq') {
            return mergeEq(value, fieldMatchers);
          } else if (operator === "$regex") {
            return mergeRegex(value, fieldMatchers);
          }
          // any other operator: last occurrence wins
          fieldMatchers[operator] = value;
        });
      }
    });
  });

  return res;
}
5637
5638
5639
// collapse logically equivalent gt/gte values, keeping only the most
// restrictive lower bound (an already-merged $eq always wins outright)
function mergeGtGte(operator, value, fieldMatchers) {
  if (typeof fieldMatchers.$eq !== 'undefined') {
    return; // do nothing
  }
  const hasGte = typeof fieldMatchers.$gte !== 'undefined';
  const hasGt = typeof fieldMatchers.$gt !== 'undefined';
  if (hasGte) {
    if (operator === '$gte') {
      // keep the larger inclusive bound
      if (value > fieldMatchers.$gte) {
        fieldMatchers.$gte = value;
      }
    } else if (value >= fieldMatchers.$gte) { // operator === '$gt'
      // an exclusive bound at or above the inclusive one is stricter
      delete fieldMatchers.$gte;
      fieldMatchers.$gt = value;
    }
  } else if (hasGt) {
    if (operator === '$gte') {
      // an inclusive bound strictly above the exclusive one is stricter
      if (value > fieldMatchers.$gt) {
        delete fieldMatchers.$gt;
        fieldMatchers.$gte = value;
      }
    } else if (value > fieldMatchers.$gt) { // operator === '$gt'
      fieldMatchers.$gt = value;
    }
  } else {
    // first bound seen for this field
    fieldMatchers[operator] = value;
  }
}
5671
// collapse logically equivalent lt/lte values, keeping only the most
// restrictive upper bound (an already-merged $eq always wins outright)
function mergeLtLte(operator, value, fieldMatchers) {
  if (typeof fieldMatchers.$eq !== 'undefined') {
    return; // do nothing
  }
  const hasLte = typeof fieldMatchers.$lte !== 'undefined';
  const hasLt = typeof fieldMatchers.$lt !== 'undefined';
  if (hasLte) {
    if (operator === '$lte') {
      // keep the smaller inclusive bound
      if (value < fieldMatchers.$lte) {
        fieldMatchers.$lte = value;
      }
    } else if (value <= fieldMatchers.$lte) { // operator === '$lt'
      // an exclusive bound at or below the inclusive one is stricter
      delete fieldMatchers.$lte;
      fieldMatchers.$lt = value;
    }
  } else if (hasLt) {
    if (operator === '$lte') {
      // an inclusive bound strictly below the exclusive one is stricter
      if (value < fieldMatchers.$lt) {
        delete fieldMatchers.$lt;
        fieldMatchers.$lte = value;
      }
    } else if (value < fieldMatchers.$lt) { // operator === '$lt'
      fieldMatchers.$lt = value;
    }
  } else {
    // first bound seen for this field
    fieldMatchers[operator] = value;
  }
}
5703
// combine $ne values into one array — a value can be required to differ
// from many things at once
function mergeNe(value, fieldMatchers) {
  const existing = '$ne' in fieldMatchers ? fieldMatchers.$ne : (fieldMatchers.$ne = []);
  existing.push(value);
}
5713
// add $eq into the mix: equality is the most specific constraint, so any
// accumulated range/inequality operators are discarded in its favor
// TODO: check for user errors here
function mergeEq(value, fieldMatchers) {
  for (const op of ['$gt', '$gte', '$lt', '$lte', '$ne']) {
    delete fieldMatchers[op];
  }
  fieldMatchers.$eq = value;
}
5725
// combine $regex values into one array — a value may need to match
// several patterns simultaneously
function mergeRegex(value, fieldMatchers) {
  const existing = '$regex' in fieldMatchers ? fieldMatchers.$regex : (fieldMatchers.$regex = []);
  existing.push(value);
}
5735
//#7458: execute function mergeAndedSelectors on nested $and
// Walks the selector tree in place, collapsing any array element that is an
// {$and: [...]} wrapper via mergeAndedSelectors, then recursing into object
// values. Returns the same (mutated) object.
// NOTE(review): the Array.isArray(obj) branch runs once per enumerated
// property, so the collapsing loop may execute several times per array; this
// appears idempotent but is order-sensitive, so it is preserved as-is.
function mergeAndedSelectorsNested(obj) {
  for (var prop in obj) {
    if (Array.isArray(obj)) {
      for (var i in obj) {
        if (obj[i]['$and']) {
          obj[i] = mergeAndedSelectors(obj[i]['$and']);
        }
      }
    }
    var value = obj[prop];
    if (typeof value === 'object') {
      mergeAndedSelectorsNested(value); // <- recursive call
    }
  }
  return obj;
}
5753
//#7458: determine whether $and is present in the selector (at any level)
function isAndInSelector(obj, isAnd) {
  for (const prop in obj) {
    const value = obj[prop];
    // a property named '$and' anywhere in the tree flips the flag; recursion
    // is skipped once the answer is already known
    isAnd = isAnd ||
      prop === '$and' ||
      (typeof value === 'object' && isAndInSelector(value, isAnd));
  }
  return isAnd;
}
5767
5768//
5769// normalize the selector
5770//
5771function massageSelector(input) {
5772 var result = clone(input);
5773
5774 //#7458: if $and is present in selector (at any level) merge nested $and
5775 if (isAndInSelector(result, false)) {
5776 result = mergeAndedSelectorsNested(result);
5777 if ('$and' in result) {
5778 result = mergeAndedSelectors(result['$and']);
5779 }
5780 }
5781
5782 ['$or', '$nor'].forEach(function (orOrNor) {
5783 if (orOrNor in result) {
5784 // message each individual selector
5785 // e.g. {foo: 'bar'} becomes {foo: {$eq: 'bar'}}
5786 result[orOrNor].forEach(function (subSelector) {
5787 var fields = Object.keys(subSelector);
5788 for (var i = 0; i < fields.length; i++) {
5789 var field = fields[i];
5790 var matcher = subSelector[field];
5791 if (typeof matcher !== 'object' || matcher === null) {
5792 subSelector[field] = {$eq: matcher};
5793 }
5794 }
5795 });
5796 }
5797 });
5798
5799 if ('$not' in result) {
5800 //This feels a little like forcing, but it will work for now,
5801 //I would like to come back to this and make the merging of selectors a little more generic
5802 result['$not'] = mergeAndedSelectors([result['$not']]);
5803 }
5804
5805 var fields = Object.keys(result);
5806
5807 for (var i = 0; i < fields.length; i++) {
5808 var field = fields[i];
5809 var matcher = result[field];
5810
5811 if (typeof matcher !== 'object' || matcher === null) {
5812 matcher = {$eq: matcher};
5813 }
5814 result[field] = matcher;
5815 }
5816
5817 normalizeArrayOperators(result);
5818
5819 return result;
5820}
5821
5822//
5823// The $ne and $regex values must be placed in an array because these operators can be used multiple times on the same field.
5824// When $and is used, mergeAndedSelectors takes care of putting some of them into arrays, otherwise it's done here.
5825//
5826function normalizeArrayOperators(selector) {
5827 Object.keys(selector).forEach(function (field) {
5828 var matcher = selector[field];
5829
5830 if (Array.isArray(matcher)) {
5831 matcher.forEach(function (matcherItem) {
5832 if (matcherItem && typeof matcherItem === 'object') {
5833 normalizeArrayOperators(matcherItem);
5834 }
5835 });
5836 } else if (field === '$ne') {
5837 selector.$ne = [matcher];
5838 } else if (field === '$regex') {
5839 selector.$regex = [matcher];
5840 } else if (matcher && typeof matcher === 'object') {
5841 normalizeArrayOperators(matcher);
5842 }
5843 });
5844}
5845
// Build the padding string that would bring `str` up to `upToLength`
// characters (returns only the padding, not the padded string).
function pad(str, padWith, upToLength) {
  const needed = upToLength - str.length;
  let padding = '';
  /* istanbul ignore next */
  while (padding.length < needed) {
    padding += padWith;
  }
  return padding;
}

// Left-pad `str` with `padWith` up to `upToLength` characters.
function padLeft(str, padWith, upToLength) {
  return pad(str, padWith, upToLength) + str;
}
5860
// Tuning constants for the number -> lexically-sortable-string encoding below.
var MIN_MAGNITUDE = -324; // verified by -Number.MIN_VALUE
var MAGNITUDE_DIGITS = 3; // ditto
var SEP = ''; // set to '_' for easier debugging
5864
// Compare two keys using CouchDB's collation order: null < booleans <
// numbers < strings < arrays < objects. Returns a negative number, zero,
// or a positive number, suitable for Array.prototype.sort.
function collate(a, b) {

  if (a === b) {
    return 0;
  }

  a = normalizeKey(a);
  b = normalizeKey(b);

  const ai = collationIndex(a);
  const bi = collationIndex(b);
  const classDiff = ai - bi;
  if (classDiff !== 0) {
    // different collation classes sort by class alone
    return classDiff;
  }
  switch (typeof a) {
    case 'number':
      return a - b;
    case 'boolean':
      return a < b ? -1 : 1;
    case 'string':
      return stringCollate(a, b);
  }
  return Array.isArray(a) ? arrayCollate(a, b) : objectCollate(a, b);
}
5889
// couch considers null/NaN/Infinity/-Infinity === undefined,
// for the purposes of mapreduce indexes. also, dates get stringified.
function normalizeKey(key) {
  switch (typeof key) {
    case 'undefined':
      return null;
    case 'number':
      // non-finite numbers collapse to null, matching CouchDB
      return (key === Infinity || key === -Infinity || isNaN(key)) ? null : key;
    case 'object': {
      const original = key;
      if (Array.isArray(key)) {
        // Array.from maps holes through the callback, so sparse slots
        // become normalizeKey(undefined) === null
        key = Array.from(original, normalizeKey);
      /* istanbul ignore next */
      } else if (key instanceof Date) {
        return key.toJSON();
      } else if (key !== null) { // generic object
        key = {};
        for (const k in original) {
          if (Object.prototype.hasOwnProperty.call(original, k)) {
            const val = original[k];
            // properties holding undefined are dropped entirely
            if (typeof val !== 'undefined') {
              key[k] = normalizeKey(val);
            }
          }
        }
      }
    }
  }
  return key;
}
5926
// Encode a single already-normalized key as the payload portion of an
// indexable string (the collation-class prefix is added by the caller).
// Note: booleans intentionally encode as the *numbers* 1/0, which the
// caller coerces during string concatenation.
function indexify(key) {
  if (key === null) {
    return '';
  }
  switch (typeof key) {
    case 'boolean':
      return key ? 1 : 0;
    case 'number':
      return numToIndexableString(key);
    case 'string':
      // We've to be sure that key does not contain \u0000
      // Do order-preserving replacements:
      // 0 -> 1, 1
      // 1 -> 1, 2
      // 2 -> 2, 2
      /* eslint-disable no-control-regex */
      return key
        .replace(/\u0002/g, '\u0002\u0002')
        .replace(/\u0001/g, '\u0001\u0002')
        .replace(/\u0000/g, '\u0001\u0001');
      /* eslint-enable no-control-regex */
    case 'object': {
      let result = '';
      if (Array.isArray(key)) {
        for (const item of key) {
          result += toIndexableString(item);
        }
      } else {
        // objects encode as alternating key/value fragments
        for (const objKey of Object.keys(key)) {
          result += toIndexableString(objKey) + toIndexableString(key[objKey]);
        }
      }
      return result;
    }
  }
  return '';
}
5968
// convert the given key to a string that would be appropriate
// for lexical sorting, e.g. within a database, where the
// sorting is the same given by the collate() function.
function toIndexableString(key) {
  const terminator = '\u0000';
  key = normalizeKey(key);
  return collationIndex(key) + SEP + indexify(key) + terminator;
}
5977
// Decode a number previously encoded by numToIndexableString, starting at
// offset `i` within `str`. Returns {num, length} where `length` is the
// number of characters consumed.
// Fix: the mantissa split result is kept in its own variable (`numParts`)
// and indexed explicitly — the old code called parseInt() on the array
// itself, relying on implicit Array-to-string coercion.
function parseNumber(str, i) {
  var originalIdx = i;
  var num;
  // sign prefix: '1' marks an encoded zero ('0' negative, '2' positive)
  var zero = str[i] === '1';
  if (zero) {
    num = 0;
    i++;
  } else {
    var neg = str[i] === '0';
    i++;
    var numAsString = '';
    // the fixed-width, offset-encoded exponent comes first
    var magAsString = str.substring(i, i + MAGNITUDE_DIGITS);
    var magnitude = parseInt(magAsString, 10) + MIN_MAGNITUDE;
    /* istanbul ignore next */
    if (neg) {
      magnitude = -magnitude;
    }
    i += MAGNITUDE_DIGITS;
    // the mantissa runs until the \u0000 terminator
    while (true) {
      var ch = str[i];
      if (ch === '\u0000') {
        break;
      } else {
        numAsString += ch;
      }
      i++;
    }
    var numParts = numAsString.split('.');
    if (numParts.length === 1) {
      num = parseInt(numParts[0], 10);
    } else {
      /* istanbul ignore next */
      num = parseFloat(numParts[0] + '.' + numParts[1]);
    }
    /* istanbul ignore next */
    if (neg) {
      // negative mantissas were stored as (10 - factor); undo that here
      num = num - 10;
    }
    /* istanbul ignore next */
    if (magnitude !== 0) {
      // parseFloat is more reliable than pow due to rounding errors
      // e.g. Number.MAX_VALUE would return Infinity if we did
      // num * Math.pow(10, magnitude);
      num = parseFloat(num + 'e' + magnitude);
    }
  }
  return {num, length : i - originalIdx};
}
6026
// move up the stack while parsing
// this function moved outside of parseIndexableString for performance
// Closes the innermost composite value: `obj` (the just-finished element) is
// attached to the enclosing array or object recorded on metaStack.
function pop(stack, metaStack) {
  var obj = stack.pop();

  if (metaStack.length) {
    var lastMetaElement = metaStack[metaStack.length - 1];
    if (obj === lastMetaElement.element) {
      // popping a meta-element, e.g. an object whose value is another object
      metaStack.pop();
      lastMetaElement = metaStack[metaStack.length - 1];
    }
    var element = lastMetaElement.element;
    var lastElementIndex = lastMetaElement.index;
    if (Array.isArray(element)) {
      element.push(obj);
    } else if (lastElementIndex === stack.length - 2) { // obj with key+value
      // two entries above the container: the one below `obj` is its key
      var key = stack.pop();
      element[key] = obj;
    } else {
      stack.push(obj); // obj with key only
    }
  }
}
6051
// Decode a string produced by toIndexableString back into the original
// (normalized) key. Each element starts with a one-character collation
// class ('1' null, '2' boolean, '3' number, '4' string, '5' array,
// '6' object) and composites are closed by \u0000 terminators.
function parseIndexableString(str) {
  var stack = [];
  var metaStack = []; // stack for arrays and objects
  var i = 0;

  /*eslint no-constant-condition: ["error", { "checkLoops": false }]*/
  while (true) {
    var collationIndex = str[i++];
    if (collationIndex === '\u0000') {
      // terminator: either the whole key is complete, or an open
      // array/object on metaStack is being closed
      if (stack.length === 1) {
        return stack.pop();
      } else {
        pop(stack, metaStack);
        continue;
      }
    }
    switch (collationIndex) {
      case '1': // null
        stack.push(null);
        break;
      case '2': // boolean: '1' encodes true, anything else false
        stack.push(str[i] === '1');
        i++;
        break;
      case '3': // number: delegate to parseNumber, then skip what it consumed
        var parsedNum = parseNumber(str, i);
        stack.push(parsedNum.num);
        i += parsedNum.length;
        break;
      case '4': // string: read until the \u0000 terminator
        var parsedStr = '';
        /*eslint no-constant-condition: ["error", { "checkLoops": false }]*/
        while (true) {
          var ch = str[i];
          if (ch === '\u0000') {
            break;
          }
          parsedStr += ch;
          i++;
        }
        // perform the reverse of the order-preserving replacement
        // algorithm (see above)
        /* eslint-disable no-control-regex */
        parsedStr = parsedStr.replace(/\u0001\u0001/g, '\u0000')
          .replace(/\u0001\u0002/g, '\u0001')
          .replace(/\u0002\u0002/g, '\u0002');
        /* eslint-enable no-control-regex */
        stack.push(parsedStr);
        break;
      case '5': // array: open a composite on both stacks
        var arrayElement = { element: [], index: stack.length };
        stack.push(arrayElement.element);
        metaStack.push(arrayElement);
        break;
      case '6': // object: open a composite on both stacks
        var objElement = { element: {}, index: stack.length };
        stack.push(objElement.element);
        metaStack.push(objElement);
        break;
      /* istanbul ignore next */
      default:
        throw new Error(
          'bad collationIndex or unexpectedly reached end of input: ' +
            collationIndex);
    }
  }
}
6119
// Element-wise comparison of two arrays; on a tie over the shared prefix,
// the shorter array sorts first.
function arrayCollate(a, b) {
  const len = Math.min(a.length, b.length);
  for (let i = 0; i < len; i++) {
    const sort = collate(a[i], b[i]);
    if (sort !== 0) {
      return sort;
    }
  }
  if (a.length === b.length) {
    return 0;
  }
  return a.length > b.length ? 1 : -1;
}
// See: https://github.com/daleharvey/pouchdb/issues/40
// This is incompatible with the CouchDB implementation, but its the
// best we can do for now
function stringCollate(a, b) {
  if (a === b) {
    return 0;
  }
  return a > b ? 1 : -1;
}
// Compare objects by interleaved key-then-value order; on a tie over the
// shared prefix, the object with fewer keys sorts first.
function objectCollate(a, b) {
  const aKeys = Object.keys(a);
  const bKeys = Object.keys(b);
  const len = Math.min(aKeys.length, bKeys.length);
  for (let i = 0; i < len; i++) {
    // First sort the keys
    let sort = collate(aKeys[i], bKeys[i]);
    if (sort !== 0) {
      return sort;
    }
    // if the keys are equal sort the values
    sort = collate(a[aKeys[i]], b[bKeys[i]]);
    if (sort !== 0) {
      return sort;
    }
  }
  if (aKeys.length === bKeys.length) {
    return 0;
  }
  return aKeys.length > bKeys.length ? 1 : -1;
}
// The collation is defined by erlangs ordered terms
// the atoms null, true, false come first, then numbers, strings,
// arrays, then objects
// null/undefined/NaN/Infinity/-Infinity are all considered null
function collationIndex(x) {
  const order = ['boolean', 'number', 'string', 'object'];
  const idx = order.indexOf(typeof x);
  if (idx !== -1) {
    if (x === null) {
      return 1;
    }
    if (Array.isArray(x)) {
      return 5;
    }
    // booleans -> 2, numbers -> 3, strings -> 4, plain objects -> 6
    return idx < 3 ? (idx + 2) : (idx + 3);
  }
  /* istanbul ignore next */
  if (Array.isArray(x)) {
    return 5;
  }
}
6178
// conversion:
// x yyy zz...zz
// x = 0 for negative, 1 for 0, 2 for positive
// y = exponent (for negative numbers negated) moved so that it's >= 0
// z = mantisse
function numToIndexableString(num) {

  if (num === 0) {
    return '1';
  }

  // convert number to exponential format for easier and
  // more succinct string sorting
  var expFormat = num.toExponential().split(/e\+?/);
  var magnitude = parseInt(expFormat[1], 10);

  var neg = num < 0;

  var result = neg ? '0' : '2';

  // first sort by magnitude
  // it's easier if all magnitudes are positive
  var magForComparison = ((neg ? -magnitude : magnitude) - MIN_MAGNITUDE);
  var magString = padLeft((magForComparison).toString(), '0', MAGNITUDE_DIGITS);

  result += SEP + magString;

  // then sort by the factor
  var factor = Math.abs(parseFloat(expFormat[0])); // [1..10)
  /* istanbul ignore next */
  if (neg) { // for negative reverse ordering
    factor = 10 - factor;
  }

  var factorStr = factor.toFixed(20);

  // strip zeros from the end
  factorStr = factorStr.replace(/\.?0+$/, '');

  result += SEP + factorStr;

  return result;
}
6222
// create a comparator based on the sort object
function createFieldSorter(sort) {

  // extract, in sort order, the field values that the comparator inspects
  function getFieldValuesAsArray(doc) {
    return sort.map(function (sorting) {
      const fieldName = getKey(sorting);
      const parsedField = parseField(fieldName);
      return getFieldFromDoc(doc, parsedField);
    });
  }

  return function (aRow, bRow) {
    const aFieldValues = getFieldValuesAsArray(aRow.doc);
    const bFieldValues = getFieldValuesAsArray(bRow.doc);
    const collation = collate(aFieldValues, bFieldValues);
    if (collation !== 0) {
      return collation;
    }
    // this is what mango seems to do
    return compare(aRow.doc._id, bRow.doc._id);
  };
}
6246
// Apply selector filtering, sorting, and skip/limit to rows entirely
// in memory (used when an index cannot satisfy the whole query).
function filterInMemoryFields(rows, requestDef, inMemoryFields) {
  let filtered = rows.filter(function (row) {
    return rowFilter(row.doc, requestDef.selector, inMemoryFields);
  });

  if (requestDef.sort) {
    // in-memory sort
    const fieldSorter = createFieldSorter(requestDef.sort);
    filtered = filtered.sort(fieldSorter);
    // NOTE(review): only the first sort field's direction is inspected here
    if (typeof requestDef.sort[0] !== 'string' &&
        getValue(requestDef.sort[0]) === 'desc') {
      filtered = filtered.reverse();
    }
  }

  if ('limit' in requestDef || 'skip' in requestDef) {
    // have to do the limit in-memory
    const skip = requestDef.skip || 0;
    const limit = ('limit' in requestDef ? requestDef.limit : filtered.length) + skip;
    filtered = filtered.slice(skip, limit);
  }
  return filtered;
}
6270
// True when `doc` satisfies every field constraint listed in inMemoryFields.
function rowFilter(doc, selector, inMemoryFields) {
  return inMemoryFields.every(function (field) {
    const matcher = selector[field];
    const parsedField = parseField(field);
    const docFieldValue = getFieldFromDoc(doc, parsedField);
    return isCombinationalField(field)
      ? matchCominationalSelector(field, matcher, doc)
      : matchSelector(matcher, doc, parsedField, docFieldValue);
  });
}
6283
// Evaluate a (possibly nested) matcher against a doc field value. Handles
// explicit operators ($-prefixed keys), nested sub-field selectors, and the
// implicit-$eq shorthand. A missing matcher means the field was only needed
// for sorting and always matches.
function matchSelector(matcher, doc, parsedField, docFieldValue) {
  if (!matcher) {
    // no filtering necessary; this field is just needed for sorting
    return true;
  }

  // is matcher an object, if so continue recursion
  if (typeof matcher === 'object') {
    return Object.keys(matcher).every(function (maybeUserOperator) {
      var userValue = matcher[ maybeUserOperator ];
      // explicit operator
      if (maybeUserOperator.indexOf("$") === 0) {
        return match(maybeUserOperator, doc, userValue, parsedField, docFieldValue);
      } else {
        // non-$ key: a nested sub-field path relative to the current field
        var subParsedField = parseField(maybeUserOperator);

        if (
          docFieldValue === undefined &&
          typeof userValue !== "object" &&
          subParsedField.length > 0
        ) {
          // the field does not exist, return or getFieldFromDoc will throw
          return false;
        }

        var subDocFieldValue = getFieldFromDoc(docFieldValue, subParsedField);

        if (typeof userValue === "object") {
          // field value is an object that might contain more operators
          return matchSelector(userValue, doc, parsedField, subDocFieldValue);
        }

        // implicit operator
        return match("$eq", doc, userValue, subParsedField, subDocFieldValue);
      }
    });
  }

  // no more depth, No need to recurse further
  return matcher === docFieldValue;
}
6325
// Evaluate an $or/$not/$nor matcher against the whole document.
function matchCominationalSelector(field, matcher, doc) {
  if (field === '$or') {
    // any branch matching is enough
    return matcher.some(function (orMatchers) {
      return rowFilter(doc, orMatchers, Object.keys(orMatchers));
    });
  }

  if (field === '$not') {
    return !rowFilter(doc, matcher, Object.keys(matcher));
  }

  // `$nor`: the doc must match none of the branches
  return !matcher.some(function (orMatchers) {
    return rowFilter(doc, orMatchers, Object.keys(orMatchers));
  });
}
6344
// Dispatch a single operator to its matcher; unknown operators are an error.
function match(userOperator, doc, userValue, parsedField, docFieldValue) {
  const matcher = matchers[userOperator];
  if (!matcher) {
    /* istanbul ignore next */
    throw new Error('unknown operator "' + userOperator +
      '" - should be one of $eq, $lte, $lt, $gt, $gte, $exists, $ne, $in, ' +
      '$nin, $size, $mod, $regex, $elemMatch, $type, $allMatch or $all');
  }
  return matcher(doc, userValue, parsedField, docFieldValue);
}
6354
// A field "exists" when it is neither undefined nor null.
function fieldExists(docFieldValue) {
  return docFieldValue !== undefined && docFieldValue !== null;
}
6358
// Unlike fieldExists, null still counts as a present value here.
function fieldIsNotUndefined(docFieldValue) {
  return docFieldValue !== undefined;
}
6362
// $mod helper: true when docFieldValue is an integer and
// docFieldValue % divisor === remainder, with userValue = [divisor, remainder].
function modField(docFieldValue, userValue) {
  const isInteger = typeof docFieldValue === "number" &&
    parseInt(docFieldValue, 10) === docFieldValue;
  if (!isInteger) {
    return false;
  }

  const [divisor, mod] = userValue;

  return docFieldValue % divisor === mod;
}
6374
// $in helper: does the doc value (or any element of a doc-side array)
// equal one of the user-supplied values under collation?
function arrayContainsValue(docFieldValue, userValue) {
  return userValue.some(function (val) {
    if (docFieldValue instanceof Array) {
      return docFieldValue.some(function (item) {
        return collate(val, item) === 0;
      });
    }
    return collate(val, docFieldValue) === 0;
  });
}
6386
// $all helper: every user value must appear in the doc array (by collation).
function arrayContainsAllValues(docFieldValue, userValue) {
  return userValue.every(function (val) {
    return docFieldValue.some(function (item) {
      return collate(val, item) === 0;
    });
  });
}
6394
// $size helper: exact length match.
function arraySize(docFieldValue, userValue) {
  return userValue === docFieldValue.length;
}
6398
// $regex helper: test the doc value against a pattern (string or RegExp).
function regexMatch(docFieldValue, userValue) {
  return new RegExp(userValue).test(docFieldValue);
}
6404
// $type helper: CouchDB-style type-name matching. Returns undefined for an
// unrecognized type name (treated as falsy by the caller).
function typeMatch(docFieldValue, userValue) {
  switch (userValue) {
    case 'null':
      return docFieldValue === null;
    case 'boolean':
    case 'number':
    case 'string':
      // these three names coincide with typeof results
      return typeof docFieldValue === userValue;
    case 'array':
      return docFieldValue instanceof Array;
    case 'object':
      return ({}).toString.call(docFieldValue) === '[object Object]';
  }
}
6422
// Operator implementations for in-memory matching. Each matcher receives the
// whole doc, the user-supplied operator value, the parsed field path, and the
// value found at that path in the doc, and returns a boolean.
var matchers = {

  // $elemMatch: at least one array element satisfies the sub-selector.
  // Arrays of objects are matched as sub-documents; arrays of scalars are
  // matched against the selector directly.
  '$elemMatch': function (doc, userValue, parsedField, docFieldValue) {
    if (!Array.isArray(docFieldValue)) {
      return false;
    }

    if (docFieldValue.length === 0) {
      return false;
    }

    if (typeof docFieldValue[0] === 'object' && docFieldValue[0] !== null) {
      return docFieldValue.some(function (val) {
        return rowFilter(val, userValue, Object.keys(userValue));
      });
    }

    return docFieldValue.some(function (val) {
      return matchSelector(userValue, doc, parsedField, val);
    });
  },

  // $allMatch: every array element satisfies the sub-selector.
  '$allMatch': function (doc, userValue, parsedField, docFieldValue) {
    if (!Array.isArray(docFieldValue)) {
      return false;
    }

    /* istanbul ignore next */
    if (docFieldValue.length === 0) {
      return false;
    }

    if (typeof docFieldValue[0] === 'object' && docFieldValue[0] !== null) {
      return docFieldValue.every(function (val) {
        return rowFilter(val, userValue, Object.keys(userValue));
      });
    }

    return docFieldValue.every(function (val) {
      return matchSelector(userValue, doc, parsedField, val);
    });
  },

  // equality/range operators all compare via collate()
  '$eq': function (doc, userValue, parsedField, docFieldValue) {
    return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) === 0;
  },

  '$gte': function (doc, userValue, parsedField, docFieldValue) {
    return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) >= 0;
  },

  '$gt': function (doc, userValue, parsedField, docFieldValue) {
    return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) > 0;
  },

  '$lte': function (doc, userValue, parsedField, docFieldValue) {
    return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) <= 0;
  },

  '$lt': function (doc, userValue, parsedField, docFieldValue) {
    return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) < 0;
  },

  '$exists': function (doc, userValue, parsedField, docFieldValue) {
    //a field that is null is still considered to exist
    if (userValue) {
      return fieldIsNotUndefined(docFieldValue);
    }

    return !fieldIsNotUndefined(docFieldValue);
  },

  '$mod': function (doc, userValue, parsedField, docFieldValue) {
    return fieldExists(docFieldValue) && modField(docFieldValue, userValue);
  },

  // $ne values are always arrays here (see normalizeArrayOperators/mergeNe)
  '$ne': function (doc, userValue, parsedField, docFieldValue) {
    return userValue.every(function (neValue) {
      return collate(docFieldValue, neValue) !== 0;
    });
  },
  '$in': function (doc, userValue, parsedField, docFieldValue) {
    return fieldExists(docFieldValue) && arrayContainsValue(docFieldValue, userValue);
  },

  '$nin': function (doc, userValue, parsedField, docFieldValue) {
    return fieldExists(docFieldValue) && !arrayContainsValue(docFieldValue, userValue);
  },

  '$size': function (doc, userValue, parsedField, docFieldValue) {
    return fieldExists(docFieldValue) &&
      Array.isArray(docFieldValue) &&
      arraySize(docFieldValue, userValue);
  },

  '$all': function (doc, userValue, parsedField, docFieldValue) {
    return Array.isArray(docFieldValue) && arrayContainsAllValues(docFieldValue, userValue);
  },

  // $regex values are always arrays here (see normalizeArrayOperators)
  '$regex': function (doc, userValue, parsedField, docFieldValue) {
    return fieldExists(docFieldValue) &&
      typeof docFieldValue == "string" &&
      userValue.every(function (regexValue) {
        return regexMatch(docFieldValue, regexValue);
      });
  },

  '$type': function (doc, userValue, parsedField, docFieldValue) {
    return typeMatch(docFieldValue, userValue);
  }
};
6534
6535// return true if the given doc matches the supplied selector
// Return true when `doc` satisfies the given Mango `selector`, by
// running it through the same in-memory filter used for queries.
function matchesSelector(doc, selector) {
  /* istanbul ignore if */
  if (typeof selector !== 'object') {
    // match the CouchDB error message
    throw new Error('Selector error: expected a JSON object');
  }

  var normalized = massageSelector(selector);
  var matched = filterInMemoryFields(
    [{doc}],
    {selector: normalized},
    Object.keys(normalized)
  );
  return matched && matched.length === 1;
}
6551
// Compile the source text of a filter function into a callable.
function evalFilter(input) {
  var src = ['"use strict";', 'return ' + input + ';'].join('\n');
  return scopeEval(src, {});
}
6555
// Wrap a map-function source string so it can be used as a changes
// filter: the wrapper returns true iff the view emits for the doc.
function evalView(input) {
  var code =
    'return function(doc) {\n' +
    '  "use strict";\n' +
    '  var emitted = false;\n' +
    '  var emit = function (a, b) {\n' +
    '    emitted = true;\n' +
    '  };\n' +
    '  var view = ' + input + ';\n' +
    '  view(doc);\n' +
    '  if (emitted) {\n' +
    '    return true;\n' +
    '  }\n' +
    '};';

  return scopeEval(code, {});
}
6574
// Reject an options object that combines `selector` with an
// incompatible `filter`; otherwise signal success via the callback.
function validate(opts, callback) {
  var filterConflicts = opts.selector &&
    opts.filter && opts.filter !== '_selector';
  if (filterConflicts) {
    var filterName = typeof opts.filter === 'string' ?
      opts.filter : 'function';
    return callback(new Error('selector invalid for filter "' + filterName + '"'));
  }
  callback();
}
6585
// Normalize changes options in place: infer the implicit '_view' /
// '_selector' filters, and canonicalize design-doc function names.
function normalize(opts) {
  if (!opts.filter) {
    // `view` takes precedence over `selector` when inferring a filter
    if (opts.view) {
      opts.filter = '_view';
    } else if (opts.selector) {
      opts.filter = '_selector';
    }
  }

  var hasNamedFilter = opts.filter && typeof opts.filter === 'string';
  if (hasNamedFilter) {
    if (opts.filter === '_view') {
      opts.view = normalizeDesignDocFunctionName(opts.view);
    } else {
      opts.filter = normalizeDesignDocFunctionName(opts.filter);
    }
  }
}
6603
// Decide whether the changes feed must run the filter locally: only
// for a named (string) filter, when no explicit doc_ids list is given,
// and only for a local database — remote databases filter server-side.
// Note: returns a truthy/falsy value, not strictly a boolean.
function shouldFilter(changesHandler, opts) {
  return opts.filter && typeof opts.filter === 'string' &&
    !opts.doc_ids && !isRemote(changesHandler.db);
}
6608
// Resolve opts.filter into an actual function, then kick off the
// changes feed. Three cases: '_view' loads a map function from a
// design doc and treats "did it emit?" as the filter; a selector is
// wrapped in matchesSelector; any other string names a function in a
// design doc's `filters`. Errors are reported via opts.complete.
function filter(changesHandler, opts) {
  var callback = opts.complete;
  if (opts.filter === '_view') {
    if (!opts.view || typeof opts.view !== 'string') {
      var err = createError(BAD_REQUEST,
        '`view` filter parameter not found or invalid.');
      return callback(err);
    }
    // fetch a view from a design doc, make it behave like a filter
    var viewName = parseDesignDocFunctionName(opts.view);
    changesHandler.db.get('_design/' + viewName[0], function (err, ddoc) {
      /* istanbul ignore if */
      if (changesHandler.isCancelled) {
        return callback(null, {status: 'cancelled'});
      }
      /* istanbul ignore next */
      if (err) {
        return callback(generateErrorFromResponse(err));
      }
      var mapFun = ddoc && ddoc.views && ddoc.views[viewName[1]] &&
        ddoc.views[viewName[1]].map;
      if (!mapFun) {
        // mirror CouchDB: report whichever json key was missing
        return callback(createError(MISSING_DOC,
          (ddoc.views ? 'missing json key: ' + viewName[1] :
            'missing json key: views')));
      }
      opts.filter = evalView(mapFun);
      changesHandler.doChanges(opts);
    });
  } else if (opts.selector) {
    opts.filter = function (doc) {
      return matchesSelector(doc, opts.selector);
    };
    changesHandler.doChanges(opts);
  } else {
    // fetch a filter from a design doc
    var filterName = parseDesignDocFunctionName(opts.filter);
    changesHandler.db.get('_design/' + filterName[0], function (err, ddoc) {
      /* istanbul ignore if */
      if (changesHandler.isCancelled) {
        return callback(null, {status: 'cancelled'});
      }
      /* istanbul ignore next */
      if (err) {
        return callback(generateErrorFromResponse(err));
      }
      var filterFun = ddoc && ddoc.filters && ddoc.filters[filterName[1]];
      if (!filterFun) {
        return callback(createError(MISSING_DOC,
          ((ddoc && ddoc.filters) ? 'missing json key: ' + filterName[1]
            : 'missing json key: filters')));
      }
      opts.filter = evalFilter(filterFun);
      changesHandler.doChanges(opts);
    });
  }
}
6666
// Attach the changes-filter helpers to the PouchDB constructor so the
// core changes implementation can delegate filter handling to them.
function applyChangesFilterPlugin(PouchDB) {
  PouchDB._changesFilterPlugin = {
    validate,
    normalize,
    shouldFilter,
    filter
  };
}
6675
// TODO: remove from pouchdb-core (breaking)
PouchDB.plugin(applyChangesFilterPlugin);

// Expose the bundled build version string on the constructor.
PouchDB.version = version;
6680
6681//
6682// Blobs are not supported in all versions of IndexedDB, notably
6683// Chrome <37, Android <5 and (some?) webkit-based browsers.
6684// In those versions, storing a blob will throw.
6685//
6686// Example Webkit error:
6687// > DataCloneError: Failed to store record in an IDBObjectStore: BlobURLs are not yet supported.
6688//
6689// Various other blob bugs exist in Chrome v37-42 (inclusive).
6690// Detecting them is expensive and confusing to users, and Chrome 37-42
6691// is at very low usage worldwide, so we do a hacky userAgent check instead.
6692//
6693// content-type bug: https://code.google.com/p/chromium/issues/detail?id=408120
6694// 404 bug: https://code.google.com/p/chromium/issues/detail?id=447916
6695// FileReader bug: https://code.google.com/p/chromium/issues/detail?id=447836
6696//
// Detect whether this IndexedDB implementation can store Blobs
// natively by attempting to write an empty Blob inside the given
// transaction. `docIdOrCreateDoc` is either a plain put() key, or a
// function that wraps the blob into a document when the store has a
// key path. Resolves to true/false and never rejects.
function checkBlobSupport(txn, store, docIdOrCreateDoc) {
  return new Promise(function (resolve) {
    var blob$$1 = createBlob(['']);

    let req;
    if (typeof docIdOrCreateDoc === 'function') {
      // Store may require a specific key path, in which case we can't store the
      // blob directly in the store.
      const createDoc = docIdOrCreateDoc;
      const doc = createDoc(blob$$1);
      req = txn.objectStore(store).put(doc);
    } else {
      const docId = docIdOrCreateDoc;
      req = txn.objectStore(store).put(blob$$1, docId);
    }

    req.onsuccess = function () {
      // Even if the put succeeded, Chrome 37-42 had blob bugs, so
      // treat those versions as unsupported via a userAgent sniff.
      var matchedChrome = navigator.userAgent.match(/Chrome\/(\d+)/);
      var matchedEdge = navigator.userAgent.match(/Edge\//);
      // MS Edge pretends to be Chrome 42:
      // https://msdn.microsoft.com/en-us/library/hh869301%28v=vs.85%29.aspx
      resolve(matchedEdge || !matchedChrome ||
        parseInt(matchedChrome[1], 10) >= 43);
    };

    req.onerror = txn.onabort = function (e) {
      // If the transaction aborts now its due to not being able to
      // write to the database, likely due to the disk being full
      e.preventDefault();
      e.stopPropagation();
      resolve(false);
    };
  }).catch(function () {
    return false; // error, so assume unsupported
  });
}
6733
// Build a truthy-membership map ({item: true, ...}) from a string list.
function toObject(array) {
  var obj = {};
  for (var i = 0; i < array.length; i++) {
    obj[array[i]] = true;
  }
  return obj;
}
// List of top level reserved words for doc
// (any other underscore-prefixed top-level key is rejected by parseDoc
// with a DOC_VALIDATION error)
var reservedWords = toObject([
  '_id',
  '_rev',
  '_access',
  '_attachments',
  '_deleted',
  '_revisions',
  '_revs_info',
  '_conflicts',
  '_deleted_conflicts',
  '_local_seq',
  '_rev_tree',
  // replication documents
  '_replication_id',
  '_replication_state',
  '_replication_state_time',
  '_replication_state_reason',
  '_replication_stats',
  // Specific to Couchbase Sync Gateway
  '_removed'
]);

// List of reserved words that should end up in the document
// (kept in `data` by parseDoc rather than being moved to metadata)
var dataWords = toObject([
  '_access',
  '_attachments',
  // replication documents
  '_replication_id',
  '_replication_state',
  '_replication_state_time',
  '_replication_state_reason',
  '_replication_stats'
]);
6774
// Split a revision string of the form "N-hash" into
// {prefix: N, id: "hash"}. Returns an INVALID_REV error object when
// the string does not start with "<digits>-".
function parseRevisionInfo(rev$$1) {
  if (!/^\d+-/.test(rev$$1)) {
    return createError(INVALID_REV);
  }
  var dashAt = rev$$1.indexOf('-');
  return {
    prefix: parseInt(rev$$1.slice(0, dashAt), 10),
    id: rev$$1.slice(dashAt + 1)
  };
}
6787
// Convert a CouchDB _revisions object ({start, ids: [newest..oldest]})
// into a single-branch rev tree. Only the newest revision carries
// `opts`; every ancestor is a 'missing' stub.
function makeRevTreeFromRevisions(revisions, opts) {
  var revisionIds = revisions.ids;
  var node = [revisionIds[0], opts, []];

  for (var i = 1; i < revisionIds.length; i++) {
    node = [revisionIds[i], {status: 'missing'}, [node]];
  }

  return [{
    pos: revisions.start - revisionIds.length + 1,
    ids: node
  }];
}
6803
// Preprocess documents, parse their revisions, assign an id and a
// revision for new writes that are missing them, etc
// Returns {metadata, data} on success, or an error object (from
// parseRevisionInfo) when the supplied _rev is malformed; throws a
// DOC_VALIDATION error for unknown underscore-prefixed fields.
function parseDoc(doc, newEdits, dbOpts) {
  if (!dbOpts) {
    dbOpts = {
      deterministic_revs: true
    };
  }

  var nRevNum;
  var newRevId;
  var revInfo;
  var opts = {status: 'available'};
  if (doc._deleted) {
    opts.deleted = true;
  }

  if (newEdits) {
    // Normal write path: mint a fresh revision id and graft it onto
    // the supplied _rev (if any) as a new leaf.
    if (!doc._id) {
      doc._id = uuid$1();
    }
    newRevId = rev(doc, dbOpts.deterministic_revs);
    if (doc._rev) {
      revInfo = parseRevisionInfo(doc._rev);
      if (revInfo.error) {
        return revInfo;
      }
      doc._rev_tree = [{
        pos: revInfo.prefix,
        ids: [revInfo.id, {status: 'missing'}, [[newRevId, opts, []]]]
      }];
      nRevNum = revInfo.prefix + 1;
    } else {
      doc._rev_tree = [{
        pos: 1,
        ids : [newRevId, opts, []]
      }];
      nRevNum = 1;
    }
  } else {
    // Replication path (new_edits=false): trust the caller-provided
    // revision history (_revisions) or the bare _rev.
    if (doc._revisions) {
      doc._rev_tree = makeRevTreeFromRevisions(doc._revisions, opts);
      nRevNum = doc._revisions.start;
      newRevId = doc._revisions.ids[0];
    }
    if (!doc._rev_tree) {
      revInfo = parseRevisionInfo(doc._rev);
      if (revInfo.error) {
        return revInfo;
      }
      nRevNum = revInfo.prefix;
      newRevId = revInfo.id;
      doc._rev_tree = [{
        pos: nRevNum,
        ids: [newRevId, opts, []]
      }];
    }
  }

  invalidIdError(doc._id);

  doc._rev = nRevNum + '-' + newRevId;

  // Split the doc: underscore-prefixed keys become metadata (with the
  // underscore stripped), except those in dataWords which stay in the
  // document body; everything else is document data.
  var result = {metadata : {}, data : {}};
  for (var key in doc) {
    /* istanbul ignore else */
    if (Object.prototype.hasOwnProperty.call(doc, key)) {
      var specialKey = key[0] === '_';
      if (specialKey && !reservedWords[key]) {
        var error = createError(DOC_VALIDATION, key);
        error.message = DOC_VALIDATION.message + ': ' + key;
        throw error;
      } else if (specialKey && !dataWords[key]) {
        result.metadata[key.slice(1)] = doc[key];
      } else {
        result.data[key] = doc[key];
      }
    }
  }
  return result;
}
6885
// Decode a base64 attachment payload to a binary string. On invalid
// input, returns {error} instead of throwing so callers can report a
// BAD_ARG error through their callback.
function parseBase64(data) {
  try {
    return thisAtob(data);
  } catch (e) {
    var err = createError(BAD_ARG,
      'Attachment is not a valid base64 string');
    return {error: err};
  }
}
6895
// Normalize a base64-string attachment in place: decode it, convert
// `data` to the adapter's preferred representation ('blob', 'base64'
// or binary string), set `length`, and compute the md5 digest before
// invoking the callback.
function preprocessString(att, blobType, callback) {
  var asBinary = parseBase64(att.data);
  if (asBinary.error) {
    return callback(asBinary.error);
  }

  att.length = asBinary.length;
  if (blobType === 'blob') {
    att.data = binStringToBluffer(asBinary, att.content_type);
  } else if (blobType === 'base64') {
    att.data = thisBtoa(asBinary);
  } else { // binary
    att.data = asBinary;
  }
  binaryMd5(asBinary, function (result) {
    att.digest = 'md5-' + result;
    callback();
  });
}
6915
// Normalize a blob/buffer attachment in place: compute the md5 digest
// and length, and convert `data` to a binary or base64 string when the
// adapter can't store blobs natively.
function preprocessBlob(att, blobType, callback) {
  binaryMd5(att.data, function (md5) {
    att.digest = 'md5-' + md5;
    // size is for blobs (browser), length is for buffers (node)
    att.length = att.data.size || att.data.length || 0;
    if (blobType === 'binary') {
      blobToBinaryString(att.data, function (binString) {
        att.data = binString;
        callback();
      });
    } else if (blobType === 'base64') {
      blobToBase64(att.data, function (b64) {
        att.data = b64;
        callback();
      });
    } else {
      // 'blob': keep the native blob/buffer as-is
      callback();
    }
  });
}
6936
// Normalize a single attachment: stubs need no work, base64 strings go
// through preprocessString, and blobs/buffers through preprocessBlob.
function preprocessAttachment(att, blobType, callback) {
  if (att.stub) {
    return callback();
  }
  var isBase64String = typeof att.data === 'string';
  if (isBase64String) {
    // input is a base64 string
    preprocessString(att, blobType, callback);
  } else {
    // input is a blob
    preprocessBlob(att, blobType, callback);
  }
}
6947
// Normalize every attachment of every docInfo (digest, length, data
// representation) before writing, then invoke `callback`, passing the
// first attachment error encountered (if any).
function preprocessAttachments(docInfos, blobType, callback) {

  if (!docInfos.length) {
    return callback();
  }

  var docv = 0;
  var overallErr;

  docInfos.forEach(function (docInfo) {
    var attachments = docInfo.data && docInfo.data._attachments ?
      Object.keys(docInfo.data._attachments) : [];
    var recv = 0;

    if (!attachments.length) {
      return done();
    }

    function processedAttachment(err) {
      // Keep only the first error; a later successful attachment
      // (which calls back with no args) must not clear it.
      if (err && !overallErr) {
        overallErr = err;
      }
      recv++;
      if (recv === attachments.length) {
        done();
      }
    }

    for (var key in docInfo.data._attachments) {
      if (Object.prototype.hasOwnProperty.call(docInfo.data._attachments, key)) {
        preprocessAttachment(docInfo.data._attachments[key],
          blobType, processedAttachment);
      }
    }
  });

  function done() {
    docv++;
    if (docInfos.length === docv) {
      if (overallErr) {
        callback(overallErr);
      } else {
        callback();
      }
    }
  }
}
6993
// Apply a write for a doc id that already exists. Merges the incoming
// rev tree into the stored metadata (`prev`), detects conflicts for
// new_edits writes, and hands the merged doc to writeDoc along with
// the recalculated winning rev, deleted flags and doc-count delta.
function updateDoc(revLimit, prev, docInfo, results,
                   i, cb, writeDoc, newEdits) {

  if (revExists(prev.rev_tree, docInfo.metadata.rev) && !newEdits) {
    // replication handed us a rev we already have: no-op success
    results[i] = docInfo;
    return cb();
  }

  // sometimes this is pre-calculated. historically not always
  var previousWinningRev = prev.winningRev || winningRev(prev);
  var previouslyDeleted = 'deleted' in prev ? prev.deleted :
    isDeleted(prev, previousWinningRev);
  var deleted = 'deleted' in docInfo.metadata ? docInfo.metadata.deleted :
    isDeleted(docInfo.metadata);
  var isRoot = /^1-/.test(docInfo.metadata.rev);

  if (previouslyDeleted && !deleted && newEdits && isRoot) {
    // Recreating a deleted doc with a generation-1 rev: re-parent the
    // new edit onto the deleted winner rather than forking a branch.
    var newDoc = docInfo.data;
    newDoc._rev = previousWinningRev;
    newDoc._id = docInfo.metadata.id;
    docInfo = parseDoc(newDoc, newEdits);
  }

  var merged = merge(prev.rev_tree, docInfo.metadata.rev_tree[0], revLimit);

  // new_edits writes must extend a leaf; anything else is a 409,
  // except undeleting via a brand-new branch, which is allowed.
  var inConflict = newEdits && ((
    (previouslyDeleted && deleted && merged.conflicts !== 'new_leaf') ||
    (!previouslyDeleted && merged.conflicts !== 'new_leaf') ||
    (previouslyDeleted && !deleted && merged.conflicts === 'new_branch')));

  if (inConflict) {
    var err = createError(REV_CONFLICT);
    results[i] = err;
    return cb();
  }

  var newRev = docInfo.metadata.rev;
  docInfo.metadata.rev_tree = merged.tree;
  docInfo.stemmedRevs = merged.stemmedRevs || [];
  /* istanbul ignore else */
  if (prev.rev_map) {
    docInfo.metadata.rev_map = prev.rev_map; // used only by leveldb
  }

  // recalculate
  var winningRev$$1 = winningRev(docInfo.metadata);
  var winningRevIsDeleted = isDeleted(docInfo.metadata, winningRev$$1);

  // calculate the total number of documents that were added/removed,
  // from the perspective of total_rows/doc_count
  var delta = (previouslyDeleted === winningRevIsDeleted) ? 0 :
    previouslyDeleted < winningRevIsDeleted ? -1 : 1;

  var newRevIsDeleted;
  if (newRev === winningRev$$1) {
    // if the new rev is the same as the winning rev, we can reuse that value
    newRevIsDeleted = winningRevIsDeleted;
  } else {
    // if they're not the same, then we need to recalculate
    newRevIsDeleted = isDeleted(docInfo.metadata, newRev);
  }

  writeDoc(docInfo, winningRev$$1, winningRevIsDeleted, newRevIsDeleted,
    true, delta, i, cb);
}
7059
// True when the root of the doc's first rev-tree branch is a stub
// ('missing'), i.e. the write claims ancestry that was never stored.
function rootIsMissing(docInfo) {
  var rootStatus = docInfo.metadata.rev_tree[0].ids[1].status;
  return rootStatus === 'missing';
}
7063
// Core bulkDocs write loop shared by adapters. Routes _local/ docs to
// _putLocal/_removeLocal, ids with existing metadata to updateDoc(),
// and brand-new ids to insertDoc(). Multiple docs sharing one id are
// written sequentially so each sees the previous one's rev tree.
// `overallCallback` fires once every doc has been handled.
function processDocs(revLimit, docInfos, api, fetchedDocs, tx, results,
                     writeDoc, opts, overallCallback) {

  // Default to 1000 locally
  revLimit = revLimit || 1000;

  function insertDoc(docInfo, resultsIdx, callback) {
    // Cant insert new deleted documents
    var winningRev$$1 = winningRev(docInfo.metadata);
    var deleted = isDeleted(docInfo.metadata, winningRev$$1);
    if ('was_delete' in opts && deleted) {
      results[resultsIdx] = createError(MISSING_DOC, 'deleted');
      return callback();
    }

    // 4712 - detect whether a new document was inserted with a _rev
    var inConflict = newEdits && rootIsMissing(docInfo);

    if (inConflict) {
      var err = createError(REV_CONFLICT);
      results[resultsIdx] = err;
      return callback();
    }

    var delta = deleted ? 0 : 1;

    writeDoc(docInfo, winningRev$$1, deleted, deleted, false,
      delta, resultsIdx, callback);
  }

  var newEdits = opts.new_edits;
  var idsToDocs = new Map();

  var docsDone = 0;
  var docsToDo = docInfos.length;

  function checkAllDocsDone() {
    if (++docsDone === docsToDo && overallCallback) {
      overallCallback();
    }
  }

  // First pass: dispatch local docs immediately, and bucket the rest
  // by id so same-id writes can be serialized below.
  docInfos.forEach(function (currentDoc, resultsIdx) {

    if (currentDoc._id && isLocalId(currentDoc._id)) {
      var fun = currentDoc._deleted ? '_removeLocal' : '_putLocal';
      api[fun](currentDoc, {ctx: tx}, function (err, res) {
        results[resultsIdx] = err || res;
        checkAllDocsDone();
      });
      return;
    }

    var id = currentDoc.metadata.id;
    if (idsToDocs.has(id)) {
      docsToDo--; // duplicate
      idsToDocs.get(id).push([currentDoc, resultsIdx]);
    } else {
      idsToDocs.set(id, [[currentDoc, resultsIdx]]);
    }
  });

  // in the case of new_edits, the user can provide multiple docs
  // with the same id. these need to be processed sequentially
  idsToDocs.forEach(function (docs, id) {
    var numDone = 0;

    function docWritten() {
      if (++numDone < docs.length) {
        nextDoc();
      } else {
        checkAllDocsDone();
      }
    }
    function nextDoc() {
      var value = docs[numDone];
      var currentDoc = value[0];
      var resultsIdx = value[1];

      if (fetchedDocs.has(id)) {
        updateDoc(revLimit, fetchedDocs.get(id), currentDoc, results,
          resultsIdx, docWritten, writeDoc, newEdits);
      } else {
        // Ensure stemming applies to new writes as well
        var merged = merge([], currentDoc.metadata.rev_tree[0], revLimit);
        currentDoc.metadata.rev_tree = merged.tree;
        currentDoc.stemmedRevs = merged.stemmedRevs || [];
        insertDoc(currentDoc, resultsIdx, docWritten);
      }
    }
    nextDoc();
  });
}
7157
// IndexedDB requires a versioned database structure, so we use the
// version here to manage migrations.
// (bumping ADAPTER_VERSION triggers an onupgradeneeded migration)
var ADAPTER_VERSION = 5;

// The object stores created for each database
// DOC_STORE stores the document meta data, its revision history and state
// Keyed by document id
var DOC_STORE = 'document-store';
// BY_SEQ_STORE stores a particular version of a document, keyed by its
// sequence id
var BY_SEQ_STORE = 'by-sequence';
// Where we store attachments
var ATTACH_STORE = 'attach-store';
// Where we store many-to-many relations
// between attachment digests and seqs
var ATTACH_AND_SEQ_STORE = 'attach-seq-store';

// Where we store database-wide meta data in a single record
// keyed by id: META_STORE
var META_STORE = 'meta-store';
// Where we store local documents
var LOCAL_STORE = 'local-store';
// Where we detect blob support
var DETECT_BLOB_SUPPORT_STORE = 'detect-blob-support';
7182
// Parse a JSON string, falling back to the recursion-free vuvuzela
// parser when JSON.parse blows the stack on deeply-nested input.
function safeJsonParse(str) {
  // This try/catch guards against stack overflow errors.
  // JSON.parse() is faster than vuvuzela.parse() but vuvuzela
  // cannot overflow.
  try {
    return JSON.parse(str);
  } catch (e) {
    /* istanbul ignore next */
    return vuvuzela.parse(str);
  }
}
7194
// Stringify to JSON, falling back to the recursion-free vuvuzela
// serializer when JSON.stringify blows the stack on deep structures.
function safeJsonStringify(json) {
  try {
    return JSON.stringify(json);
  } catch (e) {
    /* istanbul ignore next */
    return vuvuzela.stringify(json);
  }
}
7203
// Adapt an IndexedDB error event into a PouchDB IDB_ERROR passed to
// the supplied node-style callback.
function idbError(callback) {
  return function (evt) {
    var message = 'unknown_error';
    var targetErr = evt.target && evt.target.error;
    if (targetErr) {
      message = targetErr.name || targetErr.message;
    }
    callback(createError(IDB_ERROR, message, evt.type));
  };
}
7213
7214// Unfortunately, the metadata has to be stringified
7215// when it is put into the database, because otherwise
7216// IndexedDB can throw errors for deeply-nested objects.
7217// Originally we just used JSON.parse/JSON.stringify; now
7218// we use this custom vuvuzela library that avoids recursion.
7219// If we could do it all over again, we'd probably use a
7220// format for the revision trees other than JSON.
// Serialize doc metadata for storage in DOC_STORE. The rev tree is
// stringified (deep nesting can upset IndexedDB structured clone),
// and the deleted flag is flattened to '1'/'0'.
function encodeMetadata(metadata, winningRev, deleted) {
  var deletedOrLocal = deleted ? '1' : '0';
  return {
    data: safeJsonStringify(metadata),
    winningRev: winningRev,
    deletedOrLocal: deletedOrLocal,
    seq: metadata.seq, // highest seq for this doc
    id: metadata.id
  };
}
7230
// Inverse of encodeMetadata: rehydrate the stringified metadata and
// re-attach the denormalized winningRev/deleted/seq fields.
function decodeMetadata(storedObject) {
  if (!storedObject) {
    return null;
  }
  return Object.assign(safeJsonParse(storedObject.data), {
    winningRev: storedObject.winningRev,
    deleted: storedObject.deletedOrLocal === '1',
    seq: storedObject.seq
  });
}
7241
7242// read the doc back out from the database. we don't store the
7243// _id or _rev because we already have _doc_id_rev.
// Rebuild _id/_rev from the stored "_doc_id_rev" key ("id::rev");
// mutates and returns the same doc. Falsy input passes through.
function decodeDoc(doc) {
  if (!doc) {
    return doc;
  }
  var sep = doc._doc_id_rev.lastIndexOf(':');
  doc._id = doc._doc_id_rev.slice(0, sep - 1);
  doc._rev = doc._doc_id_rev.slice(sep + 1);
  delete doc._doc_id_rev;
  return doc;
}
7254
7255// Read a blob from the database, encoding as necessary
7256// and translating from base64 if the IDB doesn't support
7257// native Blobs
// Deliver an attachment body in the representation the caller asked
// for. `body` may be a native Blob/Buffer or (when the IDB lacks blob
// support) a base64 string; `asBlob` selects blob vs base64 output.
function readBlobData(body, type, asBlob, callback) {
  if (asBlob) {
    if (!body) {
      // empty attachment
      callback(createBlob([''], {type}));
    } else if (typeof body !== 'string') { // we have blob support
      callback(body);
    } else { // no blob support
      callback(b64ToBluffer(body, type));
    }
  } else { // as base64 string
    if (!body) {
      callback('');
    } else if (typeof body !== 'string') { // we have blob support
      readAsBinaryString(body, function (binary) {
        callback(thisBtoa(binary));
      });
    } else { // no blob support
      callback(body);
    }
  }
}
7279
// For each attachment on `doc`: fetch its raw body from ATTACH_STORE
// when the caller asked for full attachments (attachments +
// include_docs), otherwise mark it as a stub. Calls `cb` (if given)
// once all attachments are handled.
function fetchAttachmentsIfNecessary(doc, opts, txn, cb) {
  var attachments = Object.keys(doc._attachments || {});
  if (!attachments.length) {
    return cb && cb();
  }
  var numDone = 0;

  function checkDone() {
    if (++numDone === attachments.length && cb) {
      cb();
    }
  }

  function fetchAttachment(doc, att) {
    var attObj = doc._attachments[att];
    var digest = attObj.digest;
    var req = txn.objectStore(ATTACH_STORE).get(digest);
    req.onsuccess = function (e) {
      // body is decoded later by postProcessAttachments, outside the txn
      attObj.body = e.target.result.body;
      checkDone();
    };
  }

  attachments.forEach(function (att) {
    if (opts.attachments && opts.include_docs) {
      fetchAttachment(doc, att);
    } else {
      doc._attachments[att].stub = true;
      checkDone();
    }
  });
}
7312
7313// IDB-specific postprocessing necessary because
7314// we don't know whether we stored a true Blob or
7315// a base64-encoded string, and if it's a Blob it
7316// needs to be read outside of the transaction context
// Replace each fetched attachment body (set by
// fetchAttachmentsIfNecessary) with a {digest, content_type, data}
// object, converting to blob or base64 as requested. Runs outside the
// IDB transaction because reading a Blob is asynchronous.
function postProcessAttachments(results, asBlob) {
  return Promise.all(results.map(function (row) {
    if (row.doc && row.doc._attachments) {
      var attNames = Object.keys(row.doc._attachments);
      return Promise.all(attNames.map(function (att) {
        var attObj = row.doc._attachments[att];
        if (!('body' in attObj)) { // already processed
          return;
        }
        var body = attObj.body;
        var type = attObj.content_type;
        return new Promise(function (resolve) {
          readBlobData(body, type, asBlob, function (data) {
            row.doc._attachments[att] = Object.assign(
              pick(attObj, ['digest', 'content_type']),
              {data}
            );
            resolve();
          });
        });
      }));
    }
  }));
}
7341
// Physically delete the given revisions of one document: remove their
// BY_SEQ_STORE entries and digest/seq mappings, then delete any
// attachment whose digest is no longer referenced by any sequence.
function compactRevs(revs, docId, txn) {

  var possiblyOrphanedDigests = [];
  var seqStore = txn.objectStore(BY_SEQ_STORE);
  var attStore = txn.objectStore(ATTACH_STORE);
  var attAndSeqStore = txn.objectStore(ATTACH_AND_SEQ_STORE);
  var count = revs.length;

  function checkDone() {
    count--;
    if (!count) { // done processing all revs
      deleteOrphanedAttachments();
    }
  }

  function deleteOrphanedAttachments() {
    if (!possiblyOrphanedDigests.length) {
      return;
    }
    possiblyOrphanedDigests.forEach(function (digest) {
      // count remaining "digest::seq" mappings; zero means no doc
      // revision references this attachment any more
      var countReq = attAndSeqStore.index('digestSeq').count(
        IDBKeyRange.bound(
          digest + '::', digest + '::\uffff', false, false));
      countReq.onsuccess = function (e) {
        var count = e.target.result;
        if (!count) {
          // orphaned
          attStore.delete(digest);
        }
      };
    });
  }

  revs.forEach(function (rev$$1) {
    var index = seqStore.index('_doc_id_rev');
    var key = docId + "::" + rev$$1;
    index.getKey(key).onsuccess = function (e) {
      var seq = e.target.result;
      if (typeof seq !== 'number') {
        // rev not stored (e.g. already compacted); nothing to delete
        return checkDone();
      }
      seqStore.delete(seq);

      var cursor = attAndSeqStore.index('seq')
        .openCursor(IDBKeyRange.only(seq));

      cursor.onsuccess = function (event) {
        var cursor = event.target.result;
        if (cursor) {
          var digest = cursor.value.digestSeq.split('::')[0];
          possiblyOrphanedDigests.push(digest);
          attAndSeqStore.delete(cursor.primaryKey);
          cursor.continue();
        } else { // done
          checkDone();
        }
      };
    };
  });
}
7402
// Wrap IDBDatabase.transaction() so callers receive {txn} or {error}
// instead of a thrown exception (e.g. when the connection is closing).
function openTransactionSafely(idb, stores, mode) {
  try {
    const txn = idb.transaction(stores, mode);
    return { txn };
  } catch (error) {
    return { error };
  }
}
7414
// Shared in-memory notifier: bulkDocs completions ping any live
// changes() listeners registered for the same database name.
var changesHandler = new Changes();
7416
7417function idbBulkDocs(dbOpts, req, opts, api, idb, callback) {
7418 var docInfos = req.docs;
7419 var txn;
7420 var docStore;
7421 var bySeqStore;
7422 var attachStore;
7423 var attachAndSeqStore;
7424 var metaStore;
7425 var docInfoError;
7426 var metaDoc;
7427
7428 for (var i = 0, len = docInfos.length; i < len; i++) {
7429 var doc = docInfos[i];
7430 if (doc._id && isLocalId(doc._id)) {
7431 continue;
7432 }
7433 doc = docInfos[i] = parseDoc(doc, opts.new_edits, dbOpts);
7434 if (doc.error && !docInfoError) {
7435 docInfoError = doc;
7436 }
7437 }
7438
7439 if (docInfoError) {
7440 return callback(docInfoError);
7441 }
7442
7443 var allDocsProcessed = false;
7444 var docCountDelta = 0;
7445 var results = new Array(docInfos.length);
7446 var fetchedDocs = new Map();
7447 var preconditionErrored = false;
7448 var blobType = api._meta.blobSupport ? 'blob' : 'base64';
7449
7450 preprocessAttachments(docInfos, blobType, function (err) {
7451 if (err) {
7452 return callback(err);
7453 }
7454 startTransaction();
7455 });
7456
7457 function startTransaction() {
7458
7459 var stores = [
7460 DOC_STORE, BY_SEQ_STORE,
7461 ATTACH_STORE,
7462 LOCAL_STORE, ATTACH_AND_SEQ_STORE,
7463 META_STORE
7464 ];
7465 var txnResult = openTransactionSafely(idb, stores, 'readwrite');
7466 if (txnResult.error) {
7467 return callback(txnResult.error);
7468 }
7469 txn = txnResult.txn;
7470 txn.onabort = idbError(callback);
7471 txn.ontimeout = idbError(callback);
7472 txn.oncomplete = complete;
7473 docStore = txn.objectStore(DOC_STORE);
7474 bySeqStore = txn.objectStore(BY_SEQ_STORE);
7475 attachStore = txn.objectStore(ATTACH_STORE);
7476 attachAndSeqStore = txn.objectStore(ATTACH_AND_SEQ_STORE);
7477 metaStore = txn.objectStore(META_STORE);
7478
7479 metaStore.get(META_STORE).onsuccess = function (e) {
7480 metaDoc = e.target.result;
7481 updateDocCountIfReady();
7482 };
7483
7484 verifyAttachments(function (err) {
7485 if (err) {
7486 preconditionErrored = true;
7487 return callback(err);
7488 }
7489 fetchExistingDocs();
7490 });
7491 }
7492
7493 function onAllDocsProcessed() {
7494 allDocsProcessed = true;
7495 updateDocCountIfReady();
7496 }
7497
7498 function idbProcessDocs() {
7499 processDocs(dbOpts.revs_limit, docInfos, api, fetchedDocs,
7500 txn, results, writeDoc, opts, onAllDocsProcessed);
7501 }
7502
7503 function updateDocCountIfReady() {
7504 if (!metaDoc || !allDocsProcessed) {
7505 return;
7506 }
7507 // caching the docCount saves a lot of time in allDocs() and
7508 // info(), which is why we go to all the trouble of doing this
7509 metaDoc.docCount += docCountDelta;
7510 metaStore.put(metaDoc);
7511 }
7512
7513 function fetchExistingDocs() {
7514
7515 if (!docInfos.length) {
7516 return;
7517 }
7518
7519 var numFetched = 0;
7520
7521 function checkDone() {
7522 if (++numFetched === docInfos.length) {
7523 idbProcessDocs();
7524 }
7525 }
7526
7527 function readMetadata(event) {
7528 var metadata = decodeMetadata(event.target.result);
7529
7530 if (metadata) {
7531 fetchedDocs.set(metadata.id, metadata);
7532 }
7533 checkDone();
7534 }
7535
7536 for (var i = 0, len = docInfos.length; i < len; i++) {
7537 var docInfo = docInfos[i];
7538 if (docInfo._id && isLocalId(docInfo._id)) {
7539 checkDone(); // skip local docs
7540 continue;
7541 }
7542 var req = docStore.get(docInfo.metadata.id);
7543 req.onsuccess = readMetadata;
7544 }
7545 }
7546
7547 function complete() {
7548 if (preconditionErrored) {
7549 return;
7550 }
7551
7552 changesHandler.notify(api._meta.name);
7553 callback(null, results);
7554 }
7555
7556 function verifyAttachment(digest, callback) {
7557
7558 var req = attachStore.get(digest);
7559 req.onsuccess = function (e) {
7560 if (!e.target.result) {
7561 var err = createError(MISSING_STUB,
7562 'unknown stub attachment with digest ' +
7563 digest);
7564 err.status = 412;
7565 callback(err);
7566 } else {
7567 callback();
7568 }
7569 };
7570 }
7571
7572 function verifyAttachments(finish) {
7573
7574
7575 var digests = [];
7576 docInfos.forEach(function (docInfo) {
7577 if (docInfo.data && docInfo.data._attachments) {
7578 Object.keys(docInfo.data._attachments).forEach(function (filename) {
7579 var att = docInfo.data._attachments[filename];
7580 if (att.stub) {
7581 digests.push(att.digest);
7582 }
7583 });
7584 }
7585 });
7586 if (!digests.length) {
7587 return finish();
7588 }
7589 var numDone = 0;
7590 var err;
7591
7592 function checkDone() {
7593 if (++numDone === digests.length) {
7594 finish(err);
7595 }
7596 }
7597 digests.forEach(function (digest) {
7598 verifyAttachment(digest, function (attErr) {
7599 if (attErr && !err) {
7600 err = attErr;
7601 }
7602 checkDone();
7603 });
7604 });
7605 }
7606
7607 function writeDoc(docInfo, winningRev$$1, winningRevIsDeleted, newRevIsDeleted,
7608 isUpdate, delta, resultsIdx, callback) {
7609
7610 docInfo.metadata.winningRev = winningRev$$1;
7611 docInfo.metadata.deleted = winningRevIsDeleted;
7612
7613 var doc = docInfo.data;
7614 doc._id = docInfo.metadata.id;
7615 doc._rev = docInfo.metadata.rev;
7616
7617 if (newRevIsDeleted) {
7618 doc._deleted = true;
7619 }
7620
7621 var hasAttachments = doc._attachments &&
7622 Object.keys(doc._attachments).length;
7623 if (hasAttachments) {
7624 return writeAttachments(docInfo, winningRev$$1, winningRevIsDeleted,
7625 isUpdate, resultsIdx, callback);
7626 }
7627
7628 docCountDelta += delta;
7629 updateDocCountIfReady();
7630
7631 finishDoc(docInfo, winningRev$$1, winningRevIsDeleted,
7632 isUpdate, resultsIdx, callback);
7633 }
7634
// Persist one document revision: put the doc body into the by-seq store,
// then store its encoded metadata, record the per-doc result, and finally
// write the attachment/seq mappings used later by compaction.
// Uses closure state from the enclosing bulkDocs routine: api, txn,
// docStore, bySeqStore, results, fetchedDocs.
function finishDoc(docInfo, winningRev$$1, winningRevIsDeleted,
                   isUpdate, resultsIdx, callback) {

  var doc = docInfo.data;
  var metadata = docInfo.metadata;

  // the by-seq store is keyed by an auto-increment seq; the doc itself is
  // addressed via the '_doc_id_rev' index, so stash that key on the doc
  doc._doc_id_rev = metadata.id + '::' + metadata.rev;
  delete doc._id;
  delete doc._rev;

  // step 2: after the doc body is stored, compact stemmed revs and store
  // the metadata (e.target.result is the new seq assigned by IndexedDB)
  function afterPutDoc(e) {
    var revsToDelete = docInfo.stemmedRevs || [];

    if (isUpdate && api.auto_compaction) {
      revsToDelete = revsToDelete.concat(compactTree(docInfo.metadata));
    }

    if (revsToDelete && revsToDelete.length) {
      compactRevs(revsToDelete, docInfo.metadata.id, txn);
    }

    metadata.seq = e.target.result;
    // Current _rev is calculated from _rev_tree on read
    // delete metadata.rev;
    var metadataToStore = encodeMetadata(metadata, winningRev$$1,
      winningRevIsDeleted);
    var metaDataReq = docStore.put(metadataToStore);
    metaDataReq.onsuccess = afterPutMetadata;
  }

  // error path for step 1: a ConstraintError means this id::rev already
  // exists in the by-seq store, so re-put at its existing primary key
  function afterPutDocError(e) {
    // ConstraintError, need to update, not put (see #1638 for details)
    e.preventDefault(); // avoid transaction abort
    e.stopPropagation(); // avoid transaction onerror
    var index = bySeqStore.index('_doc_id_rev');
    var getKeyReq = index.getKey(doc._doc_id_rev);
    getKeyReq.onsuccess = function (e) {
      var putReq = bySeqStore.put(doc, e.target.result);
      putReq.onsuccess = afterPutDoc;
    };
  }

  // step 3: record success, cache the metadata for later docs in this
  // batch, and write the digest/seq mappings before invoking the callback
  function afterPutMetadata() {
    results[resultsIdx] = {
      ok: true,
      id: metadata.id,
      rev: metadata.rev
    };
    fetchedDocs.set(docInfo.metadata.id, docInfo.metadata);
    insertAttachmentMappings(docInfo, metadata.seq, callback);
  }

  // step 1: store the doc body
  var putReq = bySeqStore.put(doc);

  putReq.onsuccess = afterPutDoc;
  putReq.onerror = afterPutDocError;
}
7692
// Persist every non-stub attachment on the doc, then hand off to
// finishDoc(). Stubs are counted as already saved.
function writeAttachments(docInfo, winningRev$$1, winningRevIsDeleted,
                          isUpdate, resultsIdx, callback) {

  var doc = docInfo.data;
  var names = Object.keys(doc._attachments);
  var saved = 0;

  function onAttachmentDone() {
    saved++;
    if (saved === names.length) {
      // every attachment (stub or fresh) is accounted for
      finishDoc(docInfo, winningRev$$1, winningRevIsDeleted,
        isUpdate, resultsIdx, callback);
    }
  }

  names.forEach(function (name) {
    var att = docInfo.data._attachments[name];
    if (att.stub) {
      // stub: body is already in the attachment store
      onAttachmentDone();
      return;
    }
    var body = att.data;
    delete att.data;
    att.revpos = parseInt(winningRev$$1, 10);
    saveAttachment(att.digest, body, onAttachmentDone);
  });
}
7728
7729 // map seqs to attachment digests, which
7730 // we will need later during compaction
// map seqs to attachment digests, which
// we will need later during compaction
function insertAttachmentMappings(docInfo, seq, callback) {

  var names = Object.keys(docInfo.data._attachments || {});
  if (names.length === 0) {
    return callback();
  }

  var remaining = names.length;

  function onDone() {
    if (--remaining === 0) {
      callback();
    }
  }

  // write all mappings in parallel
  names.forEach(function (name) {
    var digest = docInfo.data._attachments[name].digest;
    var req = attachAndSeqStore.put({
      seq,
      digestSeq: digest + '::' + seq
    });
    req.onsuccess = onDone;
    req.onerror = function (e) {
      // this callback is for a constraint error, which we ignore
      // because this docid/rev has already been associated with
      // the digest (e.g. when new_edits == false)
      e.preventDefault(); // avoid transaction abort
      e.stopPropagation(); // avoid transaction onerror
      onDone();
    };
  });
}
7767
// Store an attachment body under its digest, unless a body with that digest
// has already been written (attachments are content-addressed by digest).
function saveAttachment(digest, data, callback) {

  var countReq = attachStore.count(digest);
  countReq.onsuccess = function (e) {
    if (e.target.result) {
      return callback(); // already exists, nothing to write
    }
    var putReq = attachStore.put({
      digest,
      body: data
    });
    putReq.onsuccess = callback;
  };
}
7785}
7786
7787// Abstraction over IDBCursor and getAll()/getAllKeys() that allows us to batch our operations
7788// while falling back to a normal IDBCursor operation on browsers that don't support getAll() or
7789// getAllKeys(). This allows for a much faster implementation than just straight-up cursors, because
7790// we're not processing each document one-at-a-time.
// Iterate an object store in batches, calling
// onBatch(keys, values, cursorLike) per batch and onBatch() when done.
// Uses getAll()/getAllKeys() pairs when available, else a plain IDBCursor.
function runBatchedCursor(objectStore, keyRange, descending, batchSize, onBatch) {

  if (batchSize === -1) {
    batchSize = 1000; // default batch size
  }

  // Bail out of getAll()/getAllKeys() in the following cases:
  // 1) either method is unsupported - we need both
  // 2) batchSize is 1 (might as well use IDBCursor)
  // 3) descending – no real way to do this via getAll()/getAllKeys()

  var useGetAll = typeof objectStore.getAll === 'function' &&
    typeof objectStore.getAllKeys === 'function' &&
    batchSize > 1 && !descending;

  var keysBatch;
  var valuesBatch;
  var pseudoCursor;

  // getAll() and getAllKeys() run as two parallel requests; whichever
  // result arrives second fires onBatch with the paired keys/values
  function onGetAll(e) {
    valuesBatch = e.target.result;
    if (keysBatch) {
      onBatch(keysBatch, valuesBatch, pseudoCursor);
    }
  }

  function onGetAllKeys(e) {
    keysBatch = e.target.result;
    if (valuesBatch) {
      onBatch(keysBatch, valuesBatch, pseudoCursor);
    }
  }

  // "continue" for the getAll path: issue the next batch starting just
  // past the last key already seen
  function continuePseudoCursor() {
    if (!keysBatch.length) { // no more results
      return onBatch();
    }
    // fetch next batch, exclusive start
    var lastKey = keysBatch[keysBatch.length - 1];
    var newKeyRange;
    if (keyRange && keyRange.upper) {
      try {
        newKeyRange = IDBKeyRange.bound(lastKey, keyRange.upper,
          true, keyRange.upperOpen);
      } catch (e) {
        if (e.name === "DataError" && e.code === 0) {
          return onBatch(); // we're done, startkey and endkey are equal
        }
      }
    } else {
      newKeyRange = IDBKeyRange.lowerBound(lastKey, true);
    }
    keyRange = newKeyRange;
    // reset so onGetAll/onGetAllKeys pair up for the new batch
    keysBatch = null;
    valuesBatch = null;
    objectStore.getAll(keyRange, batchSize).onsuccess = onGetAll;
    objectStore.getAllKeys(keyRange, batchSize).onsuccess = onGetAllKeys;
  }

  function onCursor(e) {
    var cursor = e.target.result;
    if (!cursor) { // done
      return onBatch();
    }
    // regular IDBCursor acts like a batch where batch size is always 1
    onBatch([cursor.key], [cursor.value], cursor);
  }

  if (useGetAll) {
    // expose a cursor-like object so callers can call continue() uniformly
    pseudoCursor = {"continue": continuePseudoCursor};
    objectStore.getAll(keyRange, batchSize).onsuccess = onGetAll;
    objectStore.getAllKeys(keyRange, batchSize).onsuccess = onGetAllKeys;
  } else if (descending) {
    objectStore.openCursor(keyRange, 'prev').onsuccess = onCursor;
  } else {
    objectStore.openCursor(keyRange).onsuccess = onCursor;
  }
}
7869
7870// simple shim for objectStore.getAll(), falling back to IDBCursor
// simple shim for objectStore.getAll(), falling back to IDBCursor
function getAll(objectStore, keyRange, onSuccess) {
  if (typeof objectStore.getAll !== 'function') {
    // no native getAll(): emulate it by draining a cursor into an array
    var collected = [];
    objectStore.openCursor(keyRange).onsuccess = function (e) {
      var cursor = e.target.result;
      if (!cursor) {
        // done -- hand back a synthetic event shaped like getAll()'s
        onSuccess({
          target: {
            result: collected
          }
        });
        return;
      }
      collected.push(cursor.value);
      cursor.continue();
    };
    return;
  }
  // use native getAll
  objectStore.getAll(keyRange).onsuccess = onSuccess;
}
7896
// Fetch docs for an explicit list of keys. IndexedDB may answer the
// individual get() requests in any order, so results are slotted into a
// pre-sized array by index and emitted once all of them have arrived.
function allDocsKeys(keys, docStore, onBatch) {
  var remaining = keys.length;
  var valuesBatch = new Array(keys.length);
  keys.forEach(function (key, index) {
    docStore.get(key).onsuccess = function (event) {
      var result = event.target.result;
      valuesBatch[index] = result ? result : {key, error: 'not_found'};
      remaining--;
      if (remaining === 0) {
        onBatch(keys, valuesBatch, {});
      }
    };
  });
}
7915
// Build the IDBKeyRange for an allDocs-style query. Returns null for an
// unbounded query, or {error} if IndexedDB rejects the bounds.
function createKeyRange(start, end, inclusiveEnd, key, descending) {
  try {
    if (start && end) {
      // both ends given: direction decides which argument is the lower bound
      return descending ?
        IDBKeyRange.bound(end, start, !inclusiveEnd, false) :
        IDBKeyRange.bound(start, end, false, !inclusiveEnd);
    }
    if (start) {
      return descending ?
        IDBKeyRange.upperBound(start) :
        IDBKeyRange.lowerBound(start);
    }
    if (end) {
      return descending ?
        IDBKeyRange.lowerBound(end, !inclusiveEnd) :
        IDBKeyRange.upperBound(end, !inclusiveEnd);
    }
    if (key) {
      return IDBKeyRange.only(key);
    }
  } catch (e) {
    return {error: e};
  }
  return null;
}
7944
// allDocs for the idb adapter: builds a key range from the query options,
// streams matching doc metadata (by key list, full getAll, or batched
// cursor), optionally attaches docs/conflicts/attachments, and invokes
// callback(null, {total_rows, offset, rows}) once the transaction completes.
function idbAllDocs(opts, idb, callback) {
  var start = 'startkey' in opts ? opts.startkey : false;
  var end = 'endkey' in opts ? opts.endkey : false;
  var key = 'key' in opts ? opts.key : false;
  var keys = 'keys' in opts ? opts.keys : false;
  var skip = opts.skip || 0;
  var limit = typeof opts.limit === 'number' ? opts.limit : -1;
  var inclusiveEnd = opts.inclusive_end !== false;

  var keyRange ;
  var keyRangeError;
  if (!keys) {
    keyRange = createKeyRange(start, end, inclusiveEnd, key, opts.descending);
    keyRangeError = keyRange && keyRange.error;
    if (keyRangeError &&
      !(keyRangeError.name === "DataError" && keyRangeError.code === 0)) {
      // DataError with error code 0 indicates start is less than end, so
      // can just do an empty query. Else need to throw
      return callback(createError(IDB_ERROR,
        keyRangeError.name, keyRangeError.message));
    }
  }

  var stores = [DOC_STORE, BY_SEQ_STORE, META_STORE];

  if (opts.attachments) {
    stores.push(ATTACH_STORE);
  }
  var txnResult = openTransactionSafely(idb, stores, 'readonly');
  if (txnResult.error) {
    return callback(txnResult.error);
  }
  var txn = txnResult.txn;
  txn.oncomplete = onTxnComplete;
  txn.onabort = idbError(callback);
  var docStore = txn.objectStore(DOC_STORE);
  var seqStore = txn.objectStore(BY_SEQ_STORE);
  var metaStore = txn.objectStore(META_STORE);
  var docIdRevIndex = seqStore.index('_doc_id_rev');
  var results = [];
  var docCount;
  var updateSeq;

  // fetch total_rows in parallel with the main query
  metaStore.get(META_STORE).onsuccess = function (e) {
    docCount = e.target.result.docCount;
  };

  /* istanbul ignore if */
  if (opts.update_seq) {
    // get max updateSeq
    seqStore.openKeyCursor(null, 'prev').onsuccess = e => {
      var cursor = e.target.result;
      if (cursor && cursor.key) {
        updateSeq = cursor.key;
      }
    };
  }

  // if the user specifies include_docs=true, then we don't
  // want to block the main cursor while we're fetching the doc
  function fetchDocAsynchronously(metadata, row, winningRev$$1) {
    var key = metadata.id + "::" + winningRev$$1;
    docIdRevIndex.get(key).onsuccess = function onGetDoc(e) {
      row.doc = decodeDoc(e.target.result) || {};
      if (opts.conflicts) {
        var conflicts = collectConflicts(metadata);
        if (conflicts.length) {
          row.doc._conflicts = conflicts;
        }
      }
      fetchAttachmentsIfNecessary(row.doc, opts, txn);
    };
  }

  // build one result row from a doc's metadata, honoring skip/deleted rules
  function allDocsInner(winningRev$$1, metadata) {
    var row = {
      id: metadata.id,
      key: metadata.id,
      value: {
        rev: winningRev$$1
      }
    };
    var deleted = metadata.deleted;
    if (deleted) {
      if (keys) {
        results.push(row);
        // deleted docs are okay with "keys" requests
        row.value.deleted = true;
        row.doc = null;
      }
    } else if (skip-- <= 0) {
      results.push(row);
      if (opts.include_docs) {
        fetchDocAsynchronously(metadata, row, winningRev$$1);
      }
    }
  }

  // decode a batch of stored metadata values into result rows, stopping
  // once the requested limit is reached
  function processBatch(batchValues) {
    for (var i = 0, len = batchValues.length; i < len; i++) {
      if (results.length === limit) {
        break;
      }
      var batchValue = batchValues[i];
      if (batchValue.error && keys) {
        // key was not found with "keys" requests
        results.push(batchValue);
        continue;
      }
      var metadata = decodeMetadata(batchValue);
      var winningRev$$1 = metadata.winningRev;
      allDocsInner(winningRev$$1, metadata);
    }
  }

  function onBatch(batchKeys, batchValues, cursor) {
    if (!cursor) {
      return;
    }
    processBatch(batchValues);
    if (results.length < limit) {
      cursor.continue();
    }
  }

  function onGetAll(e) {
    var values = e.target.result;
    if (opts.descending) {
      values = values.reverse();
    }
    processBatch(values);
  }

  function onResultsReady() {
    var returnVal = {
      total_rows: docCount,
      offset: opts.skip,
      rows: results
    };

    /* istanbul ignore if */
    if (opts.update_seq && updateSeq !== undefined) {
      returnVal.update_seq = updateSeq;
    }
    callback(null, returnVal);
  }

  function onTxnComplete() {
    if (opts.attachments) {
      postProcessAttachments(results, opts.binary).then(onResultsReady);
    } else {
      onResultsReady();
    }
  }

  // don't bother doing any requests if start > end or limit === 0
  // (the callback still fires via txn.oncomplete, with empty rows)
  if (keyRangeError || limit === 0) {
    return;
  }
  if (keys) {
    return allDocsKeys(keys, docStore, onBatch);
  }
  if (limit === -1) { // just fetch everything
    return getAll(docStore, keyRange, onGetAll);
  }
  // else do a cursor
  // choose a batch size based on the skip, since we'll need to skip that many
  runBatchedCursor(docStore, keyRange, opts.descending, limit + skip, onBatch);
}
8114
// Count non-deleted, non-local documents via the 'deletedOrLocal' index
// ('0' marks docs that are neither deleted nor local).
function countDocs(txn, cb) {
  var store = txn.objectStore(DOC_STORE);
  var req = store.index('deletedOrLocal').count(IDBKeyRange.only('0'));
  req.onsuccess = function (e) {
    cb(e.target.result);
  };
}
8121
// This task queue ensures that IDB open calls are done in their own tick

// true while a queued task is currently executing; cleared by the task's
// completion callback in enqueueTask
var running = false;
// pending tasks, executed one at a time in FIFO order by applyNext
var queue = [];
8126
// Invoke a user-supplied callback with (err, res), routing any synchronous
// exception to the PouchDB 'error' event instead of letting it escape.
function tryCode(fun, err, res, PouchDB) {
  try {
    fun(err, res);
  } catch (thrown) {
    // Shouldn't happen, but in some odd cases IndexedDB implementations
    // might throw a sync error; this at least surfaces it as an event.
    PouchDB.emit('error', thrown);
  }
}
8137
// Run the next queued task, unless one is already in flight or the queue
// is empty. `running` is cleared by the task's completion callback.
function applyNext() {
  if (running) {
    return;
  }
  if (!queue.length) {
    return;
  }
  running = true;
  var nextTask = queue.shift();
  nextTask();
}
8145
// Queue `action` for serialized execution; when it finishes, invoke
// `callback` (guarded by tryCode so callback errors become 'error' events),
// release the queue, and kick off the next task on a fresh tick.
function enqueueTask(action, callback, PouchDB) {
  function onActionDone(err, res) {
    tryCode(callback, err, res, PouchDB);
    running = false;
    nextTick(function () {
      applyNext(PouchDB);
    });
  }
  queue.push(function () {
    action(onActionDone);
  });
  applyNext();
}
8158
// _changes for the idb adapter. Continuous feeds are delegated to the
// shared changesHandler; one-shot feeds walk the by-seq store with a
// batched cursor, resolve each row to its winning revision, apply the
// change filter, and report via opts.onChange / opts.complete.
function changes(opts, api, dbName, idb) {
  opts = clone(opts);

  if (opts.continuous) {
    // live feed: register a listener and let the handler drive it
    var id = dbName + ':' + uuid$1();
    changesHandler.addListener(dbName, id, api, opts);
    changesHandler.notify(dbName);
    return {
      cancel: function () {
        changesHandler.removeListener(dbName, id);
      }
    };
  }

  var docIds = opts.doc_ids && new Set(opts.doc_ids);

  opts.since = opts.since || 0;
  var lastSeq = opts.since;

  var limit = 'limit' in opts ? opts.limit : -1;
  if (limit === 0) {
    limit = 1; // per CouchDB _changes spec
  }

  var results = [];
  var numResults = 0;
  var filter = filterChange(opts);
  // cache of docId -> decoded metadata, so repeated revs of the same doc
  // don't re-fetch from the doc store
  var docIdsToMetadata = new Map();

  var txn;
  var bySeqStore;
  var docStore;
  var docIdRevIndex;

  // handle one batch of (seq, doc) pairs from the batched cursor
  function onBatch(batchKeys, batchValues, cursor) {
    if (!cursor || !batchKeys.length) { // done
      return;
    }

    var winningDocs = new Array(batchKeys.length);
    var metadatas = new Array(batchKeys.length);

    // turn one winning doc into a change, filter it, and (optionally)
    // inline its attachments; resolves to the change or undefined
    function processMetadataAndWinningDoc(metadata, winningDoc) {
      var change = opts.processChange(winningDoc, metadata, opts);
      lastSeq = change.seq = metadata.seq;

      var filtered = filter(change);
      if (typeof filtered === 'object') { // anything but true/false indicates error
        return Promise.reject(filtered);
      }

      if (!filtered) {
        return Promise.resolve();
      }
      numResults++;
      if (opts.return_docs) {
        results.push(change);
      }
      // process the attachment immediately
      // for the benefit of live listeners
      if (opts.attachments && opts.include_docs) {
        return new Promise(function (resolve) {
          fetchAttachmentsIfNecessary(winningDoc, opts, txn, function () {
            postProcessAttachments([change], opts.binary).then(function () {
              resolve(change);
            });
          });
        });
      } else {
        return Promise.resolve(change);
      }
    }

    // once all winning docs for the batch are known, emit their changes
    // in batch order and advance the cursor if the limit isn't reached
    function onBatchDone() {
      var promises = [];
      for (var i = 0, len = winningDocs.length; i < len; i++) {
        if (numResults === limit) {
          break;
        }
        var winningDoc = winningDocs[i];
        if (!winningDoc) {
          continue;
        }
        var metadata = metadatas[i];
        promises.push(processMetadataAndWinningDoc(metadata, winningDoc));
      }

      Promise.all(promises).then(function (changes) {
        for (var i = 0, len = changes.length; i < len; i++) {
          if (changes[i]) {
            opts.onChange(changes[i]);
          }
        }
      }).catch(opts.complete);

      if (numResults !== limit) {
        cursor.continue();
      }
    }

    // Fetch all metadatas/winningdocs from this batch in parallel, then process
    // them all only once all data has been collected. This is done in parallel
    // because it's faster than doing it one-at-a-time.
    var numDone = 0;
    batchValues.forEach(function (value, i) {
      var doc = decodeDoc(value);
      var seq = batchKeys[i];
      fetchWinningDocAndMetadata(doc, seq, function (metadata, winningDoc) {
        metadatas[i] = metadata;
        winningDocs[i] = winningDoc;
        if (++numDone === batchKeys.length) {
          onBatchDone();
        }
      });
    });
  }

  // given a doc at `seq` plus its metadata, pass the winning doc for that
  // metadata to cb, or cb() with no args if this seq should be skipped
  function onGetMetadata(doc, seq, metadata, cb) {
    if (metadata.seq !== seq) {
      // some other seq is later
      return cb();
    }

    if (metadata.winningRev === doc._rev) {
      // this is the winning doc
      return cb(metadata, doc);
    }

    // fetch winning doc in separate request
    var docIdRev = doc._id + '::' + metadata.winningRev;
    var req = docIdRevIndex.get(docIdRev);
    req.onsuccess = function (e) {
      cb(metadata, decodeDoc(e.target.result));
    };
  }

  function fetchWinningDocAndMetadata(doc, seq, cb) {
    // doc_ids filter: skip docs we weren't asked about
    if (docIds && !docIds.has(doc._id)) {
      return cb();
    }

    var metadata = docIdsToMetadata.get(doc._id);
    if (metadata) { // cached
      return onGetMetadata(doc, seq, metadata, cb);
    }
    // metadata not cached, have to go fetch it
    docStore.get(doc._id).onsuccess = function (e) {
      metadata = decodeMetadata(e.target.result);
      docIdsToMetadata.set(doc._id, metadata);
      onGetMetadata(doc, seq, metadata, cb);
    };
  }

  function finish() {
    opts.complete(null, {
      results,
      last_seq: lastSeq
    });
  }

  function onTxnComplete() {
    if (!opts.continuous && opts.attachments) {
      // cannot guarantee that postProcessing was already done,
      // so do it again
      postProcessAttachments(results).then(finish);
    } else {
      finish();
    }
  }

  var objectStores = [DOC_STORE, BY_SEQ_STORE];
  if (opts.attachments) {
    objectStores.push(ATTACH_STORE);
  }
  var txnResult = openTransactionSafely(idb, objectStores, 'readonly');
  if (txnResult.error) {
    return opts.complete(txnResult.error);
  }
  txn = txnResult.txn;
  txn.onabort = idbError(opts.complete);
  txn.oncomplete = onTxnComplete;

  bySeqStore = txn.objectStore(BY_SEQ_STORE);
  docStore = txn.objectStore(DOC_STORE);
  docIdRevIndex = bySeqStore.index('_doc_id_rev');

  // start just past `since` (descending feeds scan everything)
  var keyRange = (opts.since && !opts.descending) ?
    IDBKeyRange.lowerBound(opts.since, true) : null;

  runBatchedCursor(bySeqStore, keyRange, opts.descending, limit, onBatch);
}
8350
// open database handles keyed by db name (deleted again in api._close)
var cachedDBs = new Map();
// shared promise for one-time blob-support detection -- NOTE(review):
// initialized outside this chunk; presumably memoizes the probe result
var blobSupportPromise;
// open/in-flight requests keyed by db name -- NOTE(review): consumers are
// outside this chunk; verify against destroy()/open handling
var openReqList = new Map();
8354
// Constructor entry point for the IndexedDB adapter: defers the actual
// open/init work onto the serialized task queue so that IDB open calls
// happen in their own tick.
function IdbPouch(opts, callback) {
  var api = this;
  var runInit = function (thisCallback) {
    init(api, opts, thisCallback);
  };
  enqueueTask(runInit, callback, api.constructor);
}
8362
8363function init(api, opts, callback) {
8364
8365 var dbName = opts.name;
8366
8367 var idb = null;
8368 var idbGlobalFailureError = null;
8369 api._meta = null;
8370
8371 function enrichCallbackError(callback) {
8372 return function (error, result) {
8373 if (error && error instanceof Error && !error.reason) {
8374 if (idbGlobalFailureError) {
8375 error.reason = idbGlobalFailureError;
8376 }
8377 }
8378
8379 callback(error, result);
8380 };
8381 }
8382
8383 // called when creating a fresh new database
8384 function createSchema(db) {
8385 var docStore = db.createObjectStore(DOC_STORE, {keyPath : 'id'});
8386 db.createObjectStore(BY_SEQ_STORE, {autoIncrement: true})
8387 .createIndex('_doc_id_rev', '_doc_id_rev', {unique: true});
8388 db.createObjectStore(ATTACH_STORE, {keyPath: 'digest'});
8389 db.createObjectStore(META_STORE, {keyPath: 'id', autoIncrement: false});
8390 db.createObjectStore(DETECT_BLOB_SUPPORT_STORE);
8391
8392 // added in v2
8393 docStore.createIndex('deletedOrLocal', 'deletedOrLocal', {unique : false});
8394
8395 // added in v3
8396 db.createObjectStore(LOCAL_STORE, {keyPath: '_id'});
8397
8398 // added in v4
8399 var attAndSeqStore = db.createObjectStore(ATTACH_AND_SEQ_STORE,
8400 {autoIncrement: true});
8401 attAndSeqStore.createIndex('seq', 'seq');
8402 attAndSeqStore.createIndex('digestSeq', 'digestSeq', {unique: true});
8403 }
8404
8405 // migration to version 2
8406 // unfortunately "deletedOrLocal" is a misnomer now that we no longer
8407 // store local docs in the main doc-store, but whaddyagonnado
8408 function addDeletedOrLocalIndex(txn, callback) {
8409 var docStore = txn.objectStore(DOC_STORE);
8410 docStore.createIndex('deletedOrLocal', 'deletedOrLocal', {unique : false});
8411
8412 docStore.openCursor().onsuccess = function (event) {
8413 var cursor = event.target.result;
8414 if (cursor) {
8415 var metadata = cursor.value;
8416 var deleted = isDeleted(metadata);
8417 metadata.deletedOrLocal = deleted ? "1" : "0";
8418 docStore.put(metadata);
8419 cursor.continue();
8420 } else {
8421 callback();
8422 }
8423 };
8424 }
8425
8426 // migration to version 3 (part 1)
8427 function createLocalStoreSchema(db) {
8428 db.createObjectStore(LOCAL_STORE, {keyPath: '_id'})
8429 .createIndex('_doc_id_rev', '_doc_id_rev', {unique: true});
8430 }
8431
8432 // migration to version 3 (part 2)
8433 function migrateLocalStore(txn, cb) {
8434 var localStore = txn.objectStore(LOCAL_STORE);
8435 var docStore = txn.objectStore(DOC_STORE);
8436 var seqStore = txn.objectStore(BY_SEQ_STORE);
8437
8438 var cursor = docStore.openCursor();
8439 cursor.onsuccess = function (event) {
8440 var cursor = event.target.result;
8441 if (cursor) {
8442 var metadata = cursor.value;
8443 var docId = metadata.id;
8444 var local = isLocalId(docId);
8445 var rev$$1 = winningRev(metadata);
8446 if (local) {
8447 var docIdRev = docId + "::" + rev$$1;
8448 // remove all seq entries
8449 // associated with this docId
8450 var start = docId + "::";
8451 var end = docId + "::~";
8452 var index = seqStore.index('_doc_id_rev');
8453 var range = IDBKeyRange.bound(start, end, false, false);
8454 var seqCursor = index.openCursor(range);
8455 seqCursor.onsuccess = function (e) {
8456 seqCursor = e.target.result;
8457 if (!seqCursor) {
8458 // done
8459 docStore.delete(cursor.primaryKey);
8460 cursor.continue();
8461 } else {
8462 var data = seqCursor.value;
8463 if (data._doc_id_rev === docIdRev) {
8464 localStore.put(data);
8465 }
8466 seqStore.delete(seqCursor.primaryKey);
8467 seqCursor.continue();
8468 }
8469 };
8470 } else {
8471 cursor.continue();
8472 }
8473 } else if (cb) {
8474 cb();
8475 }
8476 };
8477 }
8478
8479 // migration to version 4 (part 1)
8480 function addAttachAndSeqStore(db) {
8481 var attAndSeqStore = db.createObjectStore(ATTACH_AND_SEQ_STORE,
8482 {autoIncrement: true});
8483 attAndSeqStore.createIndex('seq', 'seq');
8484 attAndSeqStore.createIndex('digestSeq', 'digestSeq', {unique: true});
8485 }
8486
8487 // migration to version 4 (part 2)
8488 function migrateAttsAndSeqs(txn, callback) {
8489 var seqStore = txn.objectStore(BY_SEQ_STORE);
8490 var attStore = txn.objectStore(ATTACH_STORE);
8491 var attAndSeqStore = txn.objectStore(ATTACH_AND_SEQ_STORE);
8492
8493 // need to actually populate the table. this is the expensive part,
8494 // so as an optimization, check first that this database even
8495 // contains attachments
8496 var req = attStore.count();
8497 req.onsuccess = function (e) {
8498 var count = e.target.result;
8499 if (!count) {
8500 return callback(); // done
8501 }
8502
8503 seqStore.openCursor().onsuccess = function (e) {
8504 var cursor = e.target.result;
8505 if (!cursor) {
8506 return callback(); // done
8507 }
8508 var doc = cursor.value;
8509 var seq = cursor.primaryKey;
8510 var atts = Object.keys(doc._attachments || {});
8511 var digestMap = {};
8512 for (var j = 0; j < atts.length; j++) {
8513 var att = doc._attachments[atts[j]];
8514 digestMap[att.digest] = true; // uniq digests, just in case
8515 }
8516 var digests = Object.keys(digestMap);
8517 for (j = 0; j < digests.length; j++) {
8518 var digest = digests[j];
8519 attAndSeqStore.put({
8520 seq,
8521 digestSeq: digest + '::' + seq
8522 });
8523 }
8524 cursor.continue();
8525 };
8526 };
8527 }
8528
8529 // migration to version 5
8530 // Instead of relying on on-the-fly migration of metadata,
8531 // this brings the doc-store to its modern form:
8532 // - metadata.winningrev
8533 // - metadata.seq
8534 // - stringify the metadata when storing it
8535 function migrateMetadata(txn) {
8536
8537 function decodeMetadataCompat(storedObject) {
8538 if (!storedObject.data) {
8539 // old format, when we didn't store it stringified
8540 storedObject.deleted = storedObject.deletedOrLocal === '1';
8541 return storedObject;
8542 }
8543 return decodeMetadata(storedObject);
8544 }
8545
8546 // ensure that every metadata has a winningRev and seq,
8547 // which was previously created on-the-fly but better to migrate
8548 var bySeqStore = txn.objectStore(BY_SEQ_STORE);
8549 var docStore = txn.objectStore(DOC_STORE);
8550 var cursor = docStore.openCursor();
8551 cursor.onsuccess = function (e) {
8552 var cursor = e.target.result;
8553 if (!cursor) {
8554 return; // done
8555 }
8556 var metadata = decodeMetadataCompat(cursor.value);
8557
8558 metadata.winningRev = metadata.winningRev ||
8559 winningRev(metadata);
8560
8561 function fetchMetadataSeq() {
8562 // metadata.seq was added post-3.2.0, so if it's missing,
8563 // we need to fetch it manually
8564 var start = metadata.id + '::';
8565 var end = metadata.id + '::\uffff';
8566 var req = bySeqStore.index('_doc_id_rev').openCursor(
8567 IDBKeyRange.bound(start, end));
8568
8569 var metadataSeq = 0;
8570 req.onsuccess = function (e) {
8571 var cursor = e.target.result;
8572 if (!cursor) {
8573 metadata.seq = metadataSeq;
8574 return onGetMetadataSeq();
8575 }
8576 var seq = cursor.primaryKey;
8577 if (seq > metadataSeq) {
8578 metadataSeq = seq;
8579 }
8580 cursor.continue();
8581 };
8582 }
8583
8584 function onGetMetadataSeq() {
8585 var metadataToStore = encodeMetadata(metadata,
8586 metadata.winningRev, metadata.deleted);
8587
8588 var req = docStore.put(metadataToStore);
8589 req.onsuccess = function () {
8590 cursor.continue();
8591 };
8592 }
8593
8594 if (metadata.seq) {
8595 return onGetMetadataSeq();
8596 }
8597
8598 fetchMetadataSeq();
8599 };
8600
8601 }
8602
8603 api._remote = false;
8604 api.type = function () {
8605 return 'idb';
8606 };
8607
8608 api._id = toPromise(function (callback) {
8609 callback(null, api._meta.instanceId);
8610 });
8611
8612 api._bulkDocs = function idb_bulkDocs(req, reqOpts, callback) {
8613 idbBulkDocs(opts, req, reqOpts, api, idb, enrichCallbackError(callback));
8614 };
8615
8616 // First we look up the metadata in the ids database, then we fetch the
8617 // current revision(s) from the by sequence store
8618 api._get = function idb_get(id, opts, callback) {
8619 var doc;
8620 var metadata;
8621 var err;
8622 var txn = opts.ctx;
8623 if (!txn) {
8624 var txnResult = openTransactionSafely(idb,
8625 [DOC_STORE, BY_SEQ_STORE, ATTACH_STORE], 'readonly');
8626 if (txnResult.error) {
8627 return callback(txnResult.error);
8628 }
8629 txn = txnResult.txn;
8630 }
8631
8632 function finish() {
8633 callback(err, {doc, metadata, ctx: txn});
8634 }
8635
8636 txn.objectStore(DOC_STORE).get(id).onsuccess = function (e) {
8637 metadata = decodeMetadata(e.target.result);
8638 // we can determine the result here if:
8639 // 1. there is no such document
8640 // 2. the document is deleted and we don't ask about specific rev
8641 // When we ask with opts.rev we expect the answer to be either
8642 // doc (possibly with _deleted=true) or missing error
8643 if (!metadata) {
8644 err = createError(MISSING_DOC, 'missing');
8645 return finish();
8646 }
8647
8648 var rev$$1;
8649 if (!opts.rev) {
8650 rev$$1 = metadata.winningRev;
8651 var deleted = isDeleted(metadata);
8652 if (deleted) {
8653 err = createError(MISSING_DOC, "deleted");
8654 return finish();
8655 }
8656 } else {
8657 rev$$1 = opts.latest ? latest(opts.rev, metadata) : opts.rev;
8658 }
8659
8660 var objectStore = txn.objectStore(BY_SEQ_STORE);
8661 var key = metadata.id + '::' + rev$$1;
8662
8663 objectStore.index('_doc_id_rev').get(key).onsuccess = function (e) {
8664 doc = e.target.result;
8665 if (doc) {
8666 doc = decodeDoc(doc);
8667 }
8668 if (!doc) {
8669 err = createError(MISSING_DOC, 'missing');
8670 return finish();
8671 }
8672 finish();
8673 };
8674 };
8675 };
8676
8677 api._getAttachment = function (docId, attachId, attachment, opts, callback) {
8678 var txn;
8679 if (opts.ctx) {
8680 txn = opts.ctx;
8681 } else {
8682 var txnResult = openTransactionSafely(idb,
8683 [DOC_STORE, BY_SEQ_STORE, ATTACH_STORE], 'readonly');
8684 if (txnResult.error) {
8685 return callback(txnResult.error);
8686 }
8687 txn = txnResult.txn;
8688 }
8689 var digest = attachment.digest;
8690 var type = attachment.content_type;
8691
8692 txn.objectStore(ATTACH_STORE).get(digest).onsuccess = function (e) {
8693 var body = e.target.result.body;
8694 readBlobData(body, type, opts.binary, function (blobData) {
8695 callback(null, blobData);
8696 });
8697 };
8698 };
8699
8700 api._info = function idb_info(callback) {
8701 var updateSeq;
8702 var docCount;
8703
8704 var txnResult = openTransactionSafely(idb, [META_STORE, BY_SEQ_STORE], 'readonly');
8705 if (txnResult.error) {
8706 return callback(txnResult.error);
8707 }
8708 var txn = txnResult.txn;
8709 txn.objectStore(META_STORE).get(META_STORE).onsuccess = function (e) {
8710 docCount = e.target.result.docCount;
8711 };
8712 txn.objectStore(BY_SEQ_STORE).openKeyCursor(null, 'prev').onsuccess = function (e) {
8713 var cursor = e.target.result;
8714 updateSeq = cursor ? cursor.key : 0;
8715 };
8716
8717 txn.oncomplete = function () {
8718 callback(null, {
8719 doc_count: docCount,
8720 update_seq: updateSeq,
8721 // for debugging
8722 idb_attachment_format: (api._meta.blobSupport ? 'binary' : 'base64')
8723 });
8724 };
8725 };
8726
8727 api._allDocs = function idb_allDocs(opts, callback) {
8728 idbAllDocs(opts, idb, enrichCallbackError(callback));
8729 };
8730
// Delegate the changes feed to the shared changes() implementation.
api._changes = function idbChanges(opts) {
  return changes(opts, api, dbName, idb);
};
8734
// Close this connection and evict it from the connection cache so the
// next open creates a fresh IDBDatabase handle.
api._close = function (callback) {
  // https://developer.mozilla.org/en-US/docs/IndexedDB/IDBDatabase#close
  // "Returns immediately and closes the connection in a separate thread..."
  idb.close();
  cachedDBs.delete(dbName);
  callback();
};
8742
// Fetch the revision tree for a document from its metadata record.
// Calls back with MISSING_DOC when the doc has no metadata entry.
api._getRevisionTree = function (docId, callback) {
  var txnResult = openTransactionSafely(idb, [DOC_STORE], 'readonly');
  if (txnResult.error) {
    return callback(txnResult.error);
  }
  var txn = txnResult.txn;
  var req = txn.objectStore(DOC_STORE).get(docId);
  req.onsuccess = function (event) {
    var doc = decodeMetadata(event.target.result);
    if (!doc) {
      callback(createError(MISSING_DOC));
    } else {
      callback(null, doc.rev_tree);
    }
  };
};
8759
// This function removes revisions of document docId
// which are listed in revs and sets this document
// revision to rev_tree
api._doCompaction = function (docId, revs, callback) {
  var stores = [
    DOC_STORE,
    BY_SEQ_STORE,
    ATTACH_STORE,
    ATTACH_AND_SEQ_STORE
  ];
  var txnResult = openTransactionSafely(idb, stores, 'readwrite');
  if (txnResult.error) {
    return callback(txnResult.error);
  }
  var txn = txnResult.txn;

  var docStore = txn.objectStore(DOC_STORE);

  docStore.get(docId).onsuccess = function (event) {
    var metadata = decodeMetadata(event.target.result);
    // Mark every compacted revision as 'missing' in the rev tree so the
    // tree structure survives but the bodies can be dropped.
    traverseRevTree(metadata.rev_tree, function (isLeaf, pos,
                                                 revHash, ctx, opts) {
      var rev$$1 = pos + '-' + revHash;
      if (revs.indexOf(rev$$1) !== -1) {
        opts.status = 'missing';
      }
    });
    // Delete the revision bodies and any attachments that are no longer
    // referenced by a live revision.
    compactRevs(revs, docId, txn);
    var winningRev$$1 = metadata.winningRev;
    var deleted = metadata.deleted;
    txn.objectStore(DOC_STORE).put(
      encodeMetadata(metadata, winningRev$$1, deleted));
  };
  txn.onabort = idbError(callback);
  txn.oncomplete = function () {
    callback();
  };
};
8798
8799
// Fetch a _local (non-replicated) document by id from the local store.
api._getLocal = function (id, callback) {
  var txnResult = openTransactionSafely(idb, [LOCAL_STORE], 'readonly');
  if (txnResult.error) {
    return callback(txnResult.error);
  }
  var tx = txnResult.txn;
  var req = tx.objectStore(LOCAL_STORE).get(id);

  req.onerror = idbError(callback);
  req.onsuccess = function (e) {
    var doc = e.target.result;
    if (!doc) {
      callback(createError(MISSING_DOC));
    } else {
      delete doc['_doc_id_rev']; // for backwards compat
      callback(null, doc);
    }
  };
};
8819
// Write a _local document. Local docs use simple integer revs ('0-N');
// an update must present the current rev, and a create fails if the id
// already exists. When opts.ctx supplies a transaction, the callback
// fires as soon as the put succeeds; otherwise it waits for oncomplete.
api._putLocal = function (doc, opts, callback) {
  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }
  delete doc._revisions; // ignore this, trust the rev
  var oldRev = doc._rev;
  var id = doc._id;
  if (!oldRev) {
    doc._rev = '0-1';
  } else {
    // Bump the integer part of the '0-N' local rev.
    doc._rev = '0-' + (parseInt(oldRev.split('-')[1], 10) + 1);
  }

  var tx = opts.ctx;
  var ret;
  if (!tx) {
    var txnResult = openTransactionSafely(idb, [LOCAL_STORE], 'readwrite');
    if (txnResult.error) {
      return callback(txnResult.error);
    }
    tx = txnResult.txn;
    tx.onerror = idbError(callback);
    tx.oncomplete = function () {
      // `ret` is only set on success; skip the callback after a conflict.
      if (ret) {
        callback(null, ret);
      }
    };
  }

  var oStore = tx.objectStore(LOCAL_STORE);
  var req;
  if (oldRev) {
    req = oStore.get(id);
    req.onsuccess = function (e) {
      var oldDoc = e.target.result;
      // The stored rev must match the rev the caller is updating from.
      if (!oldDoc || oldDoc._rev !== oldRev) {
        callback(createError(REV_CONFLICT));
      } else { // update
        var req = oStore.put(doc);
        req.onsuccess = function () {
          ret = {ok: true, id: doc._id, rev: doc._rev};
          if (opts.ctx) { // return immediately
            callback(null, ret);
          }
        };
      }
    };
  } else { // new doc
    req = oStore.add(doc);
    req.onerror = function (e) {
      // constraint error, already exists
      callback(createError(REV_CONFLICT));
      e.preventDefault(); // avoid transaction abort
      e.stopPropagation(); // avoid transaction onerror
    };
    req.onsuccess = function () {
      ret = {ok: true, id: doc._id, rev: doc._rev};
      if (opts.ctx) { // return immediately
        callback(null, ret);
      }
    };
  }
};
8884
// Delete a _local document. The stored rev must match doc._rev, mirroring
// the conflict check in _putLocal. With opts.ctx the callback fires
// immediately; otherwise it waits for the transaction to complete.
api._removeLocal = function (doc, opts, callback) {
  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }
  var tx = opts.ctx;
  if (!tx) {
    var txnResult = openTransactionSafely(idb, [LOCAL_STORE], 'readwrite');
    if (txnResult.error) {
      return callback(txnResult.error);
    }
    tx = txnResult.txn;
    tx.oncomplete = function () {
      // `ret` (declared below) is hoisted into scope; it is only set on
      // success, so a missing-doc error does not double-fire the callback.
      if (ret) {
        callback(null, ret);
      }
    };
  }
  var ret;
  var id = doc._id;
  var oStore = tx.objectStore(LOCAL_STORE);
  var req = oStore.get(id);

  req.onerror = idbError(callback);
  req.onsuccess = function (e) {
    var oldDoc = e.target.result;
    if (!oldDoc || oldDoc._rev !== doc._rev) {
      callback(createError(MISSING_DOC));
    } else {
      oStore.delete(id);
      ret = {ok: true, id, rev: '0-0'};
      if (opts.ctx) { // return immediately
        callback(null, ret);
      }
    }
  };
};
8922
// Delete the whole IndexedDB database backing this adapter instance,
// tearing down change listeners, the cached connection, and any
// localStorage bookkeeping for this db name.
api._destroy = function (opts, callback) {
  changesHandler.removeAllListeners(dbName);

  //Close open request for "dbName" database to fix ie delay.
  var openReq = openReqList.get(dbName);
  if (openReq && openReq.result) {
    openReq.result.close();
    cachedDBs.delete(dbName);
  }
  var req = indexedDB.deleteDatabase(dbName);

  req.onsuccess = function () {
    //Remove open request from the list.
    openReqList.delete(dbName);
    if (hasLocalStorage() && (dbName in localStorage)) {
      delete localStorage[dbName];
    }
    callback(null, { 'ok': true });
  };

  req.onerror = idbError(callback);
};
8945
// Reuse an already-open connection to this database if we have one; the
// api methods above close over `idb`, so the cached handle plus cached
// meta is all the per-instance state needed.
var cached = cachedDBs.get(dbName);

if (cached) {
  idb = cached.idb;
  api._meta = cached.global;
  return nextTick(function () {
    callback(null, api);
  });
}

var req = indexedDB.open(dbName, ADAPTER_VERSION);
openReqList.set(dbName, req);

req.onupgradeneeded = function (e) {
  var db = e.target.result;
  if (e.oldVersion < 1) {
    return createSchema(db); // new db, initial schema
  }
  // do migrations

  var txn = e.currentTarget.transaction;
  // these migrations have to be done in this function, before
  // control is returned to the event loop, because IndexedDB
  // versionchange transactions auto-commit once control returns
  // to the event loop.

  if (e.oldVersion < 3) {
    createLocalStoreSchema(db); // v2 -> v3
  }
  if (e.oldVersion < 4) {
    addAttachAndSeqStore(db); // v3 -> v4
  }

  var migrations = [
    addDeletedOrLocalIndex, // v1 -> v2
    migrateLocalStore, // v2 -> v3
    migrateAttsAndSeqs, // v3 -> v4
    migrateMetadata // v4 -> v5
  ];

  // Run the applicable data migrations sequentially, starting from the
  // stored schema version: migrations[oldVersion - 1] is the first one.
  var i = e.oldVersion;

  function next() {
    var migration = migrations[i - 1];
    i++;
    if (migration) {
      migration(txn, next);
    }
  }

  next();
};

req.onsuccess = function (e) {

  idb = e.target.result;

  // Another connection wants to upgrade or delete this database; release
  // our handle and evict it from the cache so it can proceed.
  idb.onversionchange = function () {
    idb.close();
    cachedDBs.delete(dbName);
  };

  idb.onabort = function (e) {
    guardedConsole('error', 'Database has a global failure', e.target.error);
    idbGlobalFailureError = e.target.error;
    idb.close();
    cachedDBs.delete(dbName);
  };

  // Do a few setup operations (in parallel as much as possible):
  // 1. Fetch meta doc
  // 2. Check blob support
  // 3. Calculate docCount
  // 4. Generate an instanceId if necessary
  // 5. Store docCount and instanceId on meta doc

  var txn = idb.transaction([
    META_STORE,
    DETECT_BLOB_SUPPORT_STORE,
    DOC_STORE
  ], 'readwrite');

  var storedMetaDoc = false;
  var metaDoc;
  var docCount;
  var blobSupport;
  var instanceId;

  // Finish only after BOTH the blob-support probe has resolved and the
  // meta-doc write transaction has committed (in either order).
  function completeSetup() {
    if (typeof blobSupport === 'undefined' || !storedMetaDoc) {
      return;
    }
    api._meta = {
      name: dbName,
      instanceId,
      blobSupport
    };

    cachedDBs.set(dbName, {
      idb,
      global: api._meta
    });
    callback(null, api);
  }

  // Write the meta doc once both the doc count and the existing meta doc
  // have been read (their reads race; whichever finishes second triggers
  // the put).
  function storeMetaDocIfReady() {
    if (typeof docCount === 'undefined' || typeof metaDoc === 'undefined') {
      return;
    }
    var instanceKey = dbName + '_id';
    if (instanceKey in metaDoc) {
      instanceId = metaDoc[instanceKey];
    } else {
      metaDoc[instanceKey] = instanceId = uuid$1();
    }
    metaDoc.docCount = docCount;
    txn.objectStore(META_STORE).put(metaDoc);
  }

  //
  // fetch or generate the instanceId
  //
  txn.objectStore(META_STORE).get(META_STORE).onsuccess = function (e) {
    metaDoc = e.target.result || { id: META_STORE };
    storeMetaDocIfReady();
  };

  //
  // countDocs
  //
  countDocs(txn, function (count) {
    docCount = count;
    storeMetaDocIfReady();
  });

  //
  // check blob support
  //
  if (!blobSupportPromise) {
    // make sure blob support is only checked once
    blobSupportPromise = checkBlobSupport(txn, DETECT_BLOB_SUPPORT_STORE, 'key');
  }

  blobSupportPromise.then(function (val) {
    blobSupport = val;
    completeSetup();
  });

  // only when the metadata put transaction has completed,
  // consider the setup done
  txn.oncomplete = function () {
    storedMetaDoc = true;
    completeSetup();
  };
  txn.onabort = idbError(callback);
};

req.onerror = function (e) {
  var msg = e.target.error && e.target.error.message;

  if (!msg) {
    msg = 'Failed to open indexedDB, are you in private browsing mode?';
  } else if (msg.indexOf("stored database is a higher version") !== -1) {
    // NOTE(review): msg becomes an Error object in this branch while the
    // other branches use strings; downstream appears to tolerate either.
    msg = new Error('This DB was created with the newer "indexeddb" adapter, but you are trying to open it with the older "idb" adapter');
  }

  guardedConsole('error', msg);
  callback(createError(IDB_ERROR, msg));
};
9113}
9114
// Decide whether the 'idb' adapter can run in this environment.
// Following #7085 buggy idb versions (typically Safari < 10.1) are
// considered valid.
IdbPouch.valid = function () {
  // On Firefox a SecurityError is thrown while referencing indexedDB if
  // cookies are not allowed; even `typeof indexedDB` triggers it, hence
  // the try/catch around the whole check.
  try {
    // Some outdated IDB implementations on Samsung and HTC Android
    // devices < 4.4 are missing IDBKeyRange.
    const hasIndexedDB = typeof indexedDB !== 'undefined';
    const hasKeyRange = typeof IDBKeyRange !== 'undefined';
    return hasIndexedDB && hasKeyRange;
  } catch (e) {
    return false;
  }
};
9129
// Plugin entry point: register the 'idb' adapter on a PouchDB constructor.
// NOTE(review): the third argument presumably flags the adapter for
// automatic use — confirm against PouchDB.adapter's signature.
function IDBPouch (PouchDB) {
  PouchDB.adapter('idb', IdbPouch, true);
}
9133
// dead simple promise pool, inspired by https://github.com/timdp/es6-promise-pool
// but much smaller in code size. limits the number of concurrent promises that are executed


/**
 * Run an array of promise-returning factories with at most `limit`
 * promises in flight at any time.
 *
 * The returned promise resolves once every factory has settled; if any
 * factory rejected, it rejects with the FIRST error — but only after all
 * factories have settled (it does not fail fast).
 *
 * @param {Array<function(): Promise>} promiseFactories
 * @param {number} limit - maximum number of concurrent promises
 * @returns {Promise<void>}
 */
function pool(promiseFactories, limit) {
  return new Promise(function (resolve, reject) {
    var running = 0;
    var current = 0;
    var done = 0;
    var len = promiseFactories.length;
    var err;

    // Bug fix: with an empty input no factory ever runs, so doNext()
    // would never be called and the promise would hang forever.
    if (len === 0) {
      return resolve();
    }

    function runNext() {
      running++;
      promiseFactories[current++]().then(onSuccess, onError);
    }

    function doNext() {
      if (++done === len) {
        /* istanbul ignore if */
        if (err) {
          reject(err);
        } else {
          resolve();
        }
      } else {
        runNextBatch();
      }
    }

    function onSuccess() {
      running--;
      doNext();
    }

    /* istanbul ignore next */
    function onError(thisErr) {
      running--;
      // keep only the first error; subsequent ones are dropped
      err = err || thisErr;
      doNext();
    }

    function runNextBatch() {
      while (running < limit && current < len) {
        runNext();
      }
    }

    runNextBatch();
  });
}
9185
// Tuning constants for the HTTP adapter.
const CHANGES_BATCH_SIZE = 25;       // NOTE(review): likely the _changes page size; used later in the file
const MAX_SIMULTANEOUS_REVS = 50;    // batch size for the _bulk_get fallback shim below
const CHANGES_TIMEOUT_BUFFER = 5000; // NOTE(review): likely extra ms on top of the changes timeout; used later in the file
const DEFAULT_HEARTBEAT = 10000;     // NOTE(review): likely default live-changes heartbeat in ms; used later in the file

// Cache of whether a given remote URL supports the _bulk_get API (see api.bulkGet).
const supportsBulkGetMap = {};
9192
// Convert every base64 attachment body on a row's doc into a
// blob/buffer, in place. Rows without a doc or attachments are left
// untouched.
function readAttachmentsAsBlobOrBuffer(row) {
  const doc = row.doc || row.ok;
  if (!doc || !doc._attachments) {
    return;
  }
  for (const filename of Object.keys(doc._attachments)) {
    const att = doc._attachments[filename];
    att.data = b64ToBluffer(att.data, att.content_type);
  }
}
9204
// URL-encode a document id, preserving the special '_design/' and
// '_local/' prefixes so they stay valid URL path segments.
function encodeDocId(id) {
  if (id.startsWith('_design')) {
    return `_design/${encodeURIComponent(id.slice(8))}`;
  }
  if (id.startsWith('_local/')) {
    return `_local/${encodeURIComponent(id.slice(7))}`;
  }
  return encodeURIComponent(id);
}
9214
/**
 * Normalize every attachment body on `doc` (in place) to a base64 string
 * so the document can be serialized as JSON for the remote. Attachment
 * data that is already a string is assumed to be base64 and left as-is.
 *
 * @param {Object} doc - document whose `_attachments` are converted
 * @returns {Promise<void>} resolves once all attachments are converted
 */
function preprocessAttachments$1(doc) {
  // Bug fix: the original guard tested `!Object.keys(doc._attachments)`,
  // which is always false (an array is truthy even when empty); test the
  // length so attachment-less docs short-circuit as intended.
  if (!doc._attachments || !Object.keys(doc._attachments).length) {
    return Promise.resolve();
  }

  return Promise.all(Object.keys(doc._attachments).map(function (key) {
    const attachment = doc._attachments[key];
    if (attachment.data && typeof attachment.data !== 'string') {
      return new Promise(function (resolve) {
        blobToBase64(attachment.data, resolve);
      }).then(function (b64) {
        attachment.data = b64;
      });
    }
  }));
}
9231
// True when opts.prefix is set and is an http(s) URL (as opposed to a
// plain name prefix).
function hasUrlPrefix(opts) {
  if (!opts.prefix) {
    return false;
  }
  return ['http', 'https'].includes(parseUri(opts.prefix).protocol);
}
9239
// Get all the information you possibly can about the URI given by name and
// return it as a suitable object.
function getHost(name, opts) {
  // encode db name if opts.prefix is a url (#5574)
  if (hasUrlPrefix(opts)) {
    const rawDbName = opts.name.slice(opts.prefix.length);
    // Ensure the prefix ends in exactly one trailing slash.
    name = opts.prefix.replace(/\/?$/, '/') + encodeURIComponent(rawDbName);
  }

  const uri = parseUri(name);
  if (uri.user || uri.password) {
    uri.auth = {username: uri.user, password: uri.password};
  }

  // Split the path into segments after stripping any leading and
  // trailing '/' characters.
  const segments = uri.path.replace(/(^\/|\/$)/g, '').split('/');

  // The last segment is the database name; the rest is the host path.
  uri.db = segments.pop();
  // Prevent double encoding of URI component
  if (!uri.db.includes('%')) {
    uri.db = encodeURIComponent(uri.db);
  }

  uri.path = segments.join('/');

  return uri;
}
9270
// Generate a URL with the host data given by opts and the given path,
// scoped under the database name.
function genDBUrl(opts, path) {
  return genUrl(opts, `${opts.db}/${path}`);
}
9275
// Generate a URL with the host data given by opts and the given path.
function genUrl(opts, path) {
  const portPart = opts.port ? `:${opts.port}` : '';
  // If the host already has a path, a '/' delimiter is needed before the
  // request path; otherwise the delimiter is the empty string.
  const pathDel = opts.path ? '/' : '';
  return `${opts.protocol}://${opts.host}${portPart}/${opts.path}${pathDel}${path}`;
}
9288
// Serialize a params object into a '?key=value&…' query string, or ''
// when there are no params. Values are URI-component encoded; keys are
// assumed to be URL-safe.
function paramsToStr(params) {
  const pairs = Object.entries(params).map(
    ([key, value]) => `${key}=${encodeURIComponent(value)}`
  );
  return pairs.length ? `?${pairs.join('&')}` : '';
}
9297
// IE/legacy-Edge aggressively cache GET requests; detect those browsers
// via the user agent so a cache-busting nonce can be appended.
function shouldCacheBust(opts) {
  const ua = (typeof navigator !== 'undefined' && navigator.userAgent)
    ? navigator.userAgent.toLowerCase()
    : '';
  const isLegacyMsBrowser = ['msie', 'trident', 'edge'].some((token) => ua.includes(token));
  const isGET = !('method' in opts) || opts.method === 'GET';
  return isLegacyMsBrowser && isGET;
}
9307
9308// Implements the PouchDB API for dealing with CouchDB instances over HTTP
9309function HttpPouch(opts, callback) {
9310
9311 // The functions that will be publicly available for HttpPouch
9312 const api = this;
9313
9314 const host = getHost(opts.name, opts);
9315 const dbUrl = genDBUrl(host, '');
9316
9317 opts = clone(opts);
9318
// fetch() wrapper shared by every request this adapter makes: sends
// cookies, adds Basic auth (from opts.auth or credentials embedded in
// the URL), appends user-supplied headers, applies IE/Edge cache
// busting, and finally delegates to opts.fetch or the default fetch.
const ourFetch = async function (url, options) {

  options = options || {};
  options.headers = options.headers || new h();

  options.credentials = 'include';

  if (opts.auth || host.auth) {
    const nAuth = opts.auth || host.auth;
    const str = nAuth.username + ':' + nAuth.password;
    // unescape(encodeURIComponent(...)) converts the string to UTF-8
    // bytes so btoa can handle non-Latin1 credentials.
    const token = thisBtoa(unescape(encodeURIComponent(str)));
    options.headers.set('Authorization', 'Basic ' + token);
  }

  const headers = opts.headers || {};
  Object.keys(headers).forEach(function (key) {
    options.headers.append(key, headers[key]);
  });

  /* istanbul ignore if */
  if (shouldCacheBust(options)) {
    // Unique query param defeats the IE/legacy-Edge GET cache.
    url += (url.indexOf('?') === -1 ? '?' : '&') + '_nonce=' + Date.now();
  }

  const fetchFun = opts.fetch || f$1;
  return await fetchFun(url, options);
};
9346
// Wrap an api function so it only runs after setup() has verified (or
// created) the remote database; setup/handler errors are routed to the
// trailing node-style callback argument.
function adapterFun$$1(name, fun) {
  return adapterFun(name, function (...args) {
    setup().then(function () {
      // NOTE(review): `this` is undefined inside this callback, so `fun`
      // must not rely on a receiver — the api methods here close over
      // what they need instead.
      return fun.apply(this, args);
    }).catch(function (e) {
      // By adapter convention the last argument is the callback.
      const callback = args.pop();
      callback(e);
    });
  }).bind(api);
}
9357
// ourFetch() wrapper that speaks JSON in both directions. Returns
// {ok, status, data}; throws a PouchDB error generated from the response
// body on a non-2xx status.
async function fetchJSON(url, options) {

  const result = {};

  options = options || {};
  options.headers = options.headers || new h();

  if (!options.headers.get('Content-Type')) {
    options.headers.set('Content-Type', 'application/json');
  }
  if (!options.headers.get('Accept')) {
    options.headers.set('Accept', 'application/json');
  }

  const response = await ourFetch(url, options);
  result.ok = response.ok;
  result.status = response.status;
  const json = await response.json();

  result.data = json;
  if (!result.ok) {
    result.data.status = result.status;
    const err = generateErrorFromResponse(result.data);
    throw err;
  }

  // Bulk endpoints return an array mixing successes and per-row
  // failures; convert the failed rows into error objects in place.
  if (Array.isArray(result.data)) {
    result.data = result.data.map(function (v) {
      if (v.error || v.missing) {
        return generateErrorFromResponse(v);
      } else {
        return v;
      }
    });
  }

  return result;
}
9396
// Memoized promise for the one-time remote-database setup below.
let setupPromise;

// Ensure the remote database exists, creating it on a 404 (skipped
// entirely when opts.skip_setup). A successful or in-flight setup is
// memoized; a rejected one is cleared so later calls can retry.
async function setup() {
  if (opts.skip_setup) {
    return Promise.resolve();
  }

  // If there is a setup in process or previous successful setup
  // done then we will use that
  // If previous setups have been rejected we will try again
  if (setupPromise) {
    return setupPromise;
  }

  setupPromise = fetchJSON(dbUrl).catch(function (err) {
    if (err && err.status && err.status === 404) {
      // Doesnt exist, create it
      explainError(404, 'PouchDB is just detecting if the remote exists.');
      return fetchJSON(dbUrl, {method: 'PUT'});
    } else {
      return Promise.reject(err);
    }
  }).catch(function (err) {
    // If we try to create a database that already exists, skipped in
    // istanbul since its catching a race condition.
    /* istanbul ignore if */
    if (err && err.status && err.status === 412) {
      return true;
    }
    return Promise.reject(err);
  });

  // Drop the memoized promise on failure so the next call retries.
  setupPromise.catch(function () {
    setupPromise = null;
  });

  return setupPromise;
}
9435
// The constructor callback fires asynchronously, before setup() has
// necessarily run — individual api calls await setup() themselves.
nextTick(function () {
  callback(null, api);
});

// Marks this adapter as remote for replication logic elsewhere.
api._remote = true;

/* istanbul ignore next */
api.type = function () {
  return 'http';
};
9446
// Produce a stable identifier for this remote database: the server
// uuid + db name when the server root reports one, else the db URL.
api.id = adapterFun$$1('id', async function (callback) {
  let result;
  try {
    const response = await ourFetch(genUrl(host, ''));
    result = await response.json();
  } catch (err) {
    result = {};
  }

  // Bad response or missing `uuid` should not prevent ID generation.
  const uuid$$1 = (result && result.uuid) ? (result.uuid + host.db) : genDBUrl(host, '');
  callback(null, uuid$$1);
});
9460
// Sends a POST request to the host calling the couchdb _compact function,
// then polls info() until the server reports compaction has finished.
api.compact = adapterFun$$1('compact', async function (opts, callback) {
  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }
  opts = clone(opts);

  await fetchJSON(genDBUrl(host, '_compact'), {method: 'POST'});

  function ping() {
    api.info(function (err, res) {
      // CouchDB may send a "compact_running:true" if it's
      // already compacting. PouchDB Server doesn't.
      /* istanbul ignore else */
      if (res && !res.compact_running) {
        callback(null, {ok: true});
      } else {
        // Poll again after opts.interval ms (default 200).
        setTimeout(ping, opts.interval || 200);
      }
    });
  }
  // Ping the http if it's finished compaction
  ping();
});
9487
// Fetch many docs/revisions in one round trip via _bulk_get when the
// remote supports it; otherwise fall back to a shim that issues batched
// individual requests. Support is probed once per remote URL and cached
// in supportsBulkGetMap.
api.bulkGet = adapterFun('bulkGet', function (opts, callback) {
  const self = this;

  // Issue a real _bulk_get request.
  async function doBulkGet(cb) {
    const params = {};
    if (opts.revs) {
      params.revs = true;
    }
    if (opts.attachments) {
      /* istanbul ignore next */
      params.attachments = true;
    }
    if (opts.latest) {
      params.latest = true;
    }
    try {
      const result = await fetchJSON(genDBUrl(host, '_bulk_get' + paramsToStr(params)), {
        method: 'POST',
        body: JSON.stringify({ docs: opts.docs})
      });

      if (opts.attachments && opts.binary) {
        result.data.results.forEach(function (res) {
          res.docs.forEach(readAttachmentsAsBlobOrBuffer);
        });
      }
      cb(null, result.data);
    } catch (error) {
      cb(error);
    }
  }

  /* istanbul ignore next */
  // Emulate _bulk_get with batches of individual GETs for old servers.
  function doBulkGetShim() {
    // avoid "url too long error" by splitting up into multiple requests
    const batchSize = MAX_SIMULTANEOUS_REVS;
    const numBatches = Math.ceil(opts.docs.length / batchSize);
    let numDone = 0;
    const results = new Array(numBatches);

    function onResult(batchNum) {
      return function (err, res) {
        // err is impossible because shim returns a list of errs in that case
        results[batchNum] = res.results;
        if (++numDone === numBatches) {
          callback(null, {results: results.flat()});
        }
      };
    }

    for (let i = 0; i < numBatches; i++) {
      const subOpts = pick(opts, ['revs', 'attachments', 'binary', 'latest']);
      subOpts.docs = opts.docs.slice(i * batchSize,
        Math.min(opts.docs.length, (i + 1) * batchSize));
      bulkGet(self, subOpts, onResult(i));
    }
  }

  // mark the whole database as either supporting or not supporting _bulk_get
  const dbUrl = genUrl(host, '');
  const supportsBulkGet = supportsBulkGetMap[dbUrl];

  /* istanbul ignore next */
  if (typeof supportsBulkGet !== 'boolean') {
    // check if this database supports _bulk_get
    doBulkGet(function (err, res) {
      if (err) {
        supportsBulkGetMap[dbUrl] = false;
        explainError(
          err.status,
          'PouchDB is just detecting if the remote ' +
          'supports the _bulk_get API.'
        );
        doBulkGetShim();
      } else {
        supportsBulkGetMap[dbUrl] = true;
        callback(null, res);
      }
    });
  } else if (supportsBulkGet) {
    doBulkGet(callback);
  } else {
    doBulkGetShim();
  }
});
9573
// Calls GET on the host, which gets back a JSON string containing
// couchdb: A welcome string
// version: The version of CouchDB it is running
api._info = async function (callback) {
  try {
    await setup();
    const infoUrl = genDBUrl(host, '');
    const response = await ourFetch(infoUrl);
    const info = await response.json();
    // Tag the result with the db URL so callers know which host answered.
    info.host = infoUrl;
    callback(null, info);
  } catch (err) {
    callback(err);
  }
};
9588
// Raw fetch escape hatch: paths starting with '/' are resolved against
// the server root, everything else against the database URL.
api.fetch = async function (path, options) {
  await setup();
  let url;
  if (path.startsWith('/')) {
    url = genUrl(host, path.slice(1));
  } else {
    url = genDBUrl(host, path);
  }
  return ourFetch(url, options);
};
9596
// Get the document with the given id from the database given by host.
// The id could be solely the _id in the database, or it may be a
// _design/ID or _local/ID path
api.get = adapterFun$$1('get', async function (id, opts, callback) {
  // If no options were given, set the callback to the second parameter
  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }
  opts = clone(opts);

  // List of parameters to add to the GET request
  const params = {};

  if (opts.revs) {
    params.revs = true;
  }

  if (opts.revs_info) {
    params.revs_info = true;
  }

  if (opts.latest) {
    params.latest = true;
  }

  if (opts.open_revs) {
    // open_revs is either the literal string "all" or a JSON array of revs.
    if (opts.open_revs !== "all") {
      opts.open_revs = JSON.stringify(opts.open_revs);
    }
    params.open_revs = opts.open_revs;
  }

  if (opts.rev) {
    params.rev = opts.rev;
  }

  if (opts.conflicts) {
    params.conflicts = opts.conflicts;
  }

  /* istanbul ignore if */
  if (opts.update_seq) {
    params.update_seq = opts.update_seq;
  }

  id = encodeDocId(id);

  // Replace each attachment stub on `doc` with its actual data,
  // fetched with one request per attachment (in place).
  function fetchAttachments(doc) {
    const atts = doc._attachments;
    const filenames = atts && Object.keys(atts);
    if (!atts || !filenames.length) {
      return;
    }
    // we fetch these manually in separate XHRs, because
    // Sync Gateway would normally send it back as multipart/mixed,
    // which we cannot parse. Also, this is more efficient than
    // receiving attachments as base64-encoded strings.
    async function fetchData(filename) {
      const att = atts[filename];
      const path = encodeDocId(doc._id) + '/' + encodeAttachmentId(filename) +
        '?rev=' + doc._rev;

      const response = await ourFetch(genDBUrl(host, path));

      let blob;
      if ('buffer' in response) {
        // node-fetch style response (Node environments)
        blob = await response.buffer();
      } else {
        /* istanbul ignore next */
        blob = await response.blob();
      }

      let data;
      if (opts.binary) {
        // Set the blob's content type when the implementation allows it
        // (some define `type` as a getter-only prototype property).
        const typeFieldDescriptor = Object.getOwnPropertyDescriptor(blob.__proto__, 'type');
        if (!typeFieldDescriptor || typeFieldDescriptor.set) {
          blob.type = att.content_type;
        }
        data = blob;
      } else {
        data = await new Promise(function (resolve) {
          blobToBase64(blob, resolve);
        });
      }

      // The attachment is no longer a stub once real data is attached.
      delete att.stub;
      delete att.length;
      att.data = data;
    }

    const promiseFactories = filenames.map(function (filename) {
      return function () {
        return fetchData(filename);
      };
    });

    // This limits the number of parallel xhr requests to 5 any time
    // to avoid issues with maximum browser request limits
    return pool(promiseFactories, 5);
  }

  // open_revs responses are arrays of {ok: doc} rows; plain gets are a doc.
  function fetchAllAttachments(docOrDocs) {
    if (Array.isArray(docOrDocs)) {
      return Promise.all(docOrDocs.map(function (doc) {
        if (doc.ok) {
          return fetchAttachments(doc.ok);
        }
      }));
    }
    return fetchAttachments(docOrDocs);
  }

  const url = genDBUrl(host, id + paramsToStr(params));
  try {
    const res = await fetchJSON(url);
    if (opts.attachments) {
      await fetchAllAttachments(res.data);
    }
    callback(null, res.data);
  } catch (error) {
    // Tag the error with the id so callers can tell which doc failed.
    error.docId = id;
    callback(error);
  }
});
9722
9723
// Delete the document given by doc from the database given by host.
// Accepts either (doc, opts, cb) or (id, rev, opts, cb) calling styles.
api.remove = adapterFun$$1('remove', async function (docOrId, optsOrRev, opts, cb) {
  let doc;
  if (typeof optsOrRev === 'string') {
    // id, rev, opts, callback style
    doc = {
      _id: docOrId,
      _rev: optsOrRev
    };
    if (typeof opts === 'function') {
      cb = opts;
      opts = {};
    }
  } else {
    // doc, opts, callback style
    doc = docOrId;
    if (typeof optsOrRev === 'function') {
      cb = optsOrRev;
      opts = {};
    } else {
      cb = opts;
      opts = optsOrRev;
    }
  }

  // The revision to delete: the doc's own _rev wins over opts.rev.
  const rev$$1 = (doc._rev || opts.rev);
  const url = genDBUrl(host, encodeDocId(doc._id)) + '?rev=' + rev$$1;

  try {
    const result = await fetchJSON(url, {method: 'DELETE'});
    cb(null, result.data);
  } catch (error) {
    cb(error);
  }
});
9759
// URL-encode each path segment of an attachment id while keeping the
// '/' separators intact.
function encodeAttachmentId(attachmentId) {
  const segments = attachmentId.split("/");
  return segments.map(encodeURIComponent).join("/");
}
9763
// Get the attachment as a blob (browser) or buffer (Node); opts.rev
// selects a specific document revision.
api.getAttachment = adapterFun$$1('getAttachment', async function (docId, attachmentId,
    opts, callback) {
  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }
  const params = opts.rev ? ('?rev=' + opts.rev) : '';
  const url = genDBUrl(host, encodeDocId(docId)) + '/' +
    encodeAttachmentId(attachmentId) + params;
  let contentType;
  try {
    const response = await ourFetch(url, {method: 'GET'});

    if (!response.ok) {
      throw response;
    }

    contentType = response.headers.get('content-type');
    let blob;
    if (typeof process !== 'undefined' && !process.browser && typeof response.buffer === 'function') {
      // node-fetch style response in Node environments
      blob = await response.buffer();
    } else {
      /* istanbul ignore next */
      blob = await response.blob();
    }

    // TODO: also remove
    if (typeof process !== 'undefined' && !process.browser) {
      // Set the content type when the blob implementation allows it
      // (some define `type` as a getter-only prototype property).
      const typeFieldDescriptor = Object.getOwnPropertyDescriptor(blob.__proto__, 'type');
      if (!typeFieldDescriptor || typeFieldDescriptor.set) {
        blob.type = contentType;
      }
    }
    callback(null, blob);
  } catch (err) {
    callback(err);
  }
});
9803
// Remove the attachment given by the id and rev
api.removeAttachment = adapterFun$$1('removeAttachment', async function (
  docId,
  attachmentId,
  rev$$1,
  callback,
) {
  const attachmentPath = encodeDocId(docId) + '/' + encodeAttachmentId(attachmentId);
  const url = `${genDBUrl(host, attachmentPath)}?rev=${rev$$1}`;

  try {
    const result = await fetchJSON(url, {method: 'DELETE'});
    callback(null, result.data);
  } catch (error) {
    callback(error);
  }
});
9820
// Add the attachment given by blob and its contentType property
// to the document with the given id, the revision given by rev, and
// add it to the database given by host.
api.putAttachment = adapterFun$$1('putAttachment', async function (
  docId,
  attachmentId,
  rev$$1,
  blob,
  type,
  callback,
) {
  // Support the (docId, attachmentId, blob, type, callback) style by
  // shifting arguments when no rev was supplied.
  if (typeof type === 'function') {
    callback = type;
    type = blob;
    blob = rev$$1;
    rev$$1 = null;
  }
  const id = encodeDocId(docId) + '/' + encodeAttachmentId(attachmentId);
  let url = genDBUrl(host, id);
  if (rev$$1) {
    url += '?rev=' + rev$$1;
  }

  if (typeof blob === 'string') {
    // input is assumed to be a base64 string
    let binary;
    try {
      binary = thisAtob(blob);
    } catch (err) {
      return callback(createError(BAD_ARG,
        'Attachment is not a valid base64 string'));
    }
    blob = binary ? binStringToBluffer(binary, type) : '';
  }

  try {
    // Add the attachment
    const result = await fetchJSON(url, {
      headers: new h({'Content-Type': type}),
      method: 'PUT',
      body: blob
    });
    callback(null, result.data);
  } catch (error) {
    callback(error);
  }
});
9868
// Update/create multiple documents given by req in the database
// given by host.
api._bulkDocs = async function (req, opts, callback) {
  // If new_edits=false then it prevents the database from creating
  // new revision numbers for the documents. Instead it just uses
  // the old ones. This is used in database replication.
  req.new_edits = opts.new_edits;

  try {
    await setup();
    // Convert binary attachment bodies to base64 so the payload is JSON-safe.
    await Promise.all(req.docs.map(function (doc) {
      return preprocessAttachments$1(doc);
    }));

    // Update/create the documents
    const fetchOpts = {
      method: 'POST',
      body: JSON.stringify(req)
    };
    const result = await fetchJSON(genDBUrl(host, '_bulk_docs'), fetchOpts);
    callback(null, result.data);
  } catch (error) {
    callback(error);
  }
};
9891
9892 // Update/create document
9893 api._put = async function (doc, opts, callback) {
9894 try {
9895 await setup();
9896 await preprocessAttachments$1(doc);
9897
9898 const result = await fetchJSON(genDBUrl(host, encodeDocId(doc._id)), {
9899 method: 'PUT',
9900 body: JSON.stringify(doc)
9901 });
9902 callback(null, result.data);
9903 } catch (error) {
9904 error.docId = doc && doc._id;
9905 callback(error);
9906 }
9907 };
9908
9909
  // Get a listing of the documents in the database given
  // by host and ordered by increasing id.
  // Translates PouchDB allDocs options into CouchDB _all_docs query
  // parameters, issuing a POST instead of a GET when opts.keys is present.
  api.allDocs = adapterFun$$1('allDocs', async function (opts, callback) {
    if (typeof opts === 'function') {
      callback = opts;
      opts = {};
    }
    opts = clone(opts);

    // List of parameters to add to the GET request
    const params = {};
    let body;
    let method = 'GET';

    if (opts.conflicts) {
      params.conflicts = true;
    }

    /* istanbul ignore if */
    if (opts.update_seq) {
      params.update_seq = true;
    }

    if (opts.descending) {
      params.descending = true;
    }

    if (opts.include_docs) {
      params.include_docs = true;
    }

    // added in CouchDB 1.6.0
    if (opts.attachments) {
      params.attachments = true;
    }

    // key/startkey/endkey values are arbitrary JSON, so they must be
    // JSON-encoded before going into the query string.
    if (opts.key) {
      params.key = JSON.stringify(opts.key);
    }

    // start_key / end_key are accepted as aliases for startkey / endkey
    if (opts.start_key) {
      opts.startkey = opts.start_key;
    }

    if (opts.startkey) {
      params.startkey = JSON.stringify(opts.startkey);
    }

    if (opts.end_key) {
      opts.endkey = opts.end_key;
    }

    if (opts.endkey) {
      params.endkey = JSON.stringify(opts.endkey);
    }

    if (typeof opts.inclusive_end !== 'undefined') {
      params.inclusive_end = !!opts.inclusive_end;
    }

    if (typeof opts.limit !== 'undefined') {
      params.limit = opts.limit;
    }

    if (typeof opts.skip !== 'undefined') {
      params.skip = opts.skip;
    }

    const paramStr = paramsToStr(params);

    // A keys fetch is POSTed so long key lists don't overflow URL limits.
    if (typeof opts.keys !== 'undefined') {
      method = 'POST';
      body = {keys: opts.keys};
    }

    try {
      const result = await fetchJSON(genDBUrl(host, '_all_docs' + paramStr), {
        method,
        body: JSON.stringify(body)
      });
      // Inflate base64 attachment bodies to Blob/Buffer when requested.
      if (opts.include_docs && opts.attachments && opts.binary) {
        result.data.rows.forEach(readAttachmentsAsBlobOrBuffer);
      }
      callback(null, result.data);
    } catch (error) {
      callback(error);
    }
  });
9998
  // Get a list of changes made to documents in the database given by host.
  // Changes are fetched in batches; each matching change is delivered via
  // opts.onChange, and opts.complete fires on completion or error. Returns
  // a handle whose cancel() aborts any in-flight request.
  // TODO According to the README, there should be two other methods here,
  // api.changes.addListener and api.changes.removeListener.
  api._changes = function (opts) {

    // We internally page the results of a changes request, this means
    // if there is a large set of changes to be returned we can start
    // processing them quicker instead of waiting on the entire
    // set of changes to return and attempting to process them at once
    const batchSize = 'batch_size' in opts ? opts.batch_size : CHANGES_BATCH_SIZE;

    opts = clone(opts);

    // continuous feeds default to a heartbeat so long-polls stay alive
    if (opts.continuous && !('heartbeat' in opts)) {
      opts.heartbeat = DEFAULT_HEARTBEAT;
    }

    let requestTimeout = ('timeout' in opts) ? opts.timeout : 30 * 1000;

    // ensure CHANGES_TIMEOUT_BUFFER applies
    if ('timeout' in opts && opts.timeout &&
        (requestTimeout - opts.timeout) < CHANGES_TIMEOUT_BUFFER) {
      requestTimeout = opts.timeout + CHANGES_TIMEOUT_BUFFER;
    }

    /* istanbul ignore if */
    if ('heartbeat' in opts && opts.heartbeat &&
        (requestTimeout - opts.heartbeat) < CHANGES_TIMEOUT_BUFFER) {
      requestTimeout = opts.heartbeat + CHANGES_TIMEOUT_BUFFER;
    }
    // NOTE(review): requestTimeout is computed above but never read below —
    // confirm whether it should be applied to the fetch (e.g. as a timeout).

    const params = {};
    if ('timeout' in opts && opts.timeout) {
      params.timeout = opts.timeout;
    }

    // limit === false means "no limit"; leftToFetch counts down per change
    const limit = (typeof opts.limit !== 'undefined') ? opts.limit : false;
    let leftToFetch = limit;

    if (opts.style) {
      params.style = opts.style;
    }

    // a client-side filter function needs the full docs to filter on
    if (opts.include_docs || opts.filter && typeof opts.filter === 'function') {
      params.include_docs = true;
    }

    if (opts.attachments) {
      params.attachments = true;
    }

    if (opts.continuous) {
      params.feed = 'longpoll';
    }

    if (opts.seq_interval) {
      params.seq_interval = opts.seq_interval;
    }

    if (opts.conflicts) {
      params.conflicts = true;
    }

    if (opts.descending) {
      params.descending = true;
    }

    /* istanbul ignore if */
    if (opts.update_seq) {
      params.update_seq = true;
    }

    if ('heartbeat' in opts) {
      // If the heartbeat value is false, it disables the default heartbeat
      if (opts.heartbeat) {
        params.heartbeat = opts.heartbeat;
      }
    }

    if (opts.filter && typeof opts.filter === 'string') {
      params.filter = opts.filter;
    }

    if (opts.view && typeof opts.view === 'string') {
      params.filter = '_view';
      params.view = opts.view;
    }

    // If opts.query_params exists, pass it through to the changes request.
    // These parameters may be used by the filter on the source database.
    if (opts.query_params && typeof opts.query_params === 'object') {
      for (const param_name in opts.query_params) {
        /* istanbul ignore else */
        if (Object.prototype.hasOwnProperty.call(opts.query_params, param_name)) {
          params[param_name] = opts.query_params[param_name];
        }
      }
    }

    let method = 'GET';
    let body;

    if (opts.doc_ids) {
      // set this automagically for the user; it's annoying that couchdb
      // requires both a "filter" and a "doc_ids" param.
      params.filter = '_doc_ids';
      method = 'POST';
      body = {doc_ids: opts.doc_ids };
    }
    /* istanbul ignore next */
    else if (opts.selector) {
      // set this automagically for the user, similar to above
      params.filter = '_selector';
      method = 'POST';
      body = {selector: opts.selector };
    }

    const controller = new AbortController();
    let lastFetchedSeq;

    // Get all the changes starting with the one immediately after the
    // sequence number given by since.
    const fetchData = async function (since, callback) {
      if (opts.aborted) {
        return;
      }
      params.since = since;
      // "since" can be any kind of json object in Cloudant/CouchDB 2.x
      /* istanbul ignore next */
      if (typeof params.since === "object") {
        params.since = JSON.stringify(params.since);
      }

      if (opts.descending) {
        if (limit) {
          params.limit = leftToFetch;
        }
      } else {
        // fetch at most one batch, or fewer if the limit is nearly reached
        params.limit = (!limit || leftToFetch > batchSize) ?
          batchSize : leftToFetch;
      }

      // Set the options for the ajax call
      const url = genDBUrl(host, '_changes' + paramsToStr(params));
      const fetchOpts = {
        signal: controller.signal,
        method,
        body: JSON.stringify(body)
      };
      lastFetchedSeq = since;

      /* istanbul ignore if */
      if (opts.aborted) {
        return;
      }

      // Get the changes
      try {
        await setup();
        const result = await fetchJSON(url, fetchOpts);
        callback(null, result.data);
      } catch (error) {
        callback(error);
      }
    };

    // If opts.since exists, get all the changes from the sequence
    // number given by opts.since. Otherwise, get all the changes
    // from the sequence number 0.
    const results = {results: []};

    // Handles one batch of changes, then either queues the next fetch or
    // finishes by calling opts.complete.
    const fetched = function (err, res) {
      if (opts.aborted) {
        return;
      }
      let raw_results_length = 0;
      // If the result of the ajax call (res) contains changes (res.results)
      if (res && res.results) {
        raw_results_length = res.results.length;
        results.last_seq = res.last_seq;
        let pending = null;
        let lastSeq = null;
        // Attach 'pending' property if server supports it (CouchDB 2.0+)
        /* istanbul ignore if */
        if (typeof res.pending === 'number') {
          pending = res.pending;
        }
        if (typeof results.last_seq === 'string' || typeof results.last_seq === 'number') {
          lastSeq = results.last_seq;
        }
        // For each change
        // NOTE(review): req is built here but not passed anywhere visible —
        // presumably historical; confirm before relying on it.
        const req = {};
        req.query = opts.query_params;
        res.results = res.results.filter(function (c) {
          leftToFetch--;
          const ret = filterChange(opts)(c);
          if (ret) {
            if (opts.include_docs && opts.attachments && opts.binary) {
              readAttachmentsAsBlobOrBuffer(c);
            }
            if (opts.return_docs) {
              results.results.push(c);
            }
            opts.onChange(c, pending, lastSeq);
          }
          return ret;
        });
      } else if (err) {
        // In case of an error, stop listening for changes and call
        // opts.complete
        opts.aborted = true;
        opts.complete(err);
        return;
      }

      // The changes feed may have timed out with no results
      // if so reuse last update sequence
      if (res && res.last_seq) {
        lastFetchedSeq = res.last_seq;
      }

      const finished = (limit && leftToFetch <= 0) ||
        (res && raw_results_length < batchSize) ||
        (opts.descending);

      if ((opts.continuous && !(limit && leftToFetch <= 0)) || !finished) {
        // Queue a call to fetch again with the newest sequence number
        nextTick(function () { fetchData(lastFetchedSeq, fetched); });
      } else {
        // We're done, call the callback
        opts.complete(null, results);
      }
    };

    fetchData(opts.since || 0, fetched);

    // Return a method to cancel this method from processing any more
    return {
      cancel: function () {
        opts.aborted = true;
        controller.abort();
      }
    };
  };
10243
10244 // Given a set of document/revision IDs (given by req), tets the subset of
10245 // those that do NOT correspond to revisions stored in the database.
10246 // See http://wiki.apache.org/couchdb/HttpPostRevsDiff
10247 api.revsDiff = adapterFun$$1('revsDiff', async function (req, opts, callback) {
10248 // If no options were given, set the callback to be the second parameter
10249 if (typeof opts === 'function') {
10250 callback = opts;
10251 opts = {};
10252 }
10253
10254 try {
10255 // Get the missing document/revision IDs
10256 const result = await fetchJSON(genDBUrl(host, '_revs_diff'), {
10257 method: 'POST',
10258 body: JSON.stringify(req)
10259 });
10260 callback(null, result.data);
10261 } catch (error) {
10262 callback(error);
10263 }
10264 });
10265
  // There is no persistent connection to tear down for the HTTP adapter,
  // so closing simply reports success.
  api._close = function (callback) {
    callback();
  };
10269
10270 api._destroy = async function (options, callback) {
10271 try {
10272 const json = await fetchJSON(genDBUrl(host, ''), {method: 'DELETE'});
10273 callback(null, json);
10274 } catch (error) {
10275 if (error.status === 404) {
10276 callback(null, {ok: true});
10277 } else {
10278 callback(error);
10279 }
10280 }
10281 };
10282}
10283
// HttpPouch is a valid adapter: nothing environment-specific needs to be
// probed, so validity is unconditional.
HttpPouch.valid = function () {
  return true;
};
10288
// Register HttpPouch as the adapter handling both http:// and https:// URLs.
function HttpPouch$1 (PouchDB) {
  for (const protocol of ['http', 'https']) {
    PouchDB.adapter(protocol, HttpPouch, false);
  }
}
10293
// Error (HTTP 400) raised when query options fail validation.
class QueryParseError extends Error {
  constructor(message) {
    super();
    Object.assign(this, {
      status: 400,
      name: 'query_parse_error',
      message,
      error: true,
    });
    try {
      // captureStackTrace is V8-only; guard for other engines.
      Error.captureStackTrace(this, QueryParseError);
    } catch (e) {}
  }
}
10306
// Error (HTTP 404) raised when a requested resource does not exist.
class NotFoundError extends Error {
  constructor(message) {
    super();
    Object.assign(this, {
      status: 404,
      name: 'not_found',
      message,
      error: true,
    });
    try {
      // captureStackTrace is V8-only; guard for other engines.
      Error.captureStackTrace(this, NotFoundError);
    } catch (e) {}
  }
}
10319
// Error (HTTP 500) raised when a builtin reduce receives invalid values.
class BuiltInError extends Error {
  constructor(message) {
    super();
    Object.assign(this, {
      status: 500,
      name: 'invalid_value',
      message,
      error: true,
    });
    try {
      // captureStackTrace is V8-only; guard for other engines.
      Error.captureStackTrace(this, BuiltInError);
    } catch (e) {}
  }
}
10332
// Bridge a promise to an optional node-style callback; when a callback is
// supplied it fires on a later tick. The original promise is returned
// either way.
function promisedCallback(promise, callback) {
  if (!callback) {
    return promise;
  }
  promise.then(
    (res) => nextTick(() => callback(null, res)),
    (reason) => nextTick(() => callback(reason))
  );
  return promise;
}
10347
// Wrap a promise-returning function so it also accepts a trailing
// node-style callback; the promise is still returned to the caller.
function callbackify(fun) {
  return function (...args) {
    const cb = args[args.length - 1];
    const promise = fun.apply(this, args.slice(0, -1));
    if (typeof cb === 'function') {
      promisedCallback(promise, cb);
    }
    return promise;
  };
}
10358
// Promise "finally" util similar to Q.finally: run finalPromiseFactory
// after promise settles, then re-deliver the original result or rethrow
// the original rejection reason.
function fin(promise, finalPromiseFactory) {
  const onSuccess = (res) => finalPromiseFactory().then(() => res);
  const onFailure = (reason) => finalPromiseFactory().then(() => {
    throw reason;
  });
  return promise.then(onSuccess, onFailure);
}
10371
// Route every call of promiseFactory through the given task queue,
// preserving this-binding and arguments.
function sequentialize(queue, promiseFactory) {
  return function (...args) {
    return queue.add(() => promiseFactory.apply(this, args));
  };
}
10381
// uniq an array of strings, order not guaranteed
// similar to underscore/lodash _.uniq
function uniq(arr) {
  return [...new Set(arr)];
}
10393
// Collect a Map's keys into a plain array (insertion order).
function mapToKeysArray(map) {
  return Array.from(map.keys());
}
10402
// Build the standard error raised when a builtin reduce function is fed
// values that are neither numbers nor number arrays.
function createBuiltInError(name) {
  return new BuiltInError(
    'builtin ' + name +
    ' function requires map values to be numbers' +
    ' or number arrays'
  );
}
10409
// CouchDB-compatible builtin _sum reduce: adds plain numbers, and adds
// arrays of numbers element-wise. Mixing scalars and arrays promotes the
// running total to an array (the scalar folds into slot 0). Any value
// that is neither a number nor an array of numbers raises a BuiltInError.
function sum(values) {
  let result = 0;
  for (const num of values) {
    if (typeof num === 'number') {
      if (typeof result === 'number') {
        result += num;
      } else {
        // running total is already an array; fold the scalar into slot 0
        result[0] += num;
      }
      continue;
    }
    if (!Array.isArray(num)) { // not array/number
      throw createBuiltInError('_sum');
    }
    // element-wise addition of a number array
    if (typeof result === 'number') {
      result = [result];
    }
    for (let j = 0, jLen = num.length; j < jLen; j++) {
      const jNum = num[j];
      if (typeof jNum !== 'number') {
        throw createBuiltInError('_sum');
      }
      if (typeof result[j] === 'undefined') {
        // the incoming array is longer than the running total; extend it
        result.push(jNum);
      } else {
        result[j] += jNum;
      }
    }
  }
  return result;
}
10439
// Aliases exposed to user-defined map/reduce functions via scopeEval
// (see evalFunctionWithEval below).
var log = guardedConsole.bind(null, 'log');
var isArray = Array.isArray;
var toJSON = JSON.parse;
10443
// Compile a stringified map/reduce function into a callable. The source is
// wrapped in "return (...)" — with any trailing semicolon stripped so the
// parentheses stay balanced — and evaluated in a scope that exposes the
// CouchDB builtins: emit, sum, log, isArray and toJSON.
function evalFunctionWithEval(func, emit) {
  return scopeEval(
    "return (" + func.replace(/;\s*$/, "") + ");",
    {
      emit,
      sum,
      log,
      isArray,
      toJSON
    }
  );
}
10456
10457/*
10458 * Simple task queue to sequentialize actions. Assumes
10459 * callbacks will eventually fire (once).
10460 */
10461
// Serializes asynchronous work: each added task starts only after all
// previously added tasks have settled. Assumes tasks eventually settle.
class TaskQueue$1 {
  constructor() {
    // Tail of the chain; new tasks are appended after this promise.
    this.promise = Promise.resolve();
  }

  // Queue promiseFactory behind all earlier work. A prior failure is
  // swallowed so one bad task cannot poison the queue. Returns a promise
  // for this task's own result.
  add(promiseFactory) {
    const next = this.promise
      .catch(() => { })
      .then(() => promiseFactory());
    this.promise = next;
    return next;
  }

  // Resolves once everything queued so far has settled.
  finish() {
    return this.promise;
  }
}
10479
// Serialize a map/reduce definition for use in a view signature. For
// backwards compatibility with mapreduce, functions and strings are used
// verbatim; anything else is JSON-stringified; falsy input (e.g. an empty
// reduce) yields the literal string 'undefined'.
function stringify(input) {
  if (!input) {
    return 'undefined'; // backwards compat for empty reduce
  }
  const kind = typeof input;
  if (kind === 'function' || kind === 'string') {
    return input.toString();
  }
  // e.g. a JSON object in the case of mango queries
  return JSON.stringify(input);
}
10498
/* Build the cache/uniqueness signature for a view from its map and reduce
   definitions. The trailing 'undefined' preserves historical signatures. */
function createViewSignature(mapFun, reduceFun) {
  return [stringify(mapFun), stringify(reduceFun), 'undefined'].join('');
}
10504
// Create (or return a cached promise for) the "dependent" database that
// backs a map/reduce view on sourceDB. The dependent db is recorded in a
// _local doc on the source db so viewCleanup can later find and destroy it.
// Resolves to a view object {name, db, sourceDB, adapter, mapFun,
// reduceFun, seq}, where seq is the last source sequence the index has
// processed (0 for a fresh view).
async function createView(sourceDB, viewName, mapFun, reduceFun, temporary, localDocName) {
  const viewSignature = createViewSignature(mapFun, reduceFun);

  let cachedViews;
  if (!temporary) {
    // cache this to ensure we don't try to update the same view twice
    cachedViews = sourceDB._cachedViews = sourceDB._cachedViews || {};
    if (cachedViews[viewSignature]) {
      return cachedViews[viewSignature];
    }
  }

  const promiseForView = sourceDB.info().then(async function (info) {
    // Name of the dependent db: source db name plus a hash of the view
    // definition (all temp views share the '-temp' suffix instead).
    const depDbName = info.db_name + '-mrview-' +
      (temporary ? 'temp' : stringMd5(viewSignature));

    // save the view name in the source db so it can be cleaned up if necessary
    // (e.g. when the _design doc is deleted, remove all associated view data)
    function diffFunction(doc) {
      doc.views = doc.views || {};
      let fullViewName = viewName;
      if (fullViewName.indexOf('/') === -1) {
        fullViewName = viewName + '/' + viewName;
      }
      const depDbs = doc.views[fullViewName] = doc.views[fullViewName] || {};
      /* istanbul ignore if */
      if (depDbs[depDbName]) {
        return; // no update necessary
      }
      depDbs[depDbName] = true;
      return doc;
    }
    await upsert(sourceDB, '_local/' + localDocName, diffFunction);
    const res = await sourceDB.registerDependentDatabase(depDbName);
    const db = res.db;
    db.auto_compaction = true;
    const view = {
      name: depDbName,
      db,
      sourceDB,
      adapter: sourceDB.adapter,
      mapFun,
      reduceFun
    };

    // Resume indexing from wherever a previous run left off, as recorded
    // in the dependent db's _local/lastSeq doc (absent for a new view).
    let lastSeqDoc;
    try {
      lastSeqDoc = await view.db.get('_local/lastSeq');
    } catch (err) {
      /* istanbul ignore if */
      if (err.status !== 404) {
        throw err;
      }
    }

    view.seq = lastSeqDoc ? lastSeqDoc.seq : 0;
    if (cachedViews) {
      // Drop the cache entry once the backing db is destroyed.
      view.db.once('destroyed', function () {
        delete cachedViews[viewSignature];
      });
    }
    return view;
  });

  if (cachedViews) {
    cachedViews[viewSignature] = promiseForView;
  }
  return promiseForView;
}
10574
// Task queues for view index updates: one per persistent view (presumably
// keyed by view name — see usage further down), plus a single shared queue
// for temp views.
const persistentQueues = {};
const tempViewQueue = new TaskQueue$1();
// Number of changes processed per batch while indexing a view.
const CHANGES_BATCH_SIZE$1 = 50;
10578
// Split 'ddocname/viewname' into [ddocname, viewname]; a bare 'viewname'
// means the design doc shares the view's name.
function parseViewName(name) {
  if (name.indexOf('/') === -1) {
    return [name, name];
  }
  return name.split('/');
}
10584
// True when a doc's only leaf revision is generation 1 (rev "1-...").
function isGenOne(changes) {
  if (changes.length !== 1) {
    return false;
  }
  return /^1-/.test(changes[0].rev);
}
10590
// Surface an error thrown by a user map/reduce function on the database's
// 'error' event. If emitting itself throws (e.g. presumably when no
// listener is registered and the emitter rethrows), fall back to logging
// debugging guidance on the console instead of crashing.
function emitError(db, e, data) {
  try {
    db.emit('error', e);
  } catch (err) {
    guardedConsole('error',
      'The user\'s map/reduce function threw an uncaught error.\n' +
      'You can debug this error by doing:\n' +
      'myDatabase.on(\'error\', function (err) { debugger; });\n' +
      'Please double-check your map/reduce function.');
    guardedConsole('error', e, data);
  }
}
10603
10604/**
10605 * Returns an "abstract" mapreduce object of the form:
10606 *
10607 * {
10608 * query: queryFun,
10609 * viewCleanup: viewCleanupFun
10610 * }
10611 *
10612 * Arguments are:
10613 *
10614 * localDoc: string
10615 * This is for the local doc that gets saved in order to track the
10616 * "dependent" DBs and clean them up for viewCleanup. It should be
10617 * unique, so that indexer plugins don't collide with each other.
10618 * mapper: function (mapFunDef, emit)
10619 * Returns a map function based on the mapFunDef, which in the case of
10620 * normal map/reduce is just the de-stringified function, but may be
10621 * something else, such as an object in the case of pouchdb-find.
10622 * reducer: function (reduceFunDef)
10623 * Ditto, but for reducing. Modules don't have to support reducing
10624 * (e.g. pouchdb-find).
10625 * ddocValidator: function (ddoc, viewName)
10626 * Throws an error if the ddoc or viewName is not valid.
10627 * This could be a way to communicate to the user that the configuration for the
10628 * indexer is invalid.
10629 */
10630function createAbstractMapReduce(localDocName, mapper, reducer, ddocValidator) {
10631
  // Run a user map function against a doc, converting any thrown error
  // into an 'error' event on the db instead of letting it propagate.
  // Keeping the try/catch isolated in one small function also avoids
  // deoptimizing the calling code.
  function tryMap(db, fun, doc) {
    try {
      fun(doc);
    } catch (e) {
      emitError(db, e, {fun, doc});
    }
  }
10641
  // Run a user reduce function, returning {output} on success or {error}
  // after a throw (which is also emitted on the db). Kept separate from
  // tryMap to avoid extra allocations: tryMap serves the common custom-map
  // case, while custom reduce functions are comparatively rare.
  function tryReduce(db, fun, keys, values, rereduce) {
    try {
      return {output : fun(keys, values, rereduce)};
    } catch (e) {
      emitError(db, e, {fun, keys, values, rereduce});
      return {error: e};
    }
  }
10654
10655 function sortByKeyThenValue(x, y) {
10656 const keyCompare = collate(x.key, y.key);
10657 return keyCompare !== 0 ? keyCompare : collate(x.value, y.value);
10658 }
10659
10660 function sliceResults(results, limit, skip) {
10661 skip = skip || 0;
10662 if (typeof limit === 'number') {
10663 return results.slice(skip, limit + skip);
10664 } else if (skip > 0) {
10665 return results.slice(skip);
10666 }
10667 return results;
10668 }
10669
10670 function rowToDocId(row) {
10671 const val = row.value;
10672 // Users can explicitly specify a joined doc _id, or it
10673 // defaults to the doc _id that emitted the key/value.
10674 const docId = (val && typeof val === 'object' && val._id) || row.id;
10675 return docId;
10676 }
10677
10678 function readAttachmentsAsBlobOrBuffer(res) {
10679 for (const row of res.rows) {
10680 const atts = row.doc && row.doc._attachments;
10681 if (!atts) {
10682 continue;
10683 }
10684 for (const filename of Object.keys(atts)) {
10685 const att = atts[filename];
10686 atts[filename].data = b64ToBluffer(att.data, att.content_type);
10687 }
10688 }
10689 }
10690
10691 function postprocessAttachments(opts) {
10692 return function (res) {
10693 if (opts.include_docs && opts.attachments && opts.binary) {
10694 readAttachmentsAsBlobOrBuffer(res);
10695 }
10696 return res;
10697 };
10698 }
10699
10700 function addHttpParam(paramName, opts, params, asJson) {
10701 // add an http param from opts to params, optionally json-encoded
10702 let val = opts[paramName];
10703 if (typeof val !== 'undefined') {
10704 if (asJson) {
10705 val = encodeURIComponent(JSON.stringify(val));
10706 }
10707 params.push(paramName + '=' + val);
10708 }
10709 }
10710
10711 function coerceInteger(integerCandidate) {
10712 if (typeof integerCandidate !== 'undefined') {
10713 const asNumber = Number(integerCandidate);
10714 // prevents e.g. '1foo' or '1.1' being coerced to 1
10715 if (!isNaN(asNumber) && asNumber === parseInt(integerCandidate, 10)) {
10716 return asNumber;
10717 } else {
10718 return integerCandidate;
10719 }
10720 }
10721 }
10722
10723 function coerceOptions(opts) {
10724 opts.group_level = coerceInteger(opts.group_level);
10725 opts.limit = coerceInteger(opts.limit);
10726 opts.skip = coerceInteger(opts.skip);
10727 return opts;
10728 }
10729
10730 function checkPositiveInteger(number) {
10731 if (number) {
10732 if (typeof number !== 'number') {
10733 return new QueryParseError(`Invalid value for integer: "${number}"`);
10734 }
10735 if (number < 0) {
10736 return new QueryParseError(`Invalid value for positive integer: "${number}"`);
10737 }
10738 }
10739 }
10740
  // Validate query options against the view definition, throwing a
  // QueryParseError for: an impossible key range, include_docs on a
  // reduce query, multi-key reduce fetches without grouping, or
  // malformed integer options.
  function checkQueryParseError(options, fun) {
    // with descending=true the logical start/end keys swap roles
    const startkeyName = options.descending ? 'endkey' : 'startkey';
    const endkeyName = options.descending ? 'startkey' : 'endkey';

    if (typeof options[startkeyName] !== 'undefined' &&
      typeof options[endkeyName] !== 'undefined' &&
      collate(options[startkeyName], options[endkeyName]) > 0) {
      throw new QueryParseError('No rows can match your key range, ' +
        'reverse your start_key and end_key or set {descending : true}');
    } else if (fun.reduce && options.reduce !== false) {
      if (options.include_docs) {
        throw new QueryParseError('{include_docs:true} is invalid for reduce');
      } else if (options.keys && options.keys.length > 1 &&
        !options.group && !options.group_level) {
        throw new QueryParseError('Multi-key fetches for reduce views must use ' +
          '{group: true}');
      }
    }
    for (const optionName of ['group_level', 'limit', 'skip']) {
      const error = checkPositiveInteger(options[optionName]);
      if (error) {
        throw error;
      }
    }
  }
10766
  // Execute a view query against a remote (HTTP) database. A string fun
  // targets a design-doc view ('ddoc/view'); an object fun is posted as a
  // temporary view. Resolves to the parsed result (with attachments
  // inflated when requested); throws a generated error on non-OK status.
  async function httpQuery(db, fun, opts) {
    // List of parameters to add to the PUT request
    let params = [];
    let body;
    let method = 'GET';
    let ok;

    // If opts.reduce exists and is defined, then add it to the list
    // of parameters.
    // If reduce=false then the results are that of only the map function
    // not the final result of map and reduce.
    addHttpParam('reduce', opts, params);
    addHttpParam('include_docs', opts, params);
    addHttpParam('attachments', opts, params);
    addHttpParam('limit', opts, params);
    addHttpParam('descending', opts, params);
    addHttpParam('group', opts, params);
    addHttpParam('group_level', opts, params);
    addHttpParam('skip', opts, params);
    addHttpParam('stale', opts, params);
    addHttpParam('conflicts', opts, params);
    addHttpParam('startkey', opts, params, true);
    addHttpParam('start_key', opts, params, true);
    addHttpParam('endkey', opts, params, true);
    addHttpParam('end_key', opts, params, true);
    addHttpParam('inclusive_end', opts, params);
    addHttpParam('key', opts, params, true);
    addHttpParam('update_seq', opts, params);

    // Format the list of parameters into a valid URI query string
    params = params.join('&');
    params = params === '' ? '' : '?' + params;

    // If keys are supplied, issue a POST to circumvent GET query string limits
    // see http://wiki.apache.org/couchdb/HTTP_view_API#Querying_Options
    if (typeof opts.keys !== 'undefined') {
      const MAX_URL_LENGTH = 2000;
      // according to http://stackoverflow.com/a/417184/680742,
      // the de facto URL length limit is 2000 characters

      const keysAsString = `keys=${encodeURIComponent(JSON.stringify(opts.keys))}`;
      if (keysAsString.length + params.length + 1 <= MAX_URL_LENGTH) {
        // If the keys are short enough, do a GET. we do this to work around
        // Safari not understanding 304s on POSTs (see pouchdb/pouchdb#1239)
        params += (params[0] === '?' ? '&' : '?') + keysAsString;
      } else {
        method = 'POST';
        if (typeof fun === 'string') {
          body = {keys: opts.keys};
        } else { // fun is {map : mapfun}, so append to this
          fun.keys = opts.keys;
        }
      }
    }

    // We are referencing a query defined in the design doc
    if (typeof fun === 'string') {
      const parts = parseViewName(fun);

      const response = await db.fetch('_design/' + parts[0] + '/_view/' + parts[1] + params, {
        headers: new h({'Content-Type': 'application/json'}),
        method,
        body: JSON.stringify(body)
      });
      ok = response.ok;
      // status = response.status;
      const result = await response.json();

      if (!ok) {
        result.status = response.status;
        throw generateErrorFromResponse(result);
      }

      // fail the entire request if the result contains an error
      for (const row of result.rows) {
        /* istanbul ignore if */
        if (row.value && row.value.error && row.value.error === "builtin_reduce_error") {
          throw new Error(row.reason);
        }
      }

      return new Promise(function (resolve) {
        resolve(result);
      }).then(postprocessAttachments(opts));
    }

    // We are using a temporary view, terrible for performance, good for testing
    // Serialize the view definition: array values pass through, everything
    // else (i.e. the map/reduce functions) is stringified.
    body = body || {};
    for (const key of Object.keys(fun)) {
      if (Array.isArray(fun[key])) {
        body[key] = fun[key];
      } else {
        body[key] = fun[key].toString();
      }
    }

    const response = await db.fetch('_temp_view' + params, {
      headers: new h({'Content-Type': 'application/json'}),
      method: 'POST',
      body: JSON.stringify(body)
    });

    ok = response.ok;
    // status = response.status;
    const result = await response.json();
    if (!ok) {
      result.status = response.status;
      throw generateErrorFromResponse(result);
    }

    return new Promise(function (resolve) {
      resolve(result);
    }).then(postprocessAttachments(opts));
  }
10881
10882 // custom adapters can define their own api._query
10883 // and override the default behavior
10884 /* istanbul ignore next */
10885 function customQuery(db, fun, opts) {
10886 return new Promise(function (resolve, reject) {
10887 db._query(fun, opts, function (err, res) {
10888 if (err) {
10889 return reject(err);
10890 }
10891 resolve(res);
10892 });
10893 });
10894 }
10895
10896 // custom adapters can define their own api._viewCleanup
10897 // and override the default behavior
10898 /* istanbul ignore next */
10899 function customViewCleanup(db) {
10900 return new Promise(function (resolve, reject) {
10901 db._viewCleanup(function (err, res) {
10902 if (err) {
10903 return reject(err);
10904 }
10905 resolve(res);
10906 });
10907 });
10908 }
10909
10910 function defaultsTo(value) {
10911 return function (reason) {
10912 /* istanbul ignore else */
10913 if (reason.status === 404) {
10914 return value;
10915 } else {
10916 throw reason;
10917 }
10918 };
10919 }
10920
  // returns a promise for a list of docs to update, based on the input docId.
  // the order doesn't matter, because post-3.2.0, bulkDocs
  // is an atomic operation in all three adapters.
  // The returned list contains the key/value docs for the doc's emitted
  // rows (new, updated, or tombstoned) plus the _local meta doc tracking
  // which keys this source doc has emitted.
  async function getDocsToPersist(docId, view, docIdsToChangesAndEmits) {
    const metaDocId = '_local/doc_' + docId;
    const defaultMetaDoc = {_id: metaDocId, keys: []};
    // docData is a pair: [indexableKeysToKeyValues (Map), changes]
    const docData = docIdsToChangesAndEmits.get(docId);
    const indexableKeysToKeyValues = docData[0];
    const changes = docData[1];

    function getMetaDoc() {
      if (isGenOne(changes)) {
        // generation 1, so we can safely assume initial state
        // for performance reasons (avoids unnecessary GETs)
        return Promise.resolve(defaultMetaDoc);
      }
      return view.db.get(metaDocId).catch(defaultsTo(defaultMetaDoc));
    }

    function getKeyValueDocs(metaDoc) {
      if (!metaDoc.keys.length) {
        // no keys, no need for a lookup
        return Promise.resolve({rows: []});
      }
      return view.db.allDocs({
        keys: metaDoc.keys,
        include_docs: true
      });
    }

    // Merge the previously-stored key/value docs with the freshly emitted
    // keys: docs for keys no longer emitted are tombstoned, surviving docs
    // get their value refreshed, and brand-new keys get new docs.
    function processKeyValueDocs(metaDoc, kvDocsRes) {
      const kvDocs = [];
      const oldKeys = new Set();

      for (const row of kvDocsRes.rows) {
        const doc = row.doc;
        if (!doc) { // deleted
          continue;
        }
        kvDocs.push(doc);
        oldKeys.add(doc._id);
        // delete the row unless this key was emitted again this round
        doc._deleted = !indexableKeysToKeyValues.has(doc._id);
        if (!doc._deleted) {
          const keyValue = indexableKeysToKeyValues.get(doc._id);
          if ('value' in keyValue) {
            doc.value = keyValue.value;
          }
        }
      }
      const newKeys = mapToKeysArray(indexableKeysToKeyValues);
      for (const key of newKeys) {
        if (!oldKeys.has(key)) {
          // new doc
          const kvDoc = {
            _id: key
          };
          const keyValue = indexableKeysToKeyValues.get(key);
          if ('value' in keyValue) {
            kvDoc.value = keyValue.value;
          }
          kvDocs.push(kvDoc);
        }
      }
      // remember every key ever emitted so future updates can clean up
      metaDoc.keys = uniq(newKeys.concat(metaDoc.keys));
      kvDocs.push(metaDoc);

      return kvDocs;
    }

    const metaDoc = await getMetaDoc();
    const keyValueDocs = await getKeyValueDocs(metaDoc);
    return processKeyValueDocs(metaDoc, keyValueDocs);
  }
10994
// Records the source database's latest purgeSeq in the view's
// _local/purgeSeq doc, assuming all pending purges have been processed.
// Resolves with the put response, or undefined when the source has no
// _local/purges doc (or another write in the chain reports a 404).
async function updatePurgeSeq(view) {
  try {
    const purgesDoc = await view.sourceDB.get('_local/purges');
    let rev;
    try {
      rev = (await view.db.get('_local/purgeSeq'))._rev;
    } catch (getErr) {
      // a missing checkpoint doc is expected on the first write;
      // leave rev undefined so the put creates the doc
      if (getErr.status !== 404) {
        throw getErr;
      }
    }
    return await view.db.put({
      _id: '_local/purgeSeq',
      _rev: rev,
      purgeSeq: purgesDoc.purgeSeq,
    });
  } catch (err) {
    // no _local/purges doc on the source: nothing to record
    if (err.status !== 404) {
      throw err;
    }
  }
}
11017
// updates all emitted key/value docs and metaDocs in the mrview database
// for the given batch of documents from the source database; the processed
// source sequence is persisted in _local/lastSeq as part of the same batch
function saveKeyValues(view, docIdsToChangesAndEmits, seq) {
  var seqDocId = '_local/lastSeq';
  return view.db.get(seqDocId)
    // first indexing run: no lastSeq doc yet, start from seq 0
    .catch(defaultsTo({_id: seqDocId, seq: 0}))
    .then(function (lastSeqDoc) {
      var docIds = mapToKeysArray(docIdsToChangesAndEmits);
      return Promise.all(docIds.map(function (docId) {
        return getDocsToPersist(docId, view, docIdsToChangesAndEmits);
      })).then(function (listOfDocsToPersist) {
        var docsToPersist = listOfDocsToPersist.flat();
        lastSeqDoc.seq = seq;
        docsToPersist.push(lastSeqDoc);
        // write all docs in a single operation, update the seq once
        return view.db.bulkDocs({docs : docsToPersist});
      })
      // TODO: this should be placed somewhere else, probably? we're querying both docs twice
      // (first time when getting the actual purges).
      .then(() => updatePurgeSeq(view));
    });
}
11040
// Returns the persistent task queue for a view (or a bare view name),
// lazily creating and caching one so work on the same view serializes.
function getQueue(view) {
  const viewName = typeof view === 'string' ? view : view.name;
  if (!persistentQueues[viewName]) {
    persistentQueues[viewName] = new TaskQueue$1();
  }
  return persistentQueues[viewName];
}
11049
// Runs updateViewInQueue behind the view's queue so that concurrent index
// updates for the same view are serialized.
async function updateView(view, opts) {
  const run = sequentialize(getQueue(view), () => updateViewInQueue(view, opts));
  return run();
}
11055
// Incrementally indexes the source database into the view's backing database:
// reads the changes feed in batches from view.seq, runs the map function over
// each changed doc, merges in any recent purges, and queues the resulting
// key/value writes. Progress is reported via 'indexing' events and the
// activeTasks registry. Errors are recorded on the active task rather than
// rethrown.
async function updateViewInQueue(view, opts) {
  // bind the emit function once
  let mapResults;
  let doc;
  let taskId;

  // emit() closes over the current doc and accumulates normalized rows
  function emit(key, value) {
    const output = {id: doc._id, key: normalizeKey(key)};
    // Don't explicitly store the value unless it's defined and non-null.
    // This saves on storage space, because often people don't use it.
    if (typeof value !== 'undefined' && value !== null) {
      output.value = normalizeKey(value);
    }
    mapResults.push(output);
  }

  const mapFun = mapper(view.mapFun, emit);

  // resume from the last indexed sequence
  let currentSeq = view.seq || 0;

  // registers this indexing run with the active-tasks API
  function createTask() {
    return view.sourceDB.info().then(function (info) {
      taskId = view.sourceDB.activeTasks.add({
        name: 'view_indexing',
        total_items: info.update_seq - currentSeq,
      });
    });
  }

  // wraps a batch's writes so they can be queued and run sequentially
  function processChange(docIdsToChangesAndEmits, seq) {
    return function () {
      return saveKeyValues(view, docIdsToChangesAndEmits, seq);
    };
  }

  let indexed_docs = 0;
  const progress = {
    view: view.name,
    indexed_docs
  };
  view.sourceDB.emit('indexing', progress);

  const queue = new TaskQueue$1();

  // fetches the next changes batch plus any purges newer than the view's
  // recorded purgeSeq, then processes them together
  async function processNextBatch() {
    const response = await view.sourceDB.changes({
      return_docs: true,
      conflicts: true,
      include_docs: true,
      style: 'all_docs',
      since: currentSeq,
      limit: opts.changes_batch_size
    });
    const purges = await getRecentPurges();
    return processBatch(response, purges);
  }

  // Resolves to [{docId, doc?}] for purges the view has not seen yet; doc is
  // present when the source still has a (new winning) revision for that id.
  function getRecentPurges() {
    return view.db.get('_local/purgeSeq').then(function (res) {
      return res.purgeSeq;
    })
    // no purgeSeq doc yet: treat every recorded purge as unseen
    .catch(defaultsTo(-1))
    .then(function (purgeSeq) {
      return view.sourceDB.get('_local/purges').then(function (res) {
        // only purges recorded after the view's purgeSeq
        const recentPurges = res.purges.filter(function (purge, index) {
          return index > purgeSeq;
        }).map((purge) => purge.docId);

        // dedupe doc ids purged more than once
        const uniquePurges = recentPurges.filter(function (docId, index) {
          return recentPurges.indexOf(docId) === index;
        });

        return Promise.all(uniquePurges.map(function (docId) {
          return view.sourceDB.get(docId).then(function (doc) {
            return { docId, doc };
          })
          // doc fully purged away: no surviving revision
          .catch(defaultsTo({ docId }));
        }));
      })
      // source has no _local/purges doc at all
      .catch(defaultsTo([]));
    });
  }

  // Merges purges into the changes batch, queues index writes, reports
  // progress, and recurses while full batches keep coming back.
  function processBatch(response, purges) {
    const results = response.results;
    if (!results.length && !purges.length) {
      return;
    }

    for (const purge of purges) {
      const index = results.findIndex(function (change) {
        return change.id === purge.docId;
      });
      if (index < 0) {
        // mimic a db.remove() on the changes feed
        const entry = {
          _id: purge.docId,
          doc: {
            _id: purge.docId,
            _deleted: 1,
          },
          changes: [],
        };

        if (purge.doc) {
          // update with new winning rev after purge
          entry.doc = purge.doc;
          entry.changes.push({ rev: purge.doc._rev });
        }

        results.push(entry);
      }
    }

    const docIdsToChangesAndEmits = createDocIdsToChangesAndEmits(results);

    queue.add(processChange(docIdsToChangesAndEmits, currentSeq));

    indexed_docs = indexed_docs + results.length;
    const progress = {
      view: view.name,
      last_seq: response.last_seq,
      results_count: results.length,
      indexed_docs
    };
    view.sourceDB.emit('indexing', progress);
    view.sourceDB.activeTasks.update(taskId, {completed_items: indexed_docs});

    // a short batch means we've drained the changes feed
    if (results.length < opts.changes_batch_size) {
      return;
    }
    return processNextBatch();
  }

  // Runs the map function over each non-design/non-local doc in the batch and
  // collects docId -> [indexableKeysToKeyValues, changes]; also advances
  // currentSeq past every change (including skipped '_'-prefixed ids).
  function createDocIdsToChangesAndEmits(results) {
    const docIdsToChangesAndEmits = new Map();
    for (const change of results) {
      if (change.doc._id[0] !== '_') {
        mapResults = [];
        doc = change.doc;

        if (!doc._deleted) {
          tryMap(view.sourceDB, mapFun, doc);
        }
        mapResults.sort(sortByKeyThenValue);

        const indexableKeysToKeyValues = createIndexableKeysToKeyValues(mapResults);
        docIdsToChangesAndEmits.set(change.doc._id, [
          indexableKeysToKeyValues,
          change.changes
        ]);
      }
      currentSeq = change.seq;
    }
    return docIdsToChangesAndEmits;
  }

  // Maps each emitted row to a collation-ordered string key [key, id(, i)],
  // disambiguating duplicate key+id pairs with the row index.
  function createIndexableKeysToKeyValues(mapResults) {
    const indexableKeysToKeyValues = new Map();
    let lastKey;
    for (let i = 0, len = mapResults.length; i < len; i++) {
      const emittedKeyValue = mapResults[i];
      const complexKey = [emittedKeyValue.key, emittedKeyValue.id];
      if (i > 0 && collate(emittedKeyValue.key, lastKey) === 0) {
        complexKey.push(i); // dup key+id, so make it unique
      }
      indexableKeysToKeyValues.set(toIndexableString(complexKey), emittedKeyValue);
      lastKey = emittedKeyValue.key;
    }
    return indexableKeysToKeyValues;
  }

  try {
    await createTask();
    await processNextBatch();
    await queue.finish();
    view.seq = currentSeq;
    view.sourceDB.activeTasks.remove(taskId);
  } catch (error) {
    // record the failure on the task; the error is intentionally not rethrown
    view.sourceDB.activeTasks.remove(taskId, error);
  }
}
11238
// Applies the view's reduce function to already-sorted map rows, honoring
// group/group_level, and returns {rows: [...]} (no total_rows/offset when
// reducing). A failing built-in reduce throws; a failing user-defined reduce
// yields a null value, matching CouchDB.
function reduceView(view, results, options) {
  if (options.group_level === 0) {
    // group_level=0 behaves like no group_level at all
    delete options.group_level;
  }

  const shouldGroup = options.group || options.group_level;
  const reduceFun = reducer(view.reduceFun);
  const groups = [];
  // for array keys, group_level bounds how many leading elements define a group
  const lvl = isNaN(options.group_level)
    ? Number.POSITIVE_INFINITY
    : options.group_level;

  for (const result of results) {
    const last = groups[groups.length - 1];
    let groupKey = shouldGroup ? result.key : null;

    // only set group_level for array keys
    if (shouldGroup && Array.isArray(groupKey)) {
      groupKey = groupKey.slice(0, lvl);
    }

    // rows arrive in collation order, so equal group keys are adjacent
    if (last && collate(last.groupKey, groupKey) === 0) {
      last.keys.push([result.key, result.id]);
      last.values.push(result.value);
      continue;
    }
    groups.push({
      keys: [[result.key, result.id]],
      values: [result.value],
      groupKey
    });
  }

  results = [];
  for (const group of groups) {
    // rereduce=false: reducing directly over the mapped values
    const reduceTry = tryReduce(view.sourceDB, reduceFun, group.keys, group.values, false);
    if (reduceTry.error && reduceTry.error instanceof BuiltInError) {
      // CouchDB returns an error if a built-in errors out
      throw reduceTry.error;
    }
    results.push({
      // CouchDB just sets the value to null if a non-built-in errors out
      value: reduceTry.error ? null : reduceTry.output,
      key: group.groupKey
    });
  }
  // no total_rows/offset when reducing
  return { rows: sliceResults(results, options.limit, options.skip) };
}
11288
// Serializes reads behind the view's queue so a query never observes a
// half-written index update.
function queryView(view, opts) {
  const task = sequentialize(getQueue(view), () => queryViewInQueue(view, opts));
  return task();
}
11294
// Executes a (already up-to-date) view query against the view's backing
// database: translates the user's key/startkey/endkey/descending options into
// indexable-string ranges, fetches the matching rows, then reduces or shapes
// them into the final {total_rows, offset, rows} response, optionally joining
// in the source docs when include_docs is set.
async function queryViewInQueue(view, opts) {
  let totalRows;
  const shouldReduce = view.reduceFun && opts.reduce !== false;
  const skip = opts.skip || 0;
  if (typeof opts.keys !== 'undefined' && !opts.keys.length) {
    // equivalent query
    opts.limit = 0;
    delete opts.keys;
  }

  // Fetches raw index rows and converts each stored doc back into a
  // {key, id, value} map row.
  async function fetchFromView(viewOpts) {
    viewOpts.include_docs = true;
    const res = await view.db.allDocs(viewOpts);
    totalRows = res.total_rows;

    return res.rows.map(function (result) {
      // implicit migration - in older versions of PouchDB,
      // we explicitly stored the doc as {id: ..., key: ..., value: ...}
      // this is tested in a migration test
      /* istanbul ignore next */
      if ('value' in result.doc && typeof result.doc.value === 'object' &&
          result.doc.value !== null) {
        const keys = Object.keys(result.doc.value).sort();
        // this detection method is not perfect, but it's unlikely the user
        // emitted a value which was an object with these 3 exact keys
        // (the <,> comparisons rely on array-to-string coercion for an
        // equality check on the sorted key lists)
        const expectedKeys = ['id', 'key', 'value'];
        if (!(keys < expectedKeys || keys > expectedKeys)) {
          return result.doc.value;
        }
      }

      // current format: key and doc id are encoded into the row doc's _id
      const parsedKeyAndDocId = parseIndexableString(result.doc._id);
      return {
        key: parsedKeyAndDocId[0],
        id: parsedKeyAndDocId[1],
        value: ('value' in result.doc ? result.doc.value : null)
      };
    });
  }

  // Shapes the map rows into the final response (reduced, plain, or
  // keys-sliced) and joins source docs when include_docs was requested.
  async function onMapResultsReady(rows) {
    let finalResults;
    if (shouldReduce) {
      finalResults = reduceView(view, rows, opts);
    } else if (typeof opts.keys === 'undefined') {
      finalResults = {
        total_rows: totalRows,
        offset: skip,
        rows
      };
    } else {
      // support limit, skip for keys query
      finalResults = {
        total_rows: totalRows,
        offset: skip,
        rows: sliceResults(rows,opts.limit,opts.skip)
      };
    }
    /* istanbul ignore if */
    if (opts.update_seq) {
      finalResults.update_seq = view.seq;
    }
    if (opts.include_docs) {
      const docIds = uniq(rows.map(rowToDocId));

      const allDocsRes = await view.sourceDB.allDocs({
        keys: docIds,
        include_docs: true,
        conflicts: opts.conflicts,
        attachments: opts.attachments,
        binary: opts.binary
      });
      const docIdsToDocs = new Map();
      for (const row of allDocsRes.rows) {
        docIdsToDocs.set(row.id, row.doc);
      }
      for (const row of rows) {
        const docId = rowToDocId(row);
        const doc = docIdsToDocs.get(docId);
        if (doc) {
          row.doc = doc;
        }
      }
    }
    return finalResults;
  }

  if (typeof opts.keys !== 'undefined') {
    // one ranged fetch per requested key, concatenated in key order
    const keys = opts.keys;
    const fetchPromises = keys.map(function (key) {
      const viewOpts = {
        startkey : toIndexableString([key]),
        endkey : toIndexableString([key, {}])
      };
      /* istanbul ignore if */
      if (opts.update_seq) {
        viewOpts.update_seq = true;
      }
      return fetchFromView(viewOpts);
    });
    const result = await Promise.all(fetchPromises);
    const flattenedResult = result.flat();
    return onMapResultsReady(flattenedResult);
  } else { // normal query, no 'keys'
    const viewOpts = {
      descending : opts.descending
    };
    /* istanbul ignore if */
    if (opts.update_seq) {
      viewOpts.update_seq = true;
    }
    let startkey;
    let endkey;
    // snake_case aliases are accepted; the camelCase spellings win
    if ('start_key' in opts) {
      startkey = opts.start_key;
    }
    if ('startkey' in opts) {
      startkey = opts.startkey;
    }
    if ('end_key' in opts) {
      endkey = opts.end_key;
    }
    if ('endkey' in opts) {
      endkey = opts.endkey;
    }
    if (typeof startkey !== 'undefined') {
      // {} collates after any doc id, making the bound inclusive of the
      // whole key; direction decides which side gets the open bound
      viewOpts.startkey = opts.descending ?
        toIndexableString([startkey, {}]) :
        toIndexableString([startkey]);
    }
    if (typeof endkey !== 'undefined') {
      let inclusiveEnd = opts.inclusive_end !== false;
      if (opts.descending) {
        inclusiveEnd = !inclusiveEnd;
      }

      viewOpts.endkey = toIndexableString(
        inclusiveEnd ? [endkey, {}] : [endkey]);
    }
    if (typeof opts.key !== 'undefined') {
      // exact-key query: range spans all rows sharing that key
      const keyStart = toIndexableString([opts.key]);
      const keyEnd = toIndexableString([opts.key, {}]);
      if (viewOpts.descending) {
        viewOpts.endkey = keyStart;
        viewOpts.startkey = keyEnd;
      } else {
        viewOpts.startkey = keyStart;
        viewOpts.endkey = keyEnd;
      }
    }
    if (!shouldReduce) {
      // limit/skip apply pre-reduce only; reduceView handles its own slicing
      if (typeof opts.limit === 'number') {
        viewOpts.limit = opts.limit;
      }
      viewOpts.skip = skip;
    }

    const result = await fetchFromView(viewOpts);
    return onMapResultsReady(result);
  }
}
11456
// Asks a remote CouchDB-compatible server to clean up stale view indexes by
// POSTing to its _view_cleanup endpoint; resolves with the parsed JSON body.
async function httpViewCleanup(db) {
  const requestOpts = {
    method: 'POST',
    headers: new h({'Content-Type': 'application/json'})
  };
  const response = await db.fetch('_view_cleanup', requestOpts);
  return response.json();
}
11464
// Destroys view databases that no longer back a live view (their design doc
// was deleted, or the view function was removed/renamed). Resolves to
// {ok: true}, including when no view metadata doc exists yet (404).
async function localViewCleanup(db) {
  try {
    // _local/mrviews maps full view names to their dependent view DB names
    const metaDoc = await db.get('_local/' + localDocName);
    const docsToViews = new Map();

    // group the known view names by their owning design doc id
    for (const fullViewName of Object.keys(metaDoc.views)) {
      const parts = parseViewName(fullViewName);
      const designDocName = '_design/' + parts[0];
      const viewName = parts[1];
      let views = docsToViews.get(designDocName);
      if (!views) {
        views = new Set();
        docsToViews.set(designDocName, views);
      }
      views.add(viewName);
    }
    const opts = {
      keys : mapToKeysArray(docsToViews),
      include_docs : true
    };

    const res = await db.allDocs(opts);
    // viewDBName -> truthy if at least one live view still uses that database
    const viewsToStatus = {};
    for (const row of res.rows) {
      const ddocName = row.key.substring(8); // cuts off '_design/'
      for (const viewName of docsToViews.get(row.key)) {
        let fullViewName = ddocName + '/' + viewName;
        /* istanbul ignore if */
        if (!metaDoc.views[fullViewName]) {
          // new format, without slashes, to support PouchDB 2.2.0
          // migration test in pouchdb's browser.migration.js verifies this
          fullViewName = viewName;
        }
        const viewDBNames = Object.keys(metaDoc.views[fullViewName]);
        // design doc deleted, or view function nonexistent
        const statusIsGood = row.doc && row.doc.views &&
          row.doc.views[viewName];
        for (const viewDBName of viewDBNames) {
          viewsToStatus[viewDBName] = viewsToStatus[viewDBName] || statusIsGood;
        }
      }
    }

    // any view DB never marked "good" is orphaned and can be destroyed
    const dbsToDelete = Object.keys(viewsToStatus)
      .filter(function (viewDBName) { return !viewsToStatus[viewDBName]; });

    const destroyPromises = dbsToDelete.map(function (viewDBName) {
      // serialize destruction behind the view's queue to avoid racing updates
      return sequentialize(getQueue(viewDBName), function () {
        return new db.constructor(viewDBName, db.__opts).destroy();
      })();
    });

    return Promise.all(destroyPromises).then(function () {
      return {ok: true};
    });
  } catch (err) {
    if (err.status === 404) {
      // no metadata doc: nothing was ever indexed, so nothing to clean up
      return {ok: true};
    } else {
      throw err;
    }
  }
}
11528
// Core query dispatcher: routes to an adapter-supplied _query, the HTTP
// implementation for remote databases, or the local map/reduce engine.
// `fun` may be a {map, reduce} object (temporary view) or a
// 'ddoc/view' string (persistent view).
async function queryPromised(db, fun, opts) {
  /* istanbul ignore next */
  if (typeof db._query === 'function') {
    return customQuery(db, fun, opts);
  }
  if (isRemote(db)) {
    return httpQuery(db, fun, opts);
  }

  const updateViewOpts = {
    changes_batch_size: db.__opts.view_update_changes_batch_size || CHANGES_BATCH_SIZE$1
  };

  if (typeof fun !== 'string') {
    // temp_view
    checkQueryParseError(opts, fun);

    // temp views share one queue; the throwaway index DB is destroyed
    // after the query completes (success or failure)
    tempViewQueue.add(async function () {
      const view = await createView(
        /* sourceDB */ db,
        /* viewName */ 'temp_view/temp_view',
        /* mapFun */ fun.map,
        /* reduceFun */ fun.reduce,
        /* temporary */ true,
        /* localDocName */ localDocName);

      return fin(updateView(view, updateViewOpts).then(
        function () { return queryView(view, opts); }),
        function () { return view.db.destroy(); }
      );
    });
    return tempViewQueue.finish();
  } else {
    // persistent view
    const fullViewName = fun;
    const parts = parseViewName(fullViewName);
    const designDocName = parts[0];
    const viewName = parts[1];

    const doc = await db.get('_design/' + designDocName);
    fun = doc.views && doc.views[viewName];

    if (!fun) {
      // basic validator; it's assumed that every subclass would want this
      throw new NotFoundError(`ddoc ${doc._id} has no view named ${viewName}`);
    }

    ddocValidator(doc, viewName);
    checkQueryParseError(opts, fun);

    const view = await createView(
      /* sourceDB */ db,
      /* viewName */ fullViewName,
      /* mapFun */ fun.map,
      /* reduceFun */ fun.reduce,
      /* temporary */ false,
      /* localDocName */ localDocName);

    if (opts.stale === 'ok' || opts.stale === 'update_after') {
      // serve the possibly-stale index now; 'update_after' schedules a
      // background refresh for subsequent queries
      if (opts.stale === 'update_after') {
        nextTick(function () {
          updateView(view, updateViewOpts);
        });
      }
      return queryView(view, opts);
    } else { // stale not ok
      await updateView(view, updateViewOpts);
      return queryView(view, opts);
    }
  }
}
11600
// Public query() entry point: normalizes the (fun, opts, callback) argument
// forms, starts the query, and supports both promise and callback styles.
function abstractQuery(fun, opts, callback) {
  const db = this;
  if (typeof opts === 'function') {
    // query(fun, callback) form -- shift the arguments
    callback = opts;
    opts = {};
  }
  const normalizedOpts = opts ? coerceOptions(opts) : {};
  // a bare map function is shorthand for {map: fun}
  const viewSpec = typeof fun === 'function' ? {map : fun} : fun;

  const promise = Promise.resolve().then(() => queryPromised(db, viewSpec, normalizedOpts));
  promisedCallback(promise, callback);
  return promise;
}
11619
// Public viewCleanup() entry point. Dispatches, in order of precedence, to an
// adapter-supplied _viewCleanup, the HTTP endpoint for remote databases, or
// the local implementation; callbackify adds optional callback support.
const abstractViewCleanup = callbackify(function () {
  const db = this;
  /* istanbul ignore next */
  if (typeof db._viewCleanup === 'function') {
    return customViewCleanup(db);
  }
  return isRemote(db) ? httpViewCleanup(db) : localViewCleanup(db);
});
11631
11632 return {
11633 query: abstractQuery,
11634 viewCleanup: abstractViewCleanup
11635 };
11636}
11637
// Native implementations of CouchDB's built-in reduce functions.
// Rereduce never happens here: PouchDB always reduces straight from the
// raw mapped rows, so each function only handles the rereduce=false shape.
var builtInReduce = {
  _sum: function (keys, values) {
    return sum(values);
  },

  _count: function (keys, values) {
    return values.length;
  },

  _stats: function (keys, values) {
    // sum of squares, needed downstream for standard-deviation math
    const squaredTotal = values.reduce(function (acc, num) {
      return acc + num * num;
    }, 0);
    return {
      sum : sum(values),
      min : Math.min.apply(null, values),
      max : Math.max.apply(null, values),
      count : values.length,
      sumsqr : squaredTotal
    };
  }
};
11667
// Maps a reduce-function string to a native built-in implementation.
// Strings beginning with _sum/_count/_stats select the corresponding
// built-in; any other underscore-prefixed string is an unsupported
// built-in and throws; everything else returns undefined so the caller
// falls back to eval-compiling the user's function.
function getBuiltIn(reduceFunString) {
  const match = /^_(sum|count|stats)/.exec(reduceFunString);
  if (match) {
    return builtInReduce['_' + match[1]];
  }
  if (reduceFunString[0] === '_') {
    throw new Error(reduceFunString + ' is not a supported reduce function.');
  }
}
11679
// Turns a design-doc map definition into a callable fn(doc). A two-argument
// function is treated as (doc, emit) -- used by temp_views, see #38 --
// otherwise the definition is stringified and compiled with eval.
function mapper(mapFun, emit) {
  const takesEmit = typeof mapFun === "function" && mapFun.length === 2;
  if (!takesEmit) {
    return evalFunctionWithEval(mapFun.toString(), emit);
  }
  return function (doc) {
    return mapFun(doc, emit);
  };
}
11691
// Resolves a design-doc reduce definition to a callable: a native built-in
// (_sum/_count/_stats) when the string names one, otherwise the user's
// function compiled with eval. Unsupported built-ins throw via getBuiltIn.
function reducer(reduceFun) {
  const reduceFunString = reduceFun.toString();
  return getBuiltIn(reduceFunString) || evalFunctionWithEval(reduceFunString);
}
11701
// Validates that `viewName` on the design doc has a string `map` function,
// throwing NotFoundError (a 404, as CouchDB reports it) otherwise.
// Fix: previously a design doc with no `views` object, or a missing view,
// crashed here with a TypeError while reading `fun.map`; such docs now get
// the intended NotFoundError instead.
function ddocValidator(ddoc, viewName) {
  var fun = ddoc.views && ddoc.views[viewName];
  if (!fun || typeof fun.map !== 'string') {
    // typeof (fun && fun.map) is identical to typeof fun.map when the view
    // exists, and safely reports "undefined" when it does not
    throw new NotFoundError('ddoc ' + ddoc._id + ' has no string view named ' +
      viewName + ', instead found object of type: ' + typeof (fun && fun.map));
  }
}
11709
// name of the _local doc that tracks view metadata for this plugin
var localDocName = 'mrviews';
// shared map/reduce machinery parameterized with this plugin's mapper,
// reducer, and design-doc validator
var abstract = createAbstractMapReduce(localDocName, mapper, reducer, ddocValidator);
11712
// db.query() plugin method; forwards to the shared abstract implementation,
// preserving `this` (the database instance).
function query(fun, opts, callback) {
  const delegate = abstract.query;
  return delegate.call(this, fun, opts, callback);
}
11716
// db.viewCleanup() plugin method; forwards to the shared abstract
// implementation, preserving `this` (the database instance).
function viewCleanup(callback) {
  const delegate = abstract.viewCleanup;
  return delegate.call(this, callback);
}
11720
// public surface of the mapreduce plugin, mixed into the PouchDB prototype
var mapreduce = {
  query,
  viewCleanup
};
11725
// True when the local doc lacks the named attachment or holds a different
// digest than the remote doc's copy (i.e. the data must be re-fetched).
function fileHasChanged(localDoc, remoteDoc, filename) {
  const localAtt = localDoc._attachments && localDoc._attachments[filename];
  return !localAtt ||
    localAtt.digest !== remoteDoc._attachments[filename].digest;
}
11731
// Fetches the binary data of every attachment on `doc` (at the doc's own
// revision), resolving to an array ordered like Object.keys(doc._attachments).
function getDocAttachments(db, doc) {
  const fetchOne = function (filename) {
    return db.getAttachment(doc._id, filename, {rev: doc._rev});
  };
  return Promise.all(Object.keys(doc._attachments).map(fetchOne));
}
11738
// Resolves attachment data for `doc`, avoiding needless downloads: when
// replicating remote -> local, attachments whose digest is unchanged are read
// from the local target copy instead of re-fetched from the remote source.
// Falls back to fetching everything from the source when the target has no
// copy of the doc (404) or when the remote->local optimization doesn't apply.
function getDocAttachmentsFromTargetOrSource(target, src, doc) {
  const checkTargetFirst = isRemote(src) && !isRemote(target);
  if (!checkTargetFirst) {
    return getDocAttachments(src, doc);
  }

  const filenames = Object.keys(doc._attachments);
  return target.get(doc._id).then(function (localDoc) {
    const fetches = filenames.map(function (filename) {
      return fileHasChanged(localDoc, doc, filename)
        ? src.getAttachment(doc._id, filename)
        : target.getAttachment(localDoc._id, filename);
    });
    return Promise.all(fetches);
  }).catch(function (error) {
    /* istanbul ignore if */
    if (error.status !== 404) {
      throw error;
    }

    // no local copy of the doc at all -- fetch everything from the source
    return getDocAttachments(src, doc);
  });
}
11764
// Translates a revs-diff result ({docId: {missing: [revs]}}) into the options
// object bulkGet expects: one {id, rev} request per missing revision, with
// revision histories and latest-leaf resolution enabled.
function createBulkGetOpts(diffs) {
  const requests = [];
  for (const id of Object.keys(diffs)) {
    for (const rev of diffs[id].missing) {
      requests.push({id, rev});
    }
  }

  return {
    docs: requests,
    revs: true,
    latest: true
  };
}
11783
11784//
11785// Fetch all the documents from the src as described in the "diffs",
11786// which is a mapping of docs IDs to revisions. If the state ever
11787// changes to "cancelled", then the returned promise will be rejected.
11788// Else it will be resolved with a list of fetched documents.
11789//
11790function getDocs(src, target, diffs, state) {
11791 diffs = clone(diffs); // we do not need to modify this
11792
11793 var resultDocs = [],
11794 ok = true;
11795
11796 function getAllDocs() {
11797
11798 var bulkGetOpts = createBulkGetOpts(diffs);
11799
11800 if (!bulkGetOpts.docs.length) { // optimization: skip empty requests
11801 return;
11802 }
11803
11804 return src.bulkGet(bulkGetOpts).then(function (bulkGetResponse) {
11805 /* istanbul ignore if */
11806 if (state.cancelled) {
11807 throw new Error('cancelled');
11808 }
11809 return Promise.all(bulkGetResponse.results.map(function (bulkGetInfo) {
11810 return Promise.all(bulkGetInfo.docs.map(function (doc) {
11811 var remoteDoc = doc.ok;
11812
11813 if (doc.error) {
11814 // when AUTO_COMPACTION is set, docs can be returned which look
11815 // like this: {"missing":"1-7c3ac256b693c462af8442f992b83696"}
11816 ok = false;
11817 }
11818
11819 if (!remoteDoc || !remoteDoc._attachments) {
11820 return remoteDoc;
11821 }
11822
11823 return getDocAttachmentsFromTargetOrSource(target, src, remoteDoc).then((attachments) => {
11824 var filenames = Object.keys(remoteDoc._attachments);
11825 attachments.forEach(function (attachment, i) {
11826 var att = remoteDoc._attachments[filenames[i]];
11827 delete att.stub;
11828 delete att.length;
11829 att.data = attachment;
11830 });
11831
11832 return remoteDoc;
11833 });
11834 }));
11835 }))
11836
11837 .then(function (results) {
11838 resultDocs = resultDocs.concat(results.flat().filter(Boolean));
11839 });
11840 });
11841 }
11842
11843 function returnResult() {
11844 return { ok, docs:resultDocs };
11845 }
11846
11847 return Promise.resolve()
11848 .then(getAllDocs)
11849 .then(returnResult);
11850}
11851
// version stamp written into checkpoint docs so formats can be told apart
var CHECKPOINT_VERSION = 1;
// identifies PouchDB as the agent that wrote a checkpoint doc
var REPLICATOR = "pouchdb";
// This is an arbitrary number to limit the
// amount of replication history we save in the checkpoint.
// If we save too much, the checkpoint docs will become very big,
// if we save fewer, we'll run a greater risk of having to
// read all the changes from 0 when checkpoint PUTs fail
// CouchDB 2.0 has a more involved history pruning,
// but let's go for the simple version for now.
var CHECKPOINT_HISTORY_SIZE = 5;
// sequence to fall back to when no usable checkpoint exists
var LOWEST_SEQ = 0;
11863
// Reads (or initializes, on 404) the checkpoint doc `id` on `db` and records
// `checkpoint` (a sequence) for `session`, pruning history to
// CHECKPOINT_HISTORY_SIZE and retrying the whole operation on 409 conflicts.
// Resolves without writing if the replication was cancelled or the
// checkpoint is unchanged.
function updateCheckpoint(db, id, checkpoint, session, returnValue) {
  return db.get(id).catch(function (err) {
    if (err.status === 404) {
      if (db.adapter === 'http' || db.adapter === 'https') {
        // suppress the scary-looking 404 in the console for remote DBs
        explainError(
          404, 'PouchDB is just checking if a remote checkpoint exists.'
        );
      }
      // first checkpoint for this replication: start from an empty history
      return {
        session_id: session,
        _id: id,
        history: [],
        replicator: REPLICATOR,
        version: CHECKPOINT_VERSION
      };
    }
    throw err;
  }).then(function (doc) {
    if (returnValue.cancelled) {
      return;
    }

    // if the checkpoint has not changed, do not update
    if (doc.last_seq === checkpoint) {
      return;
    }

    // Filter out current entry for this replication
    doc.history = (doc.history || []).filter(function (item) {
      return item.session_id !== session;
    });

    // Add the latest checkpoint to history
    doc.history.unshift({
      last_seq: checkpoint,
      session_id: session
    });

    // Just take the last pieces in history, to
    // avoid really big checkpoint docs.
    // see comment on history size above
    doc.history = doc.history.slice(0, CHECKPOINT_HISTORY_SIZE);

    doc.version = CHECKPOINT_VERSION;
    doc.replicator = REPLICATOR;

    doc.session_id = session;
    doc.last_seq = checkpoint;

    return db.put(doc).catch(function (err) {
      if (err.status === 409) {
        // retry; someone is trying to write a checkpoint simultaneously
        return updateCheckpoint(db, id, checkpoint, session, returnValue);
      }
      throw err;
    });
  });
}
11922
// Manages the replication checkpoint docs kept on the source and/or target
// database under the shared doc id `id`. opts.writeSourceCheckpoint and
// opts.writeTargetCheckpoint control which side(s) checkpoints are persisted
// to; both default to true. `returnValue` carries the replication's
// cancellation flag.
class CheckpointerInternal {
  constructor(src, target, id, returnValue, opts = {
    writeSourceCheckpoint: true,
    writeTargetCheckpoint: true,
  }) {
    this.src = src;
    this.target = target;
    this.id = id;
    this.returnValue = returnValue;
    this.opts = opts;

    // a caller-supplied opts object may omit either flag; fill in defaults
    if (typeof opts.writeSourceCheckpoint === "undefined") {
      opts.writeSourceCheckpoint = true;
    }

    if (typeof opts.writeTargetCheckpoint === "undefined") {
      opts.writeTargetCheckpoint = true;
    }
  }

  // Persists `checkpoint` for `session`: target first, then source.
  writeCheckpoint(checkpoint, session) {
    var self = this;
    return this.updateTarget(checkpoint, session).then(function () {
      return self.updateSource(checkpoint, session);
    });
  }

  // Writes the checkpoint on the target, or no-ops (resolving true) when
  // target checkpointing is disabled.
  updateTarget(checkpoint, session) {
    if (this.opts.writeTargetCheckpoint) {
      return updateCheckpoint(this.target, this.id, checkpoint,
        session, this.returnValue);
    } else {
      return Promise.resolve(true);
    }
  }

  // Writes the checkpoint on the source; a forbidden error permanently
  // disables source checkpointing (read-only source) instead of failing.
  updateSource(checkpoint, session) {
    if (this.opts.writeSourceCheckpoint) {
      var self = this;
      return updateCheckpoint(this.src, this.id, checkpoint,
        session, this.returnValue)
        .catch(function (err) {
          if (isForbiddenError(err)) {
            self.opts.writeSourceCheckpoint = false;
            return true;
          }
          throw err;
        });
    } else {
      return Promise.resolve(true);
    }
  }

  // Determines the sequence to resume replication from by comparing the
  // source and target checkpoint docs, falling back to LOWEST_SEQ whenever
  // a usable common checkpoint cannot be established.
  getCheckpoint() {
    var self = this;

    // neither side persists checkpoints: always start from the beginning
    if (!self.opts.writeSourceCheckpoint && !self.opts.writeTargetCheckpoint) {
      return Promise.resolve(LOWEST_SEQ);
    }

    // source-only checkpointing: the source doc alone decides
    if (self.opts && self.opts.writeSourceCheckpoint && !self.opts.writeTargetCheckpoint) {
      return self.src.get(self.id).then(function (sourceDoc) {
        return sourceDoc.last_seq || LOWEST_SEQ;
      }).catch(function (err) {
        /* istanbul ignore if */
        if (err.status !== 404) {
          throw err;
        }
        return LOWEST_SEQ;
      });
    }

    return self.target.get(self.id).then(function (targetDoc) {
      // target-only checkpointing: the target doc alone decides
      if (self.opts && self.opts.writeTargetCheckpoint && !self.opts.writeSourceCheckpoint) {
        return targetDoc.last_seq || LOWEST_SEQ;
      }

      return self.src.get(self.id).then(function (sourceDoc) {
        // Since we can't migrate an old version doc to a new one
        // (no session id), we just go with the lowest seq in this case
        /* istanbul ignore if */
        if (targetDoc.version !== sourceDoc.version) {
          return LOWEST_SEQ;
        }

        var version;
        if (targetDoc.version) {
          version = targetDoc.version.toString();
        } else {
          version = "undefined";
        }

        // dispatch to the version-specific log comparison strategy
        if (version in comparisons) {
          return comparisons[version](targetDoc, sourceDoc);
        }
        /* istanbul ignore next */
        return LOWEST_SEQ;
      }, function (err) {
        if (err.status === 404 && targetDoc.last_seq) {
          // target has a checkpoint but the source lost its copy: try to
          // recreate a baseline doc on the source and restart from 0
          return self.src.put({
            _id: self.id,
            last_seq: LOWEST_SEQ
          }).then(function () {
            return LOWEST_SEQ;
          }, function (err) {
            if (isForbiddenError(err)) {
              // read-only source: stop writing there and trust the target
              self.opts.writeSourceCheckpoint = false;
              return targetDoc.last_seq;
            }
            /* istanbul ignore next */
            return LOWEST_SEQ;
          });
        }
        throw err;
      });
    }).catch(function (err) {
      if (err.status !== 404) {
        throw err;
      }
      return LOWEST_SEQ;
    });
  }
}
12046
// Maps a replication-log "version" to the strategy used by
// CheckpointerInternal#getCheckpoint to reconcile the two checkpoint docs.
var comparisons = {
  // Legacy (pre-versioned) checkpoint docs: the seqs must collate as
  // equal, otherwise fall back to 0 (replicate from the start).
  "undefined": function (targetDoc, sourceDoc) {
    var seqsMatch = collate(targetDoc.last_seq, sourceDoc.last_seq) === 0;
    /* istanbul ignore else */
    if (seqsMatch) {
      return sourceDoc.last_seq;
    }
    return 0;
  },
  // Version 1: CouchDB-style session-id / history comparison.
  "1": function (targetDoc, sourceDoc) {
    return compareReplicationLogs(sourceDoc, targetDoc).last_seq;
  }
};
12061
// This checkpoint comparison is ported from CouchDB's source;
// it comes from here:
// https://github.com/apache/couchdb-couch-replicator/blob/master/src/couch_replicator.erl#L863-L906
12065
// Compare two replication logs (ported from CouchDB). If both sides
// recorded the same session we can trust the source's last_seq directly;
// otherwise walk both histories looking for a common ancestor.
function compareReplicationLogs(srcDoc, tgtDoc) {
  var sameSession = srcDoc.session_id === tgtDoc.session_id;
  if (!sameSession) {
    return compareReplicationHistory(srcDoc.history, tgtDoc.history);
  }
  return {
    last_seq: srcDoc.last_seq,
    history: srcDoc.history
  };
}
12076
// Walk both histories in lock-step (the Erlang tail-call loop from the
// CouchDB replicator, expressed as recursion) looking for the most recent
// session both sides have seen.
function compareReplicationHistory(sourceHistory, targetHistory) {
  var sourceHead = sourceHistory[0];
  var sourceTail = sourceHistory.slice(1);
  var targetHead = targetHistory[0];
  var targetTail = targetHistory.slice(1);

  if (!sourceHead || targetHistory.length === 0) {
    // one side ran out of history: no common ancestor, start from scratch
    return {
      last_seq: LOWEST_SEQ,
      history: []
    };
  }

  /* istanbul ignore if */
  if (hasSessionId(sourceHead.session_id, targetHistory)) {
    // the target knows the source's newest session: resume from there
    return {
      last_seq: sourceHead.last_seq,
      history: sourceHistory
    };
  }

  if (hasSessionId(targetHead.session_id, sourceTail)) {
    // the source knows the target's newest session: resume from there
    return {
      last_seq: targetHead.last_seq,
      history: targetTail
    };
  }

  return compareReplicationHistory(sourceTail, targetTail);
}
12111
// True if any entry in `history` was written by session `sessionId`.
// A falsy sessionId never matches anything.
function hasSessionId(sessionId, history) {
  if (!sessionId) {
    return false;
  }
  for (var i = 0; i < history.length; i++) {
    if (history[i].session_id === sessionId) {
      return true;
    }
  }
  return false;
}
12126
// Treat any numeric HTTP 4xx status as "forbidden" for checkpoint writes:
// the remote rejected the request, so retrying the same write won't help.
function isForbiddenError(err) {
  var status = err.status;
  if (typeof status !== 'number') {
    return false;
  }
  return status >= 400 && status < 500;
}
12130
// Factory wrapper: whether invoked with or without `new`, callers receive
// a CheckpointerInternal instance (Checkpointer does not share its
// prototype, so `this` is never a CheckpointerInternal here).
function Checkpointer(src, target, id, returnValue, opts) {
  if (!(this instanceof CheckpointerInternal)) {
    return new CheckpointerInternal(src, target, id, returnValue, opts);
  }
  // NOTE(review): this branch appears unreachable, and returning the
  // constructor function itself looks unintended — confirm before
  // relying on it.
  return Checkpointer;
}
12137
var STARTING_BACK_OFF = 0;

// Handle a replication error: either surface it immediately (retry
// disabled) or pause the replication and schedule a retry after a delay
// computed by opts.back_off_function.
function backOff(opts, returnValue, error, callback) {
  if (opts.retry === false) {
    // no retries requested: fail the replication outright
    returnValue.emit('error', error);
    returnValue.removeAllListeners();
    return;
  }
  /* istanbul ignore if */
  if (typeof opts.back_off_function !== 'function') {
    opts.back_off_function = defaultBackOff;
  }
  returnValue.emit('requestError', error);
  if (returnValue.state === 'active' || returnValue.state === 'pending') {
    returnValue.emit('paused', error);
    returnValue.state = 'stopped';
    // once the replication becomes active again, restart the back-off
    // sequence from zero...
    var resetBackOff = function backoffTimeSet() {
      opts.current_back_off = STARTING_BACK_OFF;
    };
    // ...but drop that reset hook if we pause again first
    var removeResetHook = function removeBackOffTimeSet() {
      returnValue.removeListener('active', resetBackOff);
    };
    returnValue.once('paused', removeResetHook);
    returnValue.once('active', resetBackOff);
  }

  opts.current_back_off = opts.current_back_off || STARTING_BACK_OFF;
  opts.current_back_off = opts.back_off_function(opts.current_back_off);
  setTimeout(callback, opts.current_back_off);
}
12168
// Return a copy of `queryParams` whose keys are inserted in collation
// order, so JSON.stringify yields a stable string for replication-id
// generation regardless of original key insertion order.
function sortObjectPropertiesByKey(queryParams) {
  var sortedKeys = Object.keys(queryParams).sort(collate);
  var sorted = {};
  sortedKeys.forEach(function (key) {
    sorted[key] = queryParams[key];
  });
  return sorted;
}
12175
// Generate a unique id particular to this replication, derived from both
// database ids plus every option that changes what gets replicated
// (filter, view, query params, doc_ids, selector).
// Not guaranteed to align perfectly with CouchDB's rep ids.
function generateReplicationId(src, target, opts) {
  var docIds = opts.doc_ids ? opts.doc_ids.sort(collate) : '';
  var filterFun = opts.filter ? opts.filter.toString() : '';
  var queryParams = '';
  var filterViewName = '';
  var selector = '';

  // possibility for checkpoints to be lost here as behaviour of
  // JSON.stringify is not stable (see #6226)
  /* istanbul ignore if */
  if (opts.selector) {
    selector = JSON.stringify(opts.selector);
  }

  if (opts.filter) {
    if (opts.query_params) {
      // sort keys first so equivalent param objects hash identically
      queryParams = JSON.stringify(sortObjectPropertiesByKey(opts.query_params));
    }
    if (opts.filter === '_view') {
      filterViewName = opts.view.toString();
    }
  }

  return Promise.all([src.id(), target.id()]).then(function (ids) {
    var queryData = ids[0] + ids[1] + filterFun + filterViewName +
      queryParams + docIds + selector;
    return new Promise(function (resolve) {
      binaryMd5(queryData, resolve);
    });
  }).then(function (md5sum) {
    // can't use straight-up md5 alphabet, because
    // the char '/' is interpreted as being for attachments,
    // and + is also not url-safe
    return '_local/' + md5sum.replace(/\//g, '.').replace(/\+/g, '_');
  });
}
12214
// Core replication loop: streams changes from `src`, asks `target` which
// revisions it is missing, copies those docs over in batches, and writes a
// checkpoint after each batch. `returnValue` is the Replication emitter
// handed to callers; `result` (optional) carries accumulated stats across
// retries.
function replicate(src, target, opts, returnValue, result) {
  var batches = []; // list of batches to be processed
  var currentBatch; // the batch currently being processed
  var pendingBatch = {
    seq: 0,
    changes: [],
    docs: []
  }; // next batch, not yet ready to be processed
  var writingCheckpoint = false; // true while checkpoint is being written
  var changesCompleted = false; // true when all changes received
  var replicationCompleted = false; // true when replication has completed
  // initial_last_seq is the state of the source db before
  // replication started, and it is _not_ updated during
  // replication or used anywhere else, as opposed to last_seq
  var initial_last_seq = 0;
  var last_seq = 0;
  var continuous = opts.continuous || opts.live || false;
  var batch_size = opts.batch_size || 100;
  var batches_limit = opts.batches_limit || 10;
  var style = opts.style || 'all_docs';
  var changesPending = false; // true while src.changes is running
  var doc_ids = opts.doc_ids;
  var selector = opts.selector;
  var repId;
  var checkpointer;
  var changedDocs = [];
  // Like couchdb, every replication gets a unique session id
  var session = uuid$1();
  var taskId;

  result = result || {
    ok: true,
    start_time: new Date().toISOString(),
    docs_read: 0,
    docs_written: 0,
    doc_write_failures: 0,
    errors: []
  };

  var changesOpts = {};
  returnValue.ready(src, target);

  // Lazily create the checkpointer; requires the replication id, which is
  // derived asynchronously from both db ids and the filter options.
  function initCheckpointer() {
    if (checkpointer) {
      return Promise.resolve();
    }
    return generateReplicationId(src, target, opts).then(function (res) {
      repId = res;

      // opts.checkpoint selects which side(s) store checkpoint docs
      var checkpointOpts = {};
      if (opts.checkpoint === false) {
        checkpointOpts = { writeSourceCheckpoint: false, writeTargetCheckpoint: false };
      } else if (opts.checkpoint === 'source') {
        checkpointOpts = { writeSourceCheckpoint: true, writeTargetCheckpoint: false };
      } else if (opts.checkpoint === 'target') {
        checkpointOpts = { writeSourceCheckpoint: false, writeTargetCheckpoint: true };
      } else {
        checkpointOpts = { writeSourceCheckpoint: true, writeTargetCheckpoint: true };
      }

      checkpointer = new Checkpointer(src, target, repId, returnValue, checkpointOpts);
    });
  }

  // Write currentBatch.docs to the target (new_edits:false keeps source
  // revisions intact), tallying successes/failures and emitting 'denied'
  // for auth rejections instead of failing the whole batch.
  function writeDocs() {
    changedDocs = [];

    if (currentBatch.docs.length === 0) {
      return;
    }
    var docs = currentBatch.docs;
    var bulkOpts = {timeout: opts.timeout};
    return target.bulkDocs({docs, new_edits: false}, bulkOpts).then(function (res) {
      /* istanbul ignore if */
      if (returnValue.cancelled) {
        completeReplication();
        throw new Error('cancelled');
      }

      // `res` doesn't include full documents (which live in `docs`), so we create a map of
      // (id -> error), and check for errors while iterating over `docs`
      var errorsById = Object.create(null);
      res.forEach(function (res) {
        if (res.error) {
          errorsById[res.id] = res;
        }
      });

      var errorsNo = Object.keys(errorsById).length;
      result.doc_write_failures += errorsNo;
      result.docs_written += docs.length - errorsNo;

      docs.forEach(function (doc) {
        var error = errorsById[doc._id];
        if (error) {
          result.errors.push(error);
          // Normalize error name. i.e. 'Unauthorized' -> 'unauthorized' (eg Sync Gateway)
          var errorName = (error.name || '').toLowerCase();
          if (errorName === 'unauthorized' || errorName === 'forbidden') {
            returnValue.emit('denied', clone(error));
          } else {
            throw error;
          }
        } else {
          changedDocs.push(doc);
        }
      });

    }, function (err) {
      result.doc_write_failures += docs.length;
      throw err;
    });
  }

  // Finalize the current batch: record last_seq, emit 'change', update
  // active-task progress, write the checkpoint, then pick up the next
  // batch.
  function finishBatch() {
    if (currentBatch.error) {
      throw new Error('There was a problem getting docs.');
    }
    result.last_seq = last_seq = currentBatch.seq;
    var outResult = clone(result);
    if (changedDocs.length) {
      outResult.docs = changedDocs;
      // Attach 'pending' property if server supports it (CouchDB 2.0+)
      /* istanbul ignore if */
      if (typeof currentBatch.pending === 'number') {
        outResult.pending = currentBatch.pending;
        delete currentBatch.pending;
      }
      returnValue.emit('change', outResult);
    }
    writingCheckpoint = true;

    // fire-and-forget progress update for the active-tasks API
    src.info().then(function (info) {
      var task = src.activeTasks.get(taskId);
      if (!currentBatch || !task) {
        return;
      }

      var completed = task.completed_items || 0;
      var total_items = parseInt(info.update_seq, 10) - parseInt(initial_last_seq, 10);
      src.activeTasks.update(taskId, {
        completed_items: completed + currentBatch.changes.length,
        total_items
      });
    });

    return checkpointer.writeCheckpoint(currentBatch.seq,
      session).then(function () {
      returnValue.emit('checkpoint', { 'checkpoint': currentBatch.seq });
      writingCheckpoint = false;
      /* istanbul ignore if */
      if (returnValue.cancelled) {
        completeReplication();
        throw new Error('cancelled');
      }
      currentBatch = undefined;
      getChanges();
    }).catch(function (err) {
      onCheckpointError(err);
      throw err;
    });
  }

  // Ask the target which revisions from the current batch it is missing.
  function getDiffs() {
    var diff = {};
    currentBatch.changes.forEach(function (change) {
      returnValue.emit('checkpoint', { 'revs_diff': change });
      // Couchbase Sync Gateway emits these, but we can ignore them
      /* istanbul ignore if */
      if (change.id === "_user/") {
        return;
      }
      diff[change.id] = change.changes.map(function (x) {
        return x.rev;
      });
    });
    return target.revsDiff(diff).then(function (diffs) {
      /* istanbul ignore if */
      if (returnValue.cancelled) {
        completeReplication();
        throw new Error('cancelled');
      }
      // currentBatch.diffs elements are deleted as the documents are written
      currentBatch.diffs = diffs;
    });
  }

  // Fetch from the source the docs/revisions the target reported missing.
  function getBatchDocs() {
    return getDocs(src, target, currentBatch.diffs, returnValue).then(function (got) {
      currentBatch.error = !got.ok;
      got.docs.forEach(function (doc) {
        delete currentBatch.diffs[doc._id];
        result.docs_read++;
        currentBatch.docs.push(doc);
      });
    });
  }

  // Pop the next queued batch and run it through the
  // diff -> fetch -> write -> checkpoint pipeline.
  function startNextBatch() {
    if (returnValue.cancelled || currentBatch) {
      return;
    }
    if (batches.length === 0) {
      processPendingBatch(true);
      return;
    }
    currentBatch = batches.shift();
    returnValue.emit('checkpoint', { 'start_next_batch': currentBatch.seq });
    getDiffs()
      .then(getBatchDocs)
      .then(writeDocs)
      .then(finishBatch)
      .then(startNextBatch)
      .catch(function (err) {
        abortReplication('batch processing terminated with error', err);
      });
  }


  // Promote the pending batch to the work queue once it is full (or
  // forced by `immediate`/end-of-changes); also detects idle/completed
  // states and drives the 'paused'/'active' state transitions.
  function processPendingBatch(immediate) {
    if (pendingBatch.changes.length === 0) {
      if (batches.length === 0 && !currentBatch) {
        if ((continuous && changesOpts.live) || changesCompleted) {
          returnValue.state = 'pending';
          returnValue.emit('paused');
        }
        if (changesCompleted) {
          completeReplication();
        }
      }
      return;
    }
    if (
      immediate ||
      changesCompleted ||
      pendingBatch.changes.length >= batch_size
    ) {
      batches.push(pendingBatch);
      pendingBatch = {
        seq: 0,
        changes: [],
        docs: []
      };
      if (returnValue.state === 'pending' || returnValue.state === 'stopped') {
        returnValue.state = 'active';
        returnValue.emit('active');
      }
      startNextBatch();
    }
  }


  // Drop all queued work and finish the replication with `err`.
  function abortReplication(reason, err) {
    if (replicationCompleted) {
      return;
    }
    if (!err.message) {
      err.message = reason;
    }
    result.ok = false;
    result.status = 'aborting';
    batches = [];
    pendingBatch = {
      seq: 0,
      changes: [],
      docs: []
    };
    completeReplication(err);
  }


  // Emit the final 'complete'/'error' (or schedule a retry via backOff)
  // exactly once; deferred while a checkpoint write is still in flight.
  function completeReplication(fatalError) {
    if (replicationCompleted) {
      return;
    }
    /* istanbul ignore if */
    if (returnValue.cancelled) {
      result.status = 'cancelled';
      if (writingCheckpoint) {
        return;
      }
    }
    result.status = result.status || 'complete';
    result.end_time = new Date().toISOString();
    result.last_seq = last_seq;
    replicationCompleted = true;

    src.activeTasks.remove(taskId, fatalError);

    if (fatalError) {
      // need to extend the error because Firefox considers ".result" read-only
      fatalError = createError(fatalError);
      fatalError.result = result;

      // Normalize error name. i.e. 'Unauthorized' -> 'unauthorized' (eg Sync Gateway)
      var errorName = (fatalError.name || '').toLowerCase();
      if (errorName === 'unauthorized' || errorName === 'forbidden') {
        // auth errors are terminal: no retry
        returnValue.emit('error', fatalError);
        returnValue.removeAllListeners();
      } else {
        backOff(opts, returnValue, fatalError, function () {
          replicate(src, target, opts, returnValue);
        });
      }
    } else {
      returnValue.emit('complete', result);
      returnValue.removeAllListeners();
    }
  }

  // Handle one row from the changes feed: apply the client-side filter,
  // then queue the change on the pending batch.
  function onChange(change, pending, lastSeq) {
    /* istanbul ignore if */
    if (returnValue.cancelled) {
      return completeReplication();
    }
    // Attach 'pending' property if server supports it (CouchDB 2.0+)
    /* istanbul ignore if */
    if (typeof pending === 'number') {
      pendingBatch.pending = pending;
    }

    var filter = filterChange(opts)(change);
    if (!filter) {
      // update processed items count by 1
      var task = src.activeTasks.get(taskId);
      if (task) {
        // we can assume that task exists here? shouldn't be deleted by here.
        var completed = task.completed_items || 0;
        src.activeTasks.update(taskId, {completed_items: ++completed});
      }
      return;
    }
    pendingBatch.seq = change.seq || lastSeq;
    pendingBatch.changes.push(change);
    returnValue.emit('checkpoint', { 'pending_batch': pendingBatch.seq });
    nextTick(function () {
      processPendingBatch(batches.length === 0 && changesOpts.live);
    });
  }


  // One changes() request finished: either page through more changes or
  // switch to live mode / wind the replication down.
  function onChangesComplete(changes) {
    changesPending = false;
    /* istanbul ignore if */
    if (returnValue.cancelled) {
      return completeReplication();
    }

    // if no results were returned then we're done,
    // else fetch more
    if (changes.results.length > 0) {
      changesOpts.since = changes.results[changes.results.length - 1].seq;
      getChanges();
      processPendingBatch(true);
    } else {

      var complete = function () {
        if (continuous) {
          changesOpts.live = true;
          getChanges();
        } else {
          changesCompleted = true;
        }
        processPendingBatch(true);
      };

      // update the checkpoint so we start from the right seq next time
      if (!currentBatch && changes.results.length === 0) {
        writingCheckpoint = true;
        checkpointer.writeCheckpoint(changes.last_seq,
          session).then(function () {
          writingCheckpoint = false;
          result.last_seq = last_seq = changes.last_seq;
          if (returnValue.cancelled) {
            completeReplication();
            throw new Error('cancelled');
          } else {
            complete();
          }
        })
        .catch(onCheckpointError);
      } else {
        complete();
      }
    }
  }


  // The changes feed errored out: abort (or retry, via completeReplication
  // -> backOff) unless we were cancelled anyway.
  function onChangesError(err) {
    changesPending = false;
    /* istanbul ignore if */
    if (returnValue.cancelled) {
      return completeReplication();
    }
    abortReplication('changes rejected', err);
  }


  // Start (or restart) a changes() request against the source, unless one
  // is already running, all changes are in, or the batch queue is full.
  function getChanges() {
    if (!(
      !changesPending &&
      !changesCompleted &&
      batches.length < batches_limit
    )) {
      return;
    }
    changesPending = true;
    function abortChanges() {
      changes.cancel();
    }
    function removeListener() {
      returnValue.removeListener('cancel', abortChanges);
    }

    if (returnValue._changes) { // remove old changes() and listeners
      returnValue.removeListener('cancel', returnValue._abortChanges);
      returnValue._changes.cancel();
    }
    returnValue.once('cancel', abortChanges);

    var changes = src.changes(changesOpts)
      .on('change', onChange);
    changes.then(removeListener, removeListener);
    changes.then(onChangesComplete)
      .catch(onChangesError);

    if (opts.retry) {
      // save for later so we can cancel if necessary
      returnValue._changes = changes;
      returnValue._abortChanges = abortChanges;
    }
  }

  // Register this replication with the source's active-tasks API, sized by
  // the number of sequences remaining after `checkpoint`.
  function createTask(checkpoint) {
    return src.info().then(function (info) {
      var total_items = typeof opts.since === 'undefined' ?
        parseInt(info.update_seq, 10) - parseInt(checkpoint, 10) :
        parseInt(info.update_seq, 10);

      taskId = src.activeTasks.add({
        name: `${continuous ? 'continuous ' : ''}replication from ${info.db_name}` ,
        total_items,
      });

      return checkpoint;
    });
  }

  // Resolve the starting checkpoint, then open the changes feed with all
  // of the user-supplied feed options applied.
  function startChanges() {
    initCheckpointer().then(function () {
      /* istanbul ignore if */
      if (returnValue.cancelled) {
        completeReplication();
        return;
      }
      return checkpointer.getCheckpoint().then(createTask).then(function (checkpoint) {
        last_seq = checkpoint;
        initial_last_seq = checkpoint;
        changesOpts = {
          since: last_seq,
          limit: batch_size,
          batch_size,
          style,
          doc_ids,
          selector,
          return_docs: true // required so we know when we're done
        };
        if (opts.filter) {
          if (typeof opts.filter !== 'string') {
            // required for the client-side filter in onChange
            changesOpts.include_docs = true;
          } else { // ddoc filter
            changesOpts.filter = opts.filter;
          }
        }
        if ('heartbeat' in opts) {
          changesOpts.heartbeat = opts.heartbeat;
        }
        if ('timeout' in opts) {
          changesOpts.timeout = opts.timeout;
        }
        if (opts.query_params) {
          changesOpts.query_params = opts.query_params;
        }
        if (opts.view) {
          changesOpts.view = opts.view;
        }
        getChanges();
      });
    }).catch(function (err) {
      abortReplication('getCheckpoint rejected with ', err);
    });
  }

  /* istanbul ignore next */
  function onCheckpointError(err) {
    writingCheckpoint = false;
    abortReplication('writeCheckpoint completed with error', err);
  }

  /* istanbul ignore if */
  if (returnValue.cancelled) { // cancelled immediately
    completeReplication();
    return;
  }

  // attach the caller-facing listeners only once, even across retries
  if (!returnValue._addedListeners) {
    returnValue.once('cancel', completeReplication);

    if (typeof opts.complete === 'function') {
      returnValue.once('error', opts.complete);
      returnValue.once('complete', function (result) {
        opts.complete(null, result);
      });
    }
    returnValue._addedListeners = true;
  }

  // an explicit `since` overrides (and overwrites) the stored checkpoint
  if (typeof opts.since === 'undefined') {
    startChanges();
  } else {
    initCheckpointer().then(function () {
      writingCheckpoint = true;
      return checkpointer.writeCheckpoint(opts.since, session);
    }).then(function () {
      writingCheckpoint = false;
      /* istanbul ignore if */
      if (returnValue.cancelled) {
        completeReplication();
        return;
      }
      last_seq = opts.since;
      startChanges();
    }).catch(onCheckpointError);
  }
}
12751
// We create a basic promise so the caller can cancel the replication possibly
// before we have actually started listening to changes etc
class Replication extends EE {
  constructor() {
    super();
    this.cancelled = false; // set once cancel() is called
    this.state = 'pending'; // 'pending' | 'active' | 'stopped' | 'cancelled'
    // settle on the first 'complete'/'error' event, which makes the
    // replication awaitable / then-able
    const promise = new Promise((fulfill, reject) => {
      this.once('complete', fulfill);
      this.once('error', reject);
    });
    this.then = function (resolve, reject) {
      return promise.then(resolve, reject);
    };
    this.catch = function (reject) {
      return promise.catch(reject);
    };
    // As we allow error handling via "error" event as well,
    // put a stub in here so that rejecting never throws UnhandledError.
    this.catch(function () {});
  }

  // Flag the replication as cancelled and notify the replicate() loop via
  // the 'cancel' event.
  cancel() {
    this.cancelled = true;
    this.state = 'cancelled';
    this.emit('cancel');
  }

  // Wire cancellation to either database being destroyed; idempotent
  // (replicate() may be re-entered on retry).
  ready(src, target) {
    if (this._readyCalled) {
      return;
    }
    this._readyCalled = true;

    const onDestroy = () => {
      this.cancel();
    };
    src.once('destroyed', onDestroy);
    target.once('destroyed', onDestroy);
    // drop the destroy hooks once the replication settles either way
    function cleanup() {
      src.removeListener('destroyed', onDestroy);
      target.removeListener('destroyed', onDestroy);
    }
    this.once('complete', cleanup);
    this.once('error', cleanup);
  }
}
12799
// Coerce a database name into a database instance using the constructor
// carried on `opts`; anything that is already a database passes through.
function toPouch(db, opts) {
  if (typeof db !== 'string') {
    return db;
  }
  var PouchConstructor = opts.PouchConstructor;
  return new PouchConstructor(db, opts);
}
12808
// Entry point behind PouchDB.replicate(): normalizes the (opts, callback)
// argument shuffle, validates doc_ids, fills in option defaults, and kicks
// off a one-way replication, returning its Replication handle.
// Note: must stay a `function` (not arrow) — `this` is the PouchDB class.
function replicateWrapper(src, target, opts, callback) {

  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }
  if (typeof opts === 'undefined') {
    opts = {};
  }

  var docIds = opts.doc_ids;
  if (docIds && !Array.isArray(docIds)) {
    throw createError(BAD_REQUEST,
      "`doc_ids` filter parameter is not a list.");
  }

  opts.complete = callback;
  opts = clone(opts);
  opts.continuous = opts.continuous || opts.live;
  opts.retry = 'retry' in opts ? opts.retry : false;
  opts.PouchConstructor = opts.PouchConstructor || this;

  var ret = new Replication(opts);
  // argument evaluation order keeps src resolved before target
  replicate(toPouch(src, opts), toPouch(target, opts), opts, ret);
  return ret;
}
12835
// Entry point behind PouchDB.sync(): normalizes arguments and starts a
// bidirectional replication via the Sync coordinator.
// Note: must stay a `function` (not arrow) — `this` is the PouchDB class.
function sync(src, target, opts, callback) {
  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }
  if (typeof opts === 'undefined') {
    opts = {};
  }
  opts = clone(opts);
  opts.PouchConstructor = opts.PouchConstructor || this;
  var srcDb = toPouch(src, opts);
  var targetDb = toPouch(target, opts);
  return new Sync(srcDb, targetDb, opts, callback);
}
12850
// Coordinates a bidirectional replication as a push replication plus a
// pull replication, multiplexing their events ('change', 'denied',
// 'paused', 'active', 'complete', 'error') onto one emitter, tagged with
// a `direction` where applicable.
class Sync extends EE {
  constructor(src, target, opts, callback) {
    super();
    this.canceled = false;

    // per-direction options: opts.push / opts.pull override the shared opts
    const optsPush = opts.push ? Object.assign({}, opts, opts.push) : opts;
    const optsPull = opts.pull ? Object.assign({}, opts, opts.pull) : opts;

    this.push = replicateWrapper(src, target, optsPush);
    this.pull = replicateWrapper(target, src, optsPull);

    this.pushPaused = true;
    this.pullPaused = true;

    // forwarders that tag each sub-replication event with its direction
    const pullChange = (change) => {
      this.emit('change', {
        direction: 'pull',
        change
      });
    };
    const pushChange = (change) => {
      this.emit('change', {
        direction: 'push',
        change
      });
    };
    const pushDenied = (doc) => {
      this.emit('denied', {
        direction: 'push',
        doc
      });
    };
    const pullDenied = (doc) => {
      this.emit('denied', {
        direction: 'pull',
        doc
      });
    };
    // 'paused' is emitted only when BOTH directions are paused...
    const pushPaused = () => {
      this.pushPaused = true;
      /* istanbul ignore if */
      if (this.pullPaused) {
        this.emit('paused');
      }
    };
    const pullPaused = () => {
      this.pullPaused = true;
      /* istanbul ignore if */
      if (this.pushPaused) {
        this.emit('paused');
      }
    };
    // ...and 'active' when one direction resumes while the other is paused
    const pushActive = () => {
      this.pushPaused = false;
      /* istanbul ignore if */
      if (this.pullPaused) {
        this.emit('active', {
          direction: 'push'
        });
      }
    };
    const pullActive = () => {
      this.pullPaused = false;
      /* istanbul ignore if */
      if (this.pushPaused) {
        this.emit('active', {
          direction: 'pull'
        });
      }
    };

    // tracks which forwarders each side has asked to remove, per event
    let removed = {};

    const removeAll = (type) => { // type is 'push' or 'pull'
      return (event, func) => {
        const isChange = event === 'change' &&
          (func === pullChange || func === pushChange);
        const isDenied = event === 'denied' &&
          (func === pullDenied || func === pushDenied);
        const isPaused = event === 'paused' &&
          (func === pullPaused || func === pushPaused);
        const isActive = event === 'active' &&
          (func === pullActive || func === pushActive);

        if (isChange || isDenied || isPaused || isActive) {
          if (!(event in removed)) {
            removed[event] = {};
          }
          removed[event][type] = true;
          if (Object.keys(removed[event]).length === 2) {
            // both push and pull have asked to be removed
            this.removeAllListeners(event);
          }
        }
      };
    };

    // in live mode, completing one direction cancels the other
    if (opts.live) {
      this.push.on('complete', this.pull.cancel.bind(this.pull));
      this.pull.on('complete', this.push.cancel.bind(this.push));
    }

    // subscribe at most once per (emitter, event, listener) triple
    function addOneListener(ee, event, listener) {
      if (ee.listeners(event).indexOf(listener) == -1) {
        ee.on(event, listener);
      }
    }

    // lazily attach the forwarders to the sub-replications only when a
    // caller actually listens for that event on the Sync object
    this.on('newListener', function (event) {
      if (event === 'change') {
        addOneListener(this.pull, 'change', pullChange);
        addOneListener(this.push, 'change', pushChange);
      } else if (event === 'denied') {
        addOneListener(this.pull, 'denied', pullDenied);
        addOneListener(this.push, 'denied', pushDenied);
      } else if (event === 'active') {
        addOneListener(this.pull, 'active', pullActive);
        addOneListener(this.push, 'active', pushActive);
      } else if (event === 'paused') {
        addOneListener(this.pull, 'paused', pullPaused);
        addOneListener(this.push, 'paused', pushPaused);
      }
    });

    // mirror: detach the forwarders when the caller unsubscribes
    this.on('removeListener', function (event) {
      if (event === 'change') {
        this.pull.removeListener('change', pullChange);
        this.push.removeListener('change', pushChange);
      } else if (event === 'denied') {
        this.pull.removeListener('denied', pullDenied);
        this.push.removeListener('denied', pushDenied);
      } else if (event === 'active') {
        this.pull.removeListener('active', pullActive);
        this.push.removeListener('active', pushActive);
      } else if (event === 'paused') {
        this.pull.removeListener('paused', pullPaused);
        this.push.removeListener('paused', pushPaused);
      }
    });

    this.pull.on('removeListener', removeAll('pull'));
    this.push.on('removeListener', removeAll('push'));

    // resolves when both directions complete; on the first error it
    // cancels the other direction and rejects
    const promise = Promise.all([
      this.push,
      this.pull
    ]).then((resp) => {
      const out = {
        push: resp[0],
        pull: resp[1]
      };
      this.emit('complete', out);
      if (callback) {
        callback(null, out);
      }
      this.removeAllListeners();
      return out;
    }, (err) => {
      this.cancel();
      if (callback) {
        // if there's a callback, then the callback can receive
        // the error event
        callback(err);
      } else {
        // if there's no callback, then we're safe to emit an error
        // event, which would otherwise throw an unhandled error
        // due to 'error' being a special event in EventEmitters
        this.emit('error', err);
      }
      this.removeAllListeners();
      if (callback) {
        // no sense throwing if we're already emitting an 'error' event
        throw err;
      }
    });

    this.then = function (success, err) {
      return promise.then(success, err);
    };

    this.catch = function (err) {
      return promise.catch(err);
    };
  }

  // Cancel both directions; idempotent.
  cancel() {
    if (!this.canceled) {
      this.canceled = true;
      this.push.cancel();
      this.pull.cancel();
    }
  }
}
13044
// PouchDB plugin that installs the replication API: the static
// PouchDB.replicate / PouchDB.sync functions plus the per-instance
// db.replicate.{to,from} helpers and db.sync method.
function replication(PouchDB) {
  PouchDB.replicate = replicateWrapper;
  PouchDB.sync = sync;

  // build the {from, to} helper pair bound to a particular db instance
  function buildReplicateMethods(db) {
    return {
      from: function (other, opts, callback) {
        return db.constructor.replicate(other, db, opts, callback);
      },
      to: function (other, opts, callback) {
        return db.constructor.replicate(db, other, opts, callback);
      }
    };
  }

  Object.defineProperty(PouchDB.prototype, 'replicate', {
    get: function () {
      // lazily create and cache the helpers per instance
      if (typeof this.replicateMethods === 'undefined') {
        this.replicateMethods = buildReplicateMethods(this);
      }
      return this.replicateMethods;
    }
  });

  PouchDB.prototype.sync = function (dbName, opts, callback) {
    return this.constructor.sync(this, dbName, opts, callback);
  };
}
13070
13071PouchDB.plugin(IDBPouch)
13072 .plugin(HttpPouch$1)
13073 .plugin(mapreduce)
13074 .plugin(replication);
13075
13076// Pull from src because pouchdb-node/pouchdb-browser themselves
13077
13078module.exports = PouchDB;
13079
13080}).call(this)}).call(this,_dereq_('_process'))
13081},{"_process":2,"events":1,"spark-md5":3,"uuid":4,"vuvuzela":19}]},{},[20])(20)
13082});