// pouchdb-utils — bundled ES module (viewer chrome from the unpkg page removed)
1import Md5 from 'spark-md5';
2import { v4 } from 'uuid';
3import vuvuzela from 'vuvuzela';
4import EE from 'events';
5
// True when `object` is one of the binary payload types PouchDB stores as
// attachment data: an ArrayBuffer, or (where supported) a Blob.
function isBinaryObject(object) {
  if (typeof ArrayBuffer !== 'undefined' && object instanceof ArrayBuffer) {
    return true;
  }
  return typeof Blob !== 'undefined' && object instanceof Blob;
}
10
/**
 * Shallow-copies a binary object.
 * @template {ArrayBuffer | Blob} T
 * @param {T} object - the binary value to copy
 * @returns {T} a new ArrayBuffer or Blob containing the same bytes
 */
function cloneBinaryObject(object) {
  if (object instanceof ArrayBuffer) {
    return object.slice(0);
  }
  // Blob: the three-argument slice preserves the content type on the copy
  return object.slice(0, object.size, object.type);
}
21
// most of this is borrowed from lodash.isPlainObject:
// https://github.com/fis-components/lodash.isplainobject/
// blob/29c358140a74f252aeb08c9eb28bef86f2217d4a/index.js

var funcToString = Function.prototype.toString;
var objectCtorString = funcToString.call(Object);

// True when `value` is a plain object (an `Object`-constructed or
// null-prototype object) rather than a class instance, Date, Map, etc.
function isPlainObject(value) {
  var proto = Object.getPrototypeOf(value);
  /* istanbul ignore if */
  if (proto === null) { // not sure when this happens, but I guess it can
    return true;
  }
  var Ctor = proto.constructor;
  if (typeof Ctor !== 'function') {
    return false;
  }
  return Ctor instanceof Ctor && funcToString.call(Ctor) == objectCtorString;
}
39
// Deep-clones JSON-style data. Finite Dates become ISO strings (for
// consistency with IndexedDB), binary objects are copied byte-for-byte, and
// non-plain objects (e.g. Workers) are returned by reference. Properties
// whose cloned value is `undefined` are dropped.
function clone(object) {
  if (!object || typeof object !== 'object') {
    return object;
  }

  if (Array.isArray(object)) {
    var copy = new Array(object.length);
    for (var idx = 0; idx < object.length; idx++) {
      copy[idx] = clone(object[idx]);
    }
    return copy;
  }

  // special case: to avoid inconsistencies between IndexedDB
  // and other backends, we automatically stringify Dates
  if (object instanceof Date && isFinite(object)) {
    return object.toISOString();
  }

  if (isBinaryObject(object)) {
    return cloneBinaryObject(object);
  }

  if (!isPlainObject(object)) {
    return object; // don't clone objects like Workers
  }

  var newObject = {};
  for (var key in object) {
    /* istanbul ignore else */
    if (Object.prototype.hasOwnProperty.call(object, key)) {
      var value = clone(object[key]);
      if (typeof value !== 'undefined') {
        newObject[key] = value;
      }
    }
  }
  return newObject;
}
83
// Wraps `fun` so that a second invocation throws; used as a smoke test for
// callbacks that must fire exactly once.
function once(fun) {
  var called = false;
  return function (...args) {
    /* istanbul ignore if */
    if (called) {
      // this is a smoke test and should never actually happen
      throw new Error('once called more than once');
    }
    called = true;
    fun.apply(this, args);
  };
}
97
// Adapts a callback-style function so it returns a Promise, while still
// honoring an optional trailing Node-style callback argument. Arguments are
// cloned so the caller's objects are never mutated.
function toPromise(func) {
  // create the function we will be returning
  return function (...args) {
    // Clone arguments
    args = clone(args);
    var self = this;
    // if the last argument is a function, assume its a callback
    var usedCB = typeof args[args.length - 1] === 'function' ? args.pop() : false;
    var promise = new Promise(function (fulfill, reject) {
      try {
        // create a callback for this invocation
        var callback = once(function (err, mesg) {
          if (err) {
            reject(err);
          } else {
            fulfill(mesg);
          }
        });
        // apply the function in the orig context
        args.push(callback);
        var resp = func.apply(self, args);
        // if the function already returned a thenable, adopt it directly
        if (resp && typeof resp.then === 'function') {
          fulfill(resp);
        }
      } catch (e) {
        reject(e);
      }
    });
    // if there is a callback, call it back
    if (usedCB) {
      promise.then(function (result) {
        usedCB(null, result);
      }, usedCB);
    }
    return promise;
  };
}
136
// When any 'debug' listeners are attached to the PouchDB constructor, emits
// a 'debug' event describing the API call, and rewrites the trailing
// callback in `args` so the response (or error) is also emitted.
function logApiCall(self, name, args) {
  /* istanbul ignore if */
  if (!self.constructor.listeners('debug').length) {
    return;
  }
  var callArgs = ['api', self.name, name];
  for (var i = 0; i < args.length - 1; i++) {
    callArgs.push(args[i]);
  }
  self.constructor.emit('debug', callArgs);

  // override the callback itself to log the response
  var origCallback = args[args.length - 1];
  args[args.length - 1] = function (err, res) {
    var tail = err ? ['error', err] : ['success', res];
    var responseArgs = ['api', self.name, name].concat(tail);
    self.constructor.emit('debug', responseArgs);
    origCallback(err, res);
  };
}
158
// Wraps an adapter method: rejects when the database is closed or
// destroyed, logs the call for debugging, and defers execution until the
// adapter's task queue is ready.
function adapterFun(name, callback) {
  return toPromise(function (...args) {
    if (this._closed) {
      return Promise.reject(new Error('database is closed'));
    }
    if (this._destroyed) {
      return Promise.reject(new Error('database is destroyed'));
    }
    var self = this;
    logApiCall(self, name, args);
    if (this.taskqueue.isReady) {
      return callback.apply(this, args);
    }
    // queue the call and replay it (via the public method) once ready
    return new Promise(function (fulfill, reject) {
      self.taskqueue.addTask(function (failed) {
        if (failed) {
          reject(failed);
        } else {
          fulfill(self[name].apply(self, args));
        }
      });
    });
  });
}
183
// like underscore/lodash _.pick(): copies only the listed properties that
// exist (own or inherited) on `obj` into a fresh object.
function pick(obj, arr) {
  var res = {};
  for (var prop of arr) {
    if (prop in obj) {
      res[prop] = obj[prop];
    }
  }
  return res;
}
195
// Most browsers throttle concurrent requests at 6, so it's silly
// to shim _bulk_get by trying to launch potentially hundreds of requests
// and then letting the majority time out. We can handle this ourselves.
var MAX_NUM_CONCURRENT_REQUESTS = 6;

// identity — used both as the default result formatter and as a
// truthiness predicate for Array#filter
function identityFunction(x) {
  return x;
}
204
// Wraps a plain get() result so it matches the open_revs response shape.
function formatResultForOpenRevsGet(result) {
  return [{ok: result}];
}
210
// shim for P/CouchDB adapters that don't directly implement _bulk_get.
// Fetches each requested doc via individual get() calls — at most
// MAX_NUM_CONCURRENT_REQUESTS started per batch — then reassembles the
// responses into the _bulk_get result shape:
//   {results: [{id, docs: [{ok: doc} | {error: err}]}, ...]}
function bulkGet(db, opts, callback) {
  var requests = opts.docs;

  // consolidate into one request per doc if possible
  var requestsById = new Map();
  requests.forEach(function (request) {
    if (requestsById.has(request.id)) {
      requestsById.get(request.id).push(request);
    } else {
      requestsById.set(request.id, [request]);
    }
  });

  var numDocs = requestsById.size;
  var numDone = 0;
  // one slot per unique doc id, filled in request order regardless of
  // which get() finishes first
  var perDocResults = new Array(numDocs);

  // flatten per-doc results back into one {id, docs:[info]} entry per rev
  function collapseResultsAndFinish() {
    var results = [];
    perDocResults.forEach(function (res) {
      res.docs.forEach(function (info) {
        results.push({
          id: res.id,
          docs: [info]
        });
      });
    });
    callback(null, {results});
  }

  function checkDone() {
    if (++numDone === numDocs) {
      collapseResultsAndFinish();
    }
  }

  // record the outcome for one doc at its original position
  function gotResult(docIndex, id, docs) {
    perDocResults[docIndex] = {id, docs};
    checkDone();
  }

  var allRequests = [];
  requestsById.forEach(function (value, key) {
    allRequests.push(key);
  });

  var i = 0;

  // starts the next batch of up to MAX_NUM_CONCURRENT_REQUESTS gets; each
  // completed get calls nextBatch() again, so batches overlap as
  // responses come in rather than running strictly one after another
  function nextBatch() {

    if (i >= allRequests.length) {
      return;
    }

    var upTo = Math.min(i + MAX_NUM_CONCURRENT_REQUESTS, allRequests.length);
    var batch = allRequests.slice(i, upTo);
    processBatch(batch, i);
    i += batch.length;
  }

  function processBatch(batch, offset) {
    batch.forEach(function (docId, j) {
      var docIdx = offset + j;
      var docRequests = requestsById.get(docId);

      // just use the first request as the "template"
      // TODO: The _bulk_get API allows for more subtle use cases than this,
      // but for now it is unlikely that there will be a mix of different
      // "atts_since" or "attachments" in the same request, since it's just
      // replicate.js that is using this for the moment.
      // Also, atts_since is aspirational, since we don't support it yet.
      var docOpts = pick(docRequests[0], ['atts_since', 'attachments']);
      docOpts.open_revs = docRequests.map(function (request) {
        // rev is optional, open_revs disallowed
        return request.rev;
      });

      // remove falsey / undefined revisions
      docOpts.open_revs = docOpts.open_revs.filter(identityFunction);

      var formatResult = identityFunction;

      if (docOpts.open_revs.length === 0) {
        delete docOpts.open_revs;

        // when fetching only the "winning" leaf,
        // transform the result so it looks like an open_revs
        // request
        formatResult = formatResultForOpenRevsGet;
      }

      // globally-supplied options
      ['revs', 'attachments', 'binary', 'ajax', 'latest'].forEach(function (param) {
        if (param in opts) {
          docOpts[param] = opts[param];
        }
      });
      db.get(docId, docOpts, function (err, res) {
        var result;
        /* istanbul ignore if */
        if (err) {
          result = [{error: err}];
        } else {
          result = formatResult(res);
        }
        gotResult(docIdx, docId, result);
        // pull in the next batch as soon as any request settles
        nextBatch();
      });
    });
  }

  nextBatch();

}
326
// Feature-detect localStorage once at module load: merely touching it can
// throw (e.g. Safari private browsing, or environments without the global).
var hasLocal = (function () {
  try {
    localStorage.setItem('_pouch_check_localstorage', 1);
    return !!localStorage.getItem('_pouch_check_localstorage');
  } catch (e) {
    return false;
  }
})();

function hasLocalStorage() {
  return hasLocal;
}
339
// Microtask scheduler: prefer the standard queueMicrotask, fall back to a
// resolved-Promise trampoline where it is unavailable.
let nextTick;
if (typeof queueMicrotask === "function") {
  nextTick = queueMicrotask;
} else {
  nextTick = function nextTick(fn) {
    Promise.resolve().then(fn);
  };
}
345
// Cross-window database change notifier. Emits an event named after each
// database when it changes, and (when localStorage is available) uses a
// dummy localStorage write as a side channel so same-origin listeners in
// other windows fire too.
class Changes extends EE {
  constructor() {
    super();

    // eventFunction per listener id, so removeListener can detach it later
    this._listeners = {};

    if (hasLocalStorage()) {
      addEventListener("storage", (e) => {
        // e.key is the db name another window touched; re-emit locally
        this.emit(e.key);
      });
    }
  }

  // Subscribes opts.onChange to future changes of `dbName`. Each
  // notification replays db.changes() from opts.since; notifications that
  // arrive while a feed is running are coalesced into one re-run via the
  // `inprogress` / 'waiting' flag.
  addListener(dbName, id, db, opts) {
    if (this._listeners[id]) {
      return;
    }
    var inprogress = false;
    var self = this;
    function eventFunction() {
      // listener was removed while an event was pending
      if (!self._listeners[id]) {
        return;
      }
      if (inprogress) {
        // remember that another notification arrived mid-run
        inprogress = 'waiting';
        return;
      }
      inprogress = true;
      var changesOpts = pick(opts, [
        'style', 'include_docs', 'attachments', 'conflicts', 'filter',
        'doc_ids', 'view', 'since', 'query_params', 'binary', 'return_docs'
      ]);

      function onError() {
        inprogress = false;
      }

      db.changes(changesOpts).on('change', function (c) {
        // only deliver changes newer than the caller has already seen
        if (c.seq > opts.since && !opts.cancelled) {
          opts.since = c.seq;
          opts.onChange(c);
        }
      }).on('complete', function () {
        if (inprogress === 'waiting') {
          // a notification arrived mid-run: schedule one more replay
          nextTick(eventFunction);
        }
        inprogress = false;
      }).on('error', onError);
    }
    this._listeners[id] = eventFunction;
    this.on(dbName, eventFunction);
  }

  removeListener(dbName, id) {
    if (!(id in this._listeners)) {
      return;
    }
    super.removeListener(dbName, this._listeners[id]);
    delete this._listeners[id];
  }

  notifyLocalWindows(dbName) {
    //do a useless change on a storage thing
    //in order to get other windows's listeners to activate
    if (hasLocalStorage()) {
      localStorage[dbName] = (localStorage[dbName] === "a") ? "b" : "a";
    }
  }

  notify(dbName) {
    this.emit(dbName);
    this.notifyLocalWindows(dbName);
  }
}
420
/**
 * Calls console[method](...args) only when a console with that method
 * exists, so logging never throws in console-less environments.
 * (Fix: use rest parameters instead of the legacy `arguments` object.)
 * @param {string} method - console method name, e.g. 'warn' or 'info'
 * @param {...*} args - values forwarded to the console method
 */
function guardedConsole(method, ...args) {
  /* istanbul ignore else */
  if (typeof console !== 'undefined' && typeof console[method] === 'function') {
    console[method](...args);
  }
}
428
// Returns a random integer backoff value: roughly within [min, max], with
// doubling when max is missing/invalid, capped at 10 minutes.
function randomNumber(min, max) {
  var maxTimeout = 600000; // Hard-coded default of 10 minutes
  min = parseInt(min, 10) || 0;
  max = parseInt(max, 10);
  if (Number.isNaN(max) || max <= min) {
    max = (min || 1) << 1; // doubling
  } else {
    max += 1;
  }
  // In order to not exceed maxTimeout, pick a random value between half of maxTimeout and maxTimeout
  if (max > maxTimeout) {
    min = maxTimeout >> 1; // divide by two
    max = maxTimeout;
  }
  var range = max - min;
  return ~~(range * Math.random() + min); // ~~ coerces to an int, but fast.
}
448
// Default replication backoff: up to ~2 seconds for the first retry
// (min falsy), then a doubling of the previous delay via randomNumber().
function defaultBackOff(min) {
  var max = min ? 0 : 2000;
  return randomNumber(min, max);
}
456
// designed to give info to browser users, who are disturbed
// when they see http errors in the console
function explainError(status, str) {
  var message = 'The above ' + status + ' is totally normal. ' + str;
  guardedConsole('info', message);
}
462
// Error subclass carrying CouchDB-style status/name/message fields.
class PouchError extends Error {
  constructor(status, error, reason) {
    super();
    Object.assign(this, {
      status,        // HTTP-ish status code, e.g. 404
      name: error,   // CouchDB error name, e.g. 'not_found'
      message: reason,
      error: true
    });
  }

  toString() {
    // NOTE(review): `reason` is only present on instances produced via
    // createError(); on a bare PouchError it is undefined and is dropped
    // by JSON.stringify.
    const {status, name, message, reason} = this;
    return JSON.stringify({status, name, message, reason});
  }
}
481
// Shared singleton PouchError instances for every error PouchDB raises.
// createError() below copies one of these into a fresh object (optionally
// with a more specific `reason`), so the singletons themselves should
// never be mutated or thrown directly.
var UNAUTHORIZED = new PouchError(401, 'unauthorized', "Name or password is incorrect.");
var MISSING_BULK_DOCS = new PouchError(400, 'bad_request', "Missing JSON list of 'docs'");
var MISSING_DOC = new PouchError(404, 'not_found', 'missing');
var REV_CONFLICT = new PouchError(409, 'conflict', 'Document update conflict');
var INVALID_ID = new PouchError(400, 'bad_request', '_id field must contain a string');
var MISSING_ID = new PouchError(412, 'missing_id', '_id is required for puts');
var RESERVED_ID = new PouchError(400, 'bad_request', 'Only reserved document ids may start with underscore.');
var NOT_OPEN = new PouchError(412, 'precondition_failed', 'Database not open');
var UNKNOWN_ERROR = new PouchError(500, 'unknown_error', 'Database encountered an unknown error');
var BAD_ARG = new PouchError(500, 'badarg', 'Some query argument is invalid');
var INVALID_REQUEST = new PouchError(400, 'invalid_request', 'Request was invalid');
var QUERY_PARSE_ERROR = new PouchError(400, 'query_parse_error', 'Some query parameter is invalid');
var DOC_VALIDATION = new PouchError(500, 'doc_validation', 'Bad special document member');
var BAD_REQUEST = new PouchError(400, 'bad_request', 'Something wrong with the request');
var NOT_AN_OBJECT = new PouchError(400, 'bad_request', 'Document must be a JSON object');
var DB_MISSING = new PouchError(404, 'not_found', 'Database not found');
var IDB_ERROR = new PouchError(500, 'indexed_db_went_bad', 'unknown');
var WSQ_ERROR = new PouchError(500, 'web_sql_went_bad', 'unknown');
// NOTE(review): 'levelDB_went_went_bad' looks like a long-standing typo,
// but it is a runtime error name callers may match on, so it stays as-is.
var LDB_ERROR = new PouchError(500, 'levelDB_went_went_bad', 'unknown');
var FORBIDDEN = new PouchError(403, 'forbidden', 'Forbidden by design doc validate_doc_update function');
var INVALID_REV = new PouchError(400, 'bad_request', 'Invalid rev format');
var FILE_EXISTS = new PouchError(412, 'file_exists', 'The database could not be created, the file already exists.');
var MISSING_STUB = new PouchError(412, 'missing_stub', 'A pre-existing attachment stub wasn\'t found');
var INVALID_URL = new PouchError(413, 'invalid_url', 'Provided URL is invalid');
506
// Builds a throwable error that copies the data properties of one of the
// PouchError singletons above, optionally overriding `reason`. A custom
// constructor (sharing PouchError.prototype) is used so the result
// JSON-serializes cleanly.
function createError(error, reason) {
  function CustomPouchError(reason) {
    // inherit error properties from our parent error manually
    // so as to allow proper JSON parsing.
    for (var prop of Object.getOwnPropertyNames(error)) {
      if (typeof error[prop] !== 'function') {
        this[prop] = error[prop];
      }
    }

    if (this.stack === undefined) {
      this.stack = (new Error()).stack;
    }

    if (reason !== undefined) {
      this.reason = reason;
    }
  }
  CustomPouchError.prototype = PouchError.prototype;
  return new CustomPouchError(reason);
}
529
// Normalizes an error-ish HTTP response into the PouchDB error shape,
// guaranteeing name/status/message/stack fields exist. Mutates and
// returns `err` itself.
function generateErrorFromResponse(err) {

  if (typeof err !== 'object') {
    // non-object errors are attached to the generic unknown-error singleton
    var data = err;
    err = UNKNOWN_ERROR;
    err.data = data;
  }

  // CouchDB reports update conflicts via the 'error' field
  if ('error' in err && err.error === 'conflict') {
    err.name = 'conflict';
    err.status = 409;
  }

  if (!('name' in err)) {
    err.name = err.error || 'unknown';
  }
  if (!('status' in err)) {
    err.status = 500;
  }
  if (!('message' in err)) {
    err.message = err.message || err.reason;
  }
  if (!('stack' in err)) {
    err.stack = (new Error()).stack;
  }

  return err;
}
561
// Runs a changes filter defensively: returns the *negated* filter result on
// success, or a 400 error object when the filter itself threw.
function tryFilter(filter, doc, req) {
  try {
    return !filter(doc, req);
  } catch (err) {
    var msg = 'Filter function threw: ' + err.toString();
    return createError(BAD_REQUEST, msg);
  }
}
570
// Builds a change-filtering predicate from changes() options. The returned
// function mutates each change (stripping the doc and/or attachment bodies
// as configured) and returns true to emit the change, false to skip it, or
// an error object when the user-supplied filter threw.
function filterChange(opts) {
  var req = {query: opts.query_params};
  var hasFilter = opts.filter && typeof opts.filter === 'function';

  return function filter(change) {
    if (!change.doc) {
      // CSG sends events on the changes feed that don't have documents,
      // this hack makes a whole lot of existing code robust.
      change.doc = {};
    }

    var filterReturn = hasFilter && tryFilter(opts.filter, change.doc, req);

    // tryFilter produced an error object: propagate it to the caller
    if (typeof filterReturn === 'object') {
      return filterReturn;
    }

    // truthy (negated) result means the filter rejected this change
    if (filterReturn) {
      return false;
    }

    if (!opts.include_docs) {
      delete change.doc;
    } else if (!opts.attachments) {
      // keep attachment metadata but drop the bodies
      for (var att of Object.keys(change.doc._attachments || {})) {
        change.doc._attachments[att].stub = true;
      }
    }
    return true;
  };
}
606
// Determine if an ID is valid:
//  - invalid IDs begin with an underscore that does not begin '_design' or
//    '_local'
//  - any other non-empty string value is a valid id
// Throws the specific error object for each failure case; returns
// undefined for valid ids.
function invalidIdError(id) {
  var err;
  if (!id) {
    err = createError(MISSING_ID);
  } else if (typeof id !== 'string') {
    err = createError(INVALID_ID);
  } else if ((/^_/).test(id) && !(/^_(design|local)/).test(id)) {
    err = createError(RESERVED_ID);
  }
  if (err) {
    throw err;
  }
}
627
// Checks if a PouchDB object is "remote" (http adapter) or not, preferring
// the explicit _remote flag over the deprecated type() probe.
function isRemote(db) {
  if (typeof db._remote === 'boolean') {
    return db._remote;
  }
  /* istanbul ignore next */
  if (typeof db.type === 'function') {
    guardedConsole('warn',
      'db.type() is deprecated and will be removed in ' +
      'a future version of PouchDB');
    // legacy adapters report 'http' for remote databases
    return db.type() === 'http';
  }
  /* istanbul ignore next */
  return false;
}
644
// Counts listeners for `type`, preferring the instance method (io.js 1.x+)
// and falling back to the deprecated static EE.listenerCount.
function listenerCount(ee, type) {
  if ('listenerCount' in ee) {
    return ee.listenerCount(type);
  }
  return EE.listenerCount(ee, type);
}
649
// Splits 'ddoc/fn' into [ddoc, fn]; a bare 'name' maps to [name, name];
// anything else (empty input, two or more slashes) yields null.
function parseDesignDocFunctionName(s) {
  if (!s) {
    return null;
  }
  var parts = s.split('/');
  switch (parts.length) {
    case 2:
      return parts;
    case 1:
      return [s, s];
    default:
      return null;
  }
}
663
// Canonicalizes a design-doc function name to 'ddoc/fn' form, or null when
// the input cannot be parsed.
function normalizeDesignDocFunctionName(s) {
  var parsed = parseDesignDocFunctionName(s);
  return parsed === null ? null : parsed.join('/');
}
668
// originally parseUri 1.2.2, now patched by us
// (c) Steven Levithan <stevenlevithan.com>
// MIT License
var keys = ["source", "protocol", "authority", "userInfo", "user", "password",
  "host", "port", "relative", "path", "directory", "file", "query", "anchor"];
var qName = "queryKey";
var qParser = /(?:^|&)([^&=]*)=?([^&]*)/g;

// use the "loose" parser
/* eslint no-useless-escape: 0 */
var parser = /^(?:(?![^:@]+:[^:@\/]*@)([^:\/?#.]+):)?(?:\/\/)?((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?)(((\/(?:[^?#](?![^?#\/]*\.[^?#\/.]+(?:[?#]|$)))*\/?)?([^?#\/]*))(?:\?([^#]*))?(?:#(.*))?)/;

// Breaks a URL string into its components (see `keys` above), decoding the
// user and password parts, and collecting query parameters into
// `uri.queryKey`.
function parseUri(str) {
  var matches = parser.exec(str);
  var uri = {};

  for (var i = 13; i >= 0; i--) {
    var key = keys[i];
    var value = matches[i] || "";
    // only the credential components are percent-decoded
    var shouldDecode = key === 'user' || key === 'password';
    uri[key] = shouldDecode ? decodeURIComponent(value) : value;
  }

  uri[qName] = {};
  uri[keys[12]].replace(qParser, function ($0, $1, $2) {
    if ($1) {
      uri[qName][$1] = $2;
    }
  });

  return uri;
}
702
// Based on https://github.com/alexdavid/scope-eval v0.0.3
// (source: https://unpkg.com/scope-eval@0.0.3/scope_eval.js)
// Evaluates `source` as the body of a function whose parameters are the own
// keys of `scope`, bound to the corresponding values. This is basically
// just a wrapper around new Function() — callers must only pass trusted
// source (it is used for design-doc map/filter functions).
function scopeEval(source, scope) {
  var names = [];
  var values = [];
  for (var key of Object.keys(scope)) {
    names.push(key);
    values.push(scope[key]);
  }
  names.push(source);
  return Function.apply(null, names).apply(null, values);
}
719
// this is essentially the "update sugar" function from daleharvey/pouchdb#1388
// the diffFun tells us what delta to apply to the doc. it either returns
// the doc, or false if it doesn't need to do an update after all
function upsert(db, docId, diffFun) {
  return db.get(docId)
    .catch(function (err) {
      /* istanbul ignore next */
      if (err.status !== 404) {
        throw err;
      }
      // missing doc: diff against an empty object
      return {};
    })
    .then(function (doc) {
      // the user might change the _rev, so save it for posterity
      var docRev = doc._rev;
      var newDoc = diffFun(doc);

      if (!newDoc) {
        // if the diffFun returns falsy, we short-circuit as
        // an optimization
        return {updated: false, rev: docRev};
      }

      // users aren't allowed to modify these values,
      // so reset them here
      newDoc._id = docId;
      newDoc._rev = docRev;
      return tryAndPut(db, newDoc, diffFun);
    });
}

// Attempts the put; on a 409 conflict it re-runs the whole upsert so that
// diffFun gets to see the latest revision.
function tryAndPut(db, doc, diffFun) {
  return db.put(doc).then(function (res) {
    return {updated: true, rev: res.rev};
  }, function (err) {
    /* istanbul ignore next */
    if (err.status !== 409) {
      throw err;
    }
    return upsert(db, doc._id, diffFun);
  });
}
765
// Thin indirection over the base64 globals so they can be referenced from
// any context without binding issues.
var thisAtob = (str) => atob(str);

var thisBtoa = (str) => btoa(str);
773
// Abstracts constructing a Blob object, so it also works in older
// browsers that don't support the native Blob constructor (e.g.
// old QtWebKit versions, Android < 4.4).
function createBlob(parts, properties) {
  /* global BlobBuilder,MSBlobBuilder,MozBlobBuilder,WebKitBlobBuilder */
  parts = parts || [];
  properties = properties || {};
  try {
    return new Blob(parts, properties);
  } catch (e) {
    // legacy path: only a missing/old Blob constructor raises TypeError
    if (e.name !== "TypeError") {
      throw e;
    }
    var Builder;
    if (typeof BlobBuilder !== 'undefined') {
      Builder = BlobBuilder;
    } else if (typeof MSBlobBuilder !== 'undefined') {
      Builder = MSBlobBuilder;
    } else if (typeof MozBlobBuilder !== 'undefined') {
      Builder = MozBlobBuilder;
    } else {
      Builder = WebKitBlobBuilder;
    }
    var builder = new Builder();
    for (var i = 0; i < parts.length; i += 1) {
      builder.append(parts[i]);
    }
    return builder.getBlob(properties.type);
  }
}
798
// From http://stackoverflow.com/questions/14967647/ (continues on next line)
// encode-decode-image-with-base64-breaks-image (2013-04-21)
// Converts a "binary string" (one char per byte, codes 0-255) into an
// ArrayBuffer of the same length.
function binaryStringToArrayBuffer(bin) {
  var arr = new Uint8Array(bin.length);
  for (var i = 0; i < bin.length; i++) {
    arr[i] = bin.charCodeAt(i);
  }
  return arr.buffer;
}
810
// "Bluffer" = blob-or-buffer: wraps a binary string in a Blob of `type`.
function binStringToBluffer(binString, type) {
  var buffer = binaryStringToArrayBuffer(binString);
  return createBlob([buffer], {type});
}
814
// Decodes a base64 string into a Blob with the given content type.
function b64ToBluffer(b64, type) {
  var binString = thisAtob(b64);
  return binStringToBluffer(binString, type);
}
818
//Can't find original post, but this is close
//http://stackoverflow.com/questions/6965107/ (continues on next line)
//converting-between-strings-and-arraybuffers
// Converts an ArrayBuffer to a "binary string" (one char per byte).
function arrayBufferToBinaryString(buffer) {
  var bytes = new Uint8Array(buffer);
  var chars = [];
  for (var byte of bytes) {
    chars.push(String.fromCharCode(byte));
  }
  return chars.join('');
}
831
// shim for browsers whose FileReader lacks readAsBinaryString: fall back
// to reading an ArrayBuffer and converting it manually
function readAsBinaryString(blob, callback) {
  var reader = new FileReader();
  var hasBinaryString = typeof reader.readAsBinaryString === 'function';
  reader.onloadend = function (e) {
    var result = e.target.result || '';
    if (hasBinaryString) {
      return callback(result);
    }
    callback(arrayBufferToBinaryString(result));
  };
  if (hasBinaryString) {
    reader.readAsBinaryString(blob);
  } else {
    reader.readAsArrayBuffer(blob);
  }
}
849
// Reads a Blob (or buffer) as a binary string and passes it to `callback`.
function blobToBinaryString(blobOrBuffer, callback) {
  readAsBinaryString(blobOrBuffer, (bin) => {
    callback(bin);
  });
}
855
// Reads a Blob (or buffer) and returns its base64 encoding via `callback`.
function blobToBase64(blobOrBuffer, callback) {
  blobToBinaryString(blobOrBuffer, function (binString) {
    // the intermediate value is a binary string; btoa produces the base64
    callback(thisBtoa(binString));
  });
}
861
// simplified API. universal browser support is assumed
function readAsArrayBuffer(blob, callback) {
  var reader = new FileReader();
  reader.onloadend = function (e) {
    // normalize a falsy result to an empty buffer
    callback(e.target.result || new ArrayBuffer(0));
  };
  reader.readAsArrayBuffer(blob);
}
871
// this is not used in the browser

// Scheduler used to yield between MD5 chunks. NOTE(review): `self` is the
// window/worker global; this module appears browser/worker-targeted —
// confirm before running in plain Node, where `self` may be undefined.
var setImmediateShim = self.setImmediate || self.setTimeout;
// Hash attachments in 32 KiB chunks so large blobs don't block the thread.
var MD5_CHUNK_SIZE = 32768;
876
// Base64-encodes a raw binary string (as produced by spark-md5's
// `end(true)`).
function rawToBase64(raw) {
  return thisBtoa(raw);
}
880
// Appends blob[start..end) to the spark-md5 ArrayBuffer hasher, then
// invokes `callback` once the bytes have been read.
function appendBlob(buffer, blob, start, end, callback) {
  // only slice the blob if we really need to
  var chunk = (start > 0 || end < blob.size) ? blob.slice(start, end) : blob;
  readAsArrayBuffer(chunk, function (arrayBuffer) {
    buffer.append(arrayBuffer);
    callback();
  });
}
891
// Appends string[start..end) to the spark-md5 string hasher, then invokes
// `callback` synchronously.
function appendString(buffer, string, start, end, callback) {
  // only create a substring if we really need to
  var chunk = (start > 0 || end < string.length)
    ? string.substring(start, end)
    : string;
  buffer.appendBinary(chunk);
  callback();
}
900
// Incrementally MD5s `data` — a binary string or a Blob — in
// MD5_CHUNK_SIZE pieces, yielding to the event loop between chunks, and
// calls back with the base64-encoded digest.
function binaryMd5(data, callback) {
  var inputIsString = typeof data === 'string';
  var len = inputIsString ? data.length : data.size;
  var chunkSize = Math.min(MD5_CHUNK_SIZE, len);
  var chunks = Math.ceil(len / chunkSize);
  var currentChunk = 0;
  // spark-md5 has distinct incremental hashers for strings vs ArrayBuffers
  var buffer = inputIsString ? new Md5() : new Md5.ArrayBuffer();

  var append = inputIsString ? appendString : appendBlob;

  function next() {
    // yield between chunks so hashing a large attachment stays responsive
    setImmediateShim(loadNextChunk);
  }

  function done() {
    // end(true) yields the raw binary digest, which we base64-encode
    var raw = buffer.end(true);
    var base64 = rawToBase64(raw);
    callback(base64);
    buffer.destroy();
  }

  function loadNextChunk() {
    var start = currentChunk * chunkSize;
    var end = start + chunkSize;
    currentChunk++;
    if (currentChunk < chunks) {
      append(buffer, data, start, end, next);
    } else {
      // final chunk: finish instead of scheduling another round
      append(buffer, data, start, end, done);
    }
  }
  loadNextChunk();
}
934
// Synchronous MD5 of a plain (non-binary) string via spark-md5.
function stringMd5(string) {
  return Md5.hash(string);
}
938
/**
 * Creates a new revision string that does NOT include the revision height
 * For example '56649f1b0506c6ca9fda0746eb0cacdf'
 *
 * With `deterministic_revs`, the rev is an MD5 of the doc (minus its rev
 * tree) so identical docs produce identical revs; otherwise it is a random
 * v4 UUID with the dashes stripped.
 */
function rev(doc, deterministic_revs) {
  if (!deterministic_revs) {
    return v4().replace(/-/g, '').toLowerCase();
  }

  // copy so the caller's doc is untouched when we drop _rev_tree
  var docCopy = Object.assign({}, doc);
  delete docCopy._rev_tree;
  return stringMd5(JSON.stringify(docCopy));
}
952
// backwards-compatible alias for the old `uuid` import
var uuid = v4; // mimic old import, only v4 is ever used elsewhere
954
// We fetch all leafs of the revision tree, and sort them based on tree length
// and whether they were deleted, undeleted documents with the longest revision
// tree (most edits) win
// The final sort algorithm is slightly documented in a sidebar here:
// http://guide.couchdb.org/draft/conflicts.html
function winningRev(metadata) {
  var winningId;
  var winningPos;
  var winningDeleted;

  var toVisit = metadata.rev_tree.slice();
  var node;
  while ((node = toVisit.pop())) {
    var tree = node.ids;
    var branches = tree[2];
    var pos = node.pos;

    if (branches.length) { // non-leaf: descend into each branch
      for (var branch of branches) {
        toVisit.push({pos: pos + 1, ids: branch});
      }
      continue;
    }

    var deleted = !!tree[1].deleted;
    var id = tree[0];
    // sort by deleted, then pos, then id
    var isBetter = !winningId ||
      (winningDeleted !== deleted
        ? winningDeleted
        : winningPos !== pos ? winningPos < pos : winningId < id);
    if (isBetter) {
      winningId = id;
      winningPos = pos;
      winningDeleted = deleted;
    }
  }

  return winningPos + '-' + winningId;
}
989
// Pretty much all below can be combined into a higher order function to
// traverse revisions
// The return value from the callback will be passed as context to all
// children of that node
function traverseRevTree(revs, callback) {
  var toVisit = revs.slice();
  var node;
  while ((node = toVisit.pop())) {
    var {pos, ids: tree, ctx} = node;
    var branches = tree[2];
    // callback(isLeaf, pos, revHash, parentCtx, opts)
    var newCtx = callback(branches.length === 0, pos, tree[0], ctx, tree[1]);
    for (var branch of branches) {
      toVisit.push({pos: pos + 1, ids: branch, ctx: newCtx});
    }
  }
}
1009
// Comparator for Array#sort: orders entries by ascending `.pos`.
function sortByPos(a, b) {
  return a.pos - b.pos;
}
1013
// Gathers every leaf of the rev tree as {rev, opts}, ordered deepest-first
// (descending position). The temporary `pos` used for sorting is removed
// before returning.
function collectLeaves(revs) {
  var leaves = [];
  traverseRevTree(revs, function (isLeaf, pos, id, acc, opts) {
    if (isLeaf) {
      leaves.push({rev: pos + "-" + id, pos, opts});
    }
  });
  leaves.sort(sortByPos).reverse();
  for (var leaf of leaves) {
    delete leaf.pos;
  }
  return leaves;
}
1027
// returns revs of all conflicts that is leaves such that
// 1. are not deleted and
// 2. are different than winning revision
function collectConflicts(metadata) {
  var win = winningRev(metadata);
  var conflicts = [];
  for (var leaf of collectLeaves(metadata.rev_tree)) {
    if (leaf.rev !== win && !leaf.opts.deleted) {
      conflicts.push(leaf.rev);
    }
  }
  return conflicts;
}
1043
// compact a tree by marking its non-leafs as missing,
// and return a list of revs to delete
function compactTree(metadata) {
  var revs = [];
  traverseRevTree(metadata.rev_tree, function (isLeaf, pos, revHash, ctx, opts) {
    // non-leaf revisions that still carry data get scheduled for deletion
    if (opts.status === 'available' && !isLeaf) {
      revs.push(pos + '-' + revHash);
      opts.status = 'missing';
    }
  });
  return revs;
}
1057
// `findPathToLeaf()` returns an array of revs that goes from the specified
// leaf rev to the root of that leaf’s branch.
//
// eg. for this rev tree:
// 1-9692 ▶ 2-37aa ▶ 3-df22 ▶ 4-6e94 ▶ 5-df4a ▶ 6-6a3a ▶ 7-57e5
//      ┃                     ┗━━━━━━▶ 5-8d8c ▶ 6-65e0
//      ┗━━━━━━▶ 3-43f6 ▶ 4-a3b4
//
// For a `targetRev` of '7-57e5', `findPathToLeaf()` would return
// ['7-57e5', '6-6a3a', '5-df4a']. The `revs` argument has the same
// structure as `rev_tree` in e.g. the IndexedDB representation; see
// tests/unit/test.purge.js for examples.
//
// This function will throw an error if:
// - The requested revision does not exist
// - The requested revision is not a leaf
function findPathToLeaf(revs, targetRev) {
  let path = [];
  const toVisit = revs.slice();

  let node;
  while ((node = toVisit.pop())) {
    const {pos, ids: tree} = node;
    const rev = `${pos}-${tree[0]}`;
    const branches = tree[2];
    const isLeaf = branches.length === 0;

    // optimistically assume we're still on the path towards the target
    path.push(rev);

    if (rev === targetRev) {
      // found it — but the target must actually be a leaf
      if (!isLeaf) {
        throw new Error('The requested revision is not a leaf');
      }
      return path.reverse();
    }

    // reset the accumulated path whenever we leave a single-child chain:
    // after a non-target leaf, or at a branching node (branches > 1)
    if (isLeaf || branches.length > 1) {
      path = [];
    }

    // visit each branch of this node on subsequent iterations
    for (const branch of branches) {
      toVisit.push({pos: pos + 1, ids: branch});
    }
  }
  if (path.length === 0) {
    throw new Error('The requested revision does not exist');
  }
  return path.reverse();
}
1113
// Flatten a revision tree into the complete list of root-to-leaf paths.
// Each path has the shape {pos: <position of its first rev>, ids: [{id, opts}, …]}.
function rootToLeaf(revs) {
  const paths = [];
  const stack = revs.slice();
  let entry;
  while ((entry = stack.pop())) {
    const { pos, ids: tree } = entry;
    const [id, opts, branches] = tree;

    // extend (a copy of) the ancestry accumulated on the way down
    const history = entry.history ? entry.history.slice() : [];
    history.push({ id, opts });

    if (branches.length === 0) {
      // reached a leaf: record the completed path, anchored at its root pos
      paths.push({ pos: pos + 1 - history.length, ids: history });
    }
    for (let i = 0; i < branches.length; i++) {
      stack.push({ pos: pos + 1, ids: branches[i], history });
    }
  }
  return paths.reverse();
}
1138
// for a better overview of what this is doing, read:
// https://github.com/apache/couchdb-couch/blob/master/src/couch_key_tree.erl

// ascending Array#sort comparator on the `pos` field of rev-tree entries
function sortByPos$1(a, b) {
  return a.pos - b.pos;
}
1144
// Classic binary search: returns the lowest index at which `item` could be
// inserted into the sorted array `arr` without violating the order defined
// by `comparator`.
function binarySearch(arr, item, comparator) {
  let lo = 0;
  let hi = arr.length;
  while (lo < hi) {
    const mid = (lo + hi) >>> 1; // unsigned shift avoids overflow on huge arrays
    if (comparator(arr[mid], item) < 0) {
      lo = mid + 1;
    } else {
      hi = mid;
    }
  }
  return lo;
}
1160
// Assuming `arr` is already sorted per `comparator`, splice `item` in at the
// position that keeps it sorted. Mutates `arr` in place.
function insertSorted(arr, item, comparator) {
  arr.splice(binarySearch(arr, item, comparator), 0, item);
}
1166
// Turn a flat path (array of {id, opts}) into a tree with a single branch.
// The first `numStemmed` entries of the path are dropped (stemmed) before
// building the tree. Returns undefined when nothing remains.
function pathToTree(path, numStemmed) {
  let root;
  let tail;
  for (let i = numStemmed; i < path.length; i++) {
    const { id, opts } = path[i];
    const node = [id, opts, []];
    if (tail) {
      // append as the sole child of the previous node
      tail[2].push(node);
      tail = node;
    } else {
      root = tail = node;
    }
  }
  return root;
}
1185
// Order two tree nodes by their rev hash (element 0). Note this never
// reports equality: equal hashes compare as 1, matching the original.
function compareTree(a, b) {
  return a[0] < b[0] ? -1 : 1;
}
1190
// Merge two trees together
// The roots of tree1 and tree2 must be the same revision
// Mutates `in_tree1` in place; returns {conflicts, tree} where `conflicts`
// is false, 'new_leaf' (a leaf was appended) or 'new_branch' (a fork was
// introduced).
function mergeTree(in_tree1, in_tree2) {
  var queue = [{tree1: in_tree1, tree2: in_tree2}];
  var conflicts = false;
  while (queue.length > 0) {
    var item = queue.pop();
    var tree1 = item.tree1;
    var tree2 = item.tree2;

    // a rev is 'available' if either side still holds its data
    if (tree1[1].status || tree2[1].status) {
      tree1[1].status =
        (tree1[1].status === 'available' ||
         tree2[1].status === 'available') ? 'available' : 'missing';
    }

    for (var i = 0; i < tree2[2].length; i++) {
      if (!tree1[2][0]) {
        // tree1 has no children at this level: adopt tree2's child wholesale
        conflicts = 'new_leaf';
        tree1[2][0] = tree2[2][i];
        continue;
      }

      var merged = false;
      for (var j = 0; j < tree1[2].length; j++) {
        if (tree1[2][j][0] === tree2[2][i][0]) {
          // matching rev hash on both sides: merge the subtrees iteratively
          queue.push({tree1: tree1[2][j], tree2: tree2[2][i]});
          merged = true;
        }
      }
      if (!merged) {
        // no matching child: the tree forks here; keep children sorted by hash
        conflicts = 'new_branch';
        insertSorted(tree1[2], tree2[2][i], compareTree);
      }
    }
  }
  return {conflicts, tree: in_tree1};
}
1229
// Merge a single path ({pos, ids}) into the forest `tree`.
// Returns {tree, conflicts} where `conflicts` is 'new_leaf', 'new_branch' or
// 'internal_node'. When `dontExpand` is true, branches starting at different
// positions are never joined (used after stemming).
function doMerge(tree, path, dontExpand) {
  var restree = [];
  var conflicts = false;
  var merged = false;
  var res;

  // empty forest: the path simply becomes the first branch
  if (!tree.length) {
    return {tree: [path], conflicts: 'new_leaf'};
  }

  for (var i = 0, len = tree.length; i < len; i++) {
    var branch = tree[i];
    if (branch.pos === path.pos && branch.ids[0] === path.ids[0]) {
      // Paths start at the same position and have the same root, so they
      // need to be merged
      res = mergeTree(branch.ids, path.ids);
      restree.push({pos: branch.pos, ids: res.tree});
      conflicts = conflicts || res.conflicts;
      merged = true;
    } else if (dontExpand !== true) {
      // The paths start at a different position: take the earlier-rooted one
      // and walk down until it is at the same depth from the root as the
      // later one. If the hashes match there, merge the shorter path into
      // the longer. (After stemming we don't want to expand trees this way.)

      var t1 = branch.pos < path.pos ? branch : path;
      var t2 = branch.pos < path.pos ? path : branch;
      var diff = t2.pos - t1.pos;

      var candidateParents = [];

      // depth-first scan of t1 for nodes exactly `diff` levels down whose
      // hash equals t2's root hash
      var trees = [];
      trees.push({ids: t1.ids, diff, parent: null, parentIdx: null});
      while (trees.length > 0) {
        var item = trees.pop();
        if (item.diff === 0) {
          if (item.ids[0] === t2.ids[0]) {
            candidateParents.push(item);
          }
          continue;
        }
        var elements = item.ids[2];
        for (var j = 0, elementsLen = elements.length; j < elementsLen; j++) {
          trees.push({
            ids: elements[j],
            diff: item.diff - 1,
            parent: item.ids,
            parentIdx: j
          });
        }
      }

      var el = candidateParents[0];

      if (!el) {
        // no join point found: keep the branch untouched
        restree.push(branch);
      } else {
        // merge t2 into the matching subtree of t1, in place
        res = mergeTree(el.ids, t2.ids);
        el.parent[2][el.parentIdx] = res.tree;
        restree.push({pos: t1.pos, ids: t1.ids});
        conflicts = conflicts || res.conflicts;
        merged = true;
      }
    } else {
      restree.push(branch);
    }
  }

  // We didn't find any branch to merge into, so the path becomes a new branch
  if (!merged) {
    restree.push(path);
  }

  restree.sort(sortByPos$1);

  return {
    tree: restree,
    conflicts: conflicts || 'internal_node'
  };
}
1310
// To ensure we don't grow the revision tree infinitely, we stem old revisions
// Returns {tree, revs} where `revs` lists the '<pos>-<hash>' revs that were
// cut off and no longer appear anywhere in the stemmed tree.
function stem(tree, depth) {
  // First we break out the tree into a complete list of root to leaf paths
  var paths = rootToLeaf(tree);
  var stemmedRevs;

  var result;
  for (var i = 0, len = paths.length; i < len; i++) {
    // Then for each path, we cut off the start of the path based on the
    // `depth` to stem to, and generate a new set of flat trees
    var path = paths[i];
    var stemmed = path.ids;
    var node;
    if (stemmed.length > depth) {
      // only do the stemming work if we actually need to stem
      if (!stemmedRevs) {
        stemmedRevs = {}; // avoid allocating this object unnecessarily
      }
      var numStemmed = stemmed.length - depth;
      node = {
        pos: path.pos + numStemmed,
        ids: pathToTree(stemmed, numStemmed)
      };

      // remember every rev that was cut off this path
      for (var s = 0; s < numStemmed; s++) {
        var rev = (path.pos + s) + '-' + stemmed[s].id;
        stemmedRevs[rev] = true;
      }
    } else { // no need to actually stem
      node = {
        pos: path.pos,
        ids: pathToTree(stemmed, 0)
      };
    }

    // Then we remerge all those flat trees together, ensuring that we don't
    // connect trees that would go beyond the depth limit
    if (result) {
      result = doMerge(result, node, true).tree;
    } else {
      result = [node];
    }
  }

  // this is memory-heavy per Chrome profiler, avoid unless we actually stemmed
  if (stemmedRevs) {
    traverseRevTree(result, function (isLeaf, pos, revHash) {
      // some revisions may have been removed in a branch but not in another
      delete stemmedRevs[pos + '-' + revHash];
    });
  }

  return {
    tree: result,
    revs: stemmedRevs ? Object.keys(stemmedRevs) : []
  };
}
1368
// Merge `path` into `tree`, then stem the result down to at most `depth`
// revisions per branch. Returns {tree, stemmedRevs, conflicts}.
function merge(tree, path, depth) {
  const mergedResult = doMerge(tree, path);
  const stemmedResult = stem(mergedResult.tree, depth);
  return {
    tree: stemmedResult.tree,
    stemmedRevs: stemmedResult.revs,
    conflicts: mergedResult.conflicts
  };
}
1378
// return true if a rev exists in the rev tree, false otherwise
function revExists(revs, rev) {
  const [posPart, targetId] = rev.split('-');
  const targetPos = parseInt(posPart, 10);

  const stack = revs.slice();
  let entry;
  while ((entry = stack.pop())) {
    if (entry.pos === targetPos && entry.ids[0] === targetId) {
      return true;
    }
    for (const branch of entry.ids[2]) {
      stack.push({ pos: entry.pos + 1, ids: branch });
    }
  }
  return false;
}
1398
// pull the `ids` tree out of a {pos, ids} rev-tree entry (map() helper)
function getTrees(node) {
  return node.ids;
}
1402
// check if a specific revision of a doc has been deleted
// - metadata: the metadata object from the doc store
// - rev: (optional) the revision to check. defaults to winning revision
// Returns undefined when the revision is not found in the tree.
function isDeleted(metadata, rev) {
  if (!rev) {
    rev = winningRev(metadata);
  }
  // everything after the first '-' is the rev hash
  const targetId = rev.substring(rev.indexOf('-') + 1);
  const stack = metadata.rev_tree.map((node) => node.ids);

  let tree;
  while ((tree = stack.pop())) {
    if (tree[0] === targetId) {
      return !!tree[1].deleted;
    }
    for (const subtree of tree[2]) {
      stack.push(subtree);
    }
  }
}
1421
// local (non-replicated) docs live under the '_local/' id prefix
function isLocalId(id) {
  if (typeof id !== 'string') {
    return false;
  }
  return id.startsWith('_local/');
}
1425
// returns the rev of the current leaf node of the branch containing `rev`
function latest(rev, metadata) {
  const stack = metadata.rev_tree.slice();
  let entry;
  while ((entry = stack.pop())) {
    const { pos, ids: tree } = entry;
    const [id, opts, branches] = tree;

    // extend (a copy of) the ancestry accumulated on the way down
    const history = entry.history ? entry.history.slice() : [];
    history.push({ id, pos, opts });

    if (branches.length === 0) {
      // leaf reached: if `rev` appears anywhere in this branch's ancestry,
      // this leaf is its latest descendant
      for (const ancestor of history) {
        if (ancestor.pos + '-' + ancestor.id === rev) {
          return pos + '-' + id;
        }
      }
    }

    for (const branch of branches) {
      stack.push({ pos: pos + 1, ids: branch, history });
    }
  }

  /* istanbul ignore next */
  throw new Error('Unable to resolve latest revision for id ' + metadata.id + ', rev ' + rev);
}
1461
// Emit a 'change' event, swallowing (but logging) any exception a user
// listener throws so it cannot break the changes feed.
function tryCatchInChangeListener(self, change, pending, lastSeq) {
  // isolate try/catches to avoid V8 deoptimizations
  try {
    self.emit('change', change, pending, lastSeq);
  } catch (e) {
    guardedConsole('error', 'Error in .on("change", function):', e);
  }
}
1470
// Build the changes-feed entry for a doc: its id, the list of changed revs,
// the doc itself, plus `deleted` and `_conflicts` decorations when requested.
function processChange(doc, metadata, opts) {
  let changeList = [{rev: doc._rev}];
  if (opts.style === 'all_docs') {
    // report every leaf rev, not just the winning one
    changeList = collectLeaves(metadata.rev_tree)
      .map(function (x) { return {rev: x.rev}; });
  }
  const change = {
    id: metadata.id,
    changes: changeList,
    doc
  };

  if (isDeleted(metadata, doc._rev)) {
    change.deleted = true;
  }
  if (opts.conflicts) {
    change.doc._conflicts = collectConflicts(metadata);
    if (!change.doc._conflicts.length) {
      // don't ship an empty conflicts array
      delete change.doc._conflicts;
    }
  }
  return change;
}
1494
// An EventEmitter wrapper around a changes feed. Emits 'change', 'complete',
// 'error' and 'cancel', and is also thenable: `then`/`catch` are bound to an
// internal promise that settles when the feed completes or errors.
class Changes$1 extends EE {
  constructor(db, opts, callback) {
    super();
    this.db = db;
    opts = opts ? clone(opts) : {};
    // `complete` fans the final outcome out to the event API, then tears
    // down all listeners; `once` guards against double invocation
    var complete = opts.complete = once((err, resp) => {
      if (err) {
        if (listenerCount(this, 'error') > 0) {
          this.emit('error', err);
        }
      } else {
        this.emit('complete', resp);
      }
      this.removeAllListeners();
      db.removeListener('destroyed', onDestroy);
    });
    // node-style callback support layered on top of the events
    if (callback) {
      this.on('complete', function (resp) {
        callback(null, resp);
      });
      this.on('error', callback);
    }
    const onDestroy = () => {
      this.cancel();
    };
    db.once('destroyed', onDestroy);

    opts.onChange = (change, pending, lastSeq) => {
      /* istanbul ignore if */
      if (this.isCancelled) {
        return;
      }
      tryCatchInChangeListener(this, change, pending, lastSeq);
    };

    // promise interface: `opts.complete` is re-pointed at the promise's
    // resolver; the promise's settlement then feeds `complete` above
    var promise = new Promise(function (fulfill, reject) {
      opts.complete = function (err, res) {
        if (err) {
          reject(err);
        } else {
          fulfill(res);
        }
      };
    });
    this.once('cancel', function () {
      db.removeListener('destroyed', onDestroy);
      // cancellation resolves (not rejects) with a sentinel status
      opts.complete(null, {status: 'cancelled'});
    });
    this.then = promise.then.bind(promise);
    this['catch'] = promise['catch'].bind(promise);
    this.then(function (result) {
      complete(null, result);
    }, complete);



    // defer starting the feed until the adapter's task queue is ready
    if (!db.taskqueue.isReady) {
      db.taskqueue.addTask((failed) => {
        if (failed) {
          opts.complete(failed);
        } else if (this.isCancelled) {
          this.emit('cancel');
        } else {
          this.validateChanges(opts);
        }
      });
    } else {
      this.validateChanges(opts);
    }
  }

  // Stop the feed. If the task queue isn't ready yet, the 'cancel' event is
  // emitted later by the queued task instead.
  cancel() {
    this.isCancelled = true;
    if (this.db.taskqueue.isReady) {
      this.emit('cancel');
    }
  }

  // Let the changes-filter plugin validate filter/selector options before
  // the feed starts; without the plugin, start immediately.
  validateChanges(opts) {
    var callback = opts.complete;

    /* istanbul ignore else */
    if (PouchDB._changesFilterPlugin) {
      PouchDB._changesFilterPlugin.validate(opts, (err) => {
        if (err) {
          return callback(err);
        }
        this.doChanges(opts);
      });
    } else {
      this.doChanges(opts);
    }
  }

  // Normalize options and hand off to the adapter's _changes implementation.
  doChanges(opts) {
    var callback = opts.complete;

    opts = clone(opts);
    if ('live' in opts && !('continuous' in opts)) {
      opts.continuous = opts.live;
    }
    opts.processChange = processChange;

    if (opts.since === 'latest') {
      opts.since = 'now';
    }
    if (!opts.since) {
      opts.since = 0;
    }
    if (opts.since === 'now') {
      // resolve 'now' to the db's current update_seq, then re-enter
      this.db.info().then((info) => {
        /* istanbul ignore if */
        if (this.isCancelled) {
          callback(null, {status: 'cancelled'});
          return;
        }
        opts.since = info.update_seq;
        this.doChanges(opts);
      }, callback);
      return;
    }

    /* istanbul ignore else */
    if (PouchDB._changesFilterPlugin) {
      PouchDB._changesFilterPlugin.normalize(opts);
      if (PouchDB._changesFilterPlugin.shouldFilter(this, opts)) {
        return PouchDB._changesFilterPlugin.filter(this, opts);
      }
    } else {
      // warn about filtering options that will be silently ignored
      ['doc_ids', 'filter', 'selector', 'view'].forEach(function (key) {
        if (key in opts) {
          guardedConsole('warn',
            'The "' + key + '" option was passed in to changes/replicate, ' +
            'but pouchdb-changes-filter plugin is not installed, so it ' +
            'was ignored. Please install the plugin to enable filtering.'
          );
        }
      });
    }

    if (!('descending' in opts)) {
      opts.descending = false;
    }

    // 0 and 1 should return 1 document
    opts.limit = opts.limit === 0 ? 1 : opts.limit;
    opts.complete = callback;
    var newPromise = this.db._changes(opts);
    /* istanbul ignore else */
    if (newPromise && typeof newPromise.cancel === 'function') {
      // make cancel() also abort the adapter's in-flight request
      const cancel = this.cancel;
      this.cancel = (...args) => {
        newPromise.cancel();
        cancel.apply(this, args);
      };
    }
  }
}
1653
1654/*
1655 * A generic pouch adapter
1656 */
1657
// Wrapper for functions that call the bulkdocs api with a single doc:
// if the call errored, or the first result is an error, the callback gets
// that error (tagged with docId); otherwise it gets the first result.
function yankError(callback, docId) {
  return function (err, results) {
    const failure = err || (results[0] && results[0].error ? results[0] : null);
    if (failure) {
      failure.docId = docId;
      callback(failure);
    } else {
      callback(null, results.length ? results[0] : results);
    }
  };
}
1671
// Clean docs given to us by the user (mutates each doc in place):
// deleted docs lose their attachments entirely, and each remaining
// attachment is stripped down to the allowed set of keys.
function cleanDocs(docs) {
  for (const doc of docs) {
    if (doc._deleted) {
      delete doc._attachments; // ignore atts for deleted docs
    } else if (doc._attachments) {
      // filter out extraneous keys from _attachments
      for (const name of Object.keys(doc._attachments)) {
        doc._attachments[name] = pick(doc._attachments[name],
          ['data', 'digest', 'content_type', 'length', 'revpos', 'stub']);
      }
    }
  }
}
1689
// Compare two docs: primarily by _id, with ties broken by the `start` of
// their _revisions (the numeric rev depth); docs without _revisions sort
// as depth 0.
function compareByIdThenRev(a, b) {
  if (a._id !== b._id) {
    return a._id < b._id ? -1 : 1;
  }
  const aStart = a._revisions ? a._revisions.start : 0;
  const bStart = b._revisions ? b._revisions.start : 0;
  return aStart - bStart;
}
1699
// For every node in a revision tree, compute its distance from the closest
// leaf. Returns a map of '<pos>-<hash>' -> height (leaves are 0).
function computeHeight(revs) {
  const height = {};
  const edges = [];
  // leaves get height 0; record a parent->child edge for every other node
  traverseRevTree(revs, function (isLeaf, pos, id, prnt) {
    const rev$$1 = pos + "-" + id;
    if (isLeaf) {
      height[rev$$1] = 0;
    }
    if (prnt !== undefined) {
      edges.push({from: prnt, to: rev$$1});
    }
    return rev$$1;
  });

  // walk the edges bottom-up so each parent becomes one more than its
  // nearest (minimum-height) child
  edges.reverse();
  for (const edge of edges) {
    if (height[edge.from] === undefined) {
      height[edge.from] = 1 + height[edge.to];
    } else {
      height[edge.from] = Math.min(height[edge.from], 1 + height[edge.to]);
    }
  }
  return height;
}
1726
// Rewrite `opts` (in place) for a keys-based allDocs query: apply skip/limit
// to the keys list up front, then neutralize skip/limit/descending so the
// adapter does not apply them a second time.
function allDocsKeysParse(opts) {
  let keys = opts.keys;
  if ('limit' in opts) {
    keys = keys.slice(opts.skip, opts.limit + opts.skip);
  } else if (opts.skip > 0) {
    keys = keys.slice(opts.skip);
  }
  opts.keys = keys;
  opts.skip = 0;
  delete opts.limit;
  if (opts.descending) {
    // descending order is realized by reversing the keys themselves
    keys.reverse();
    opts.descending = false;
  }
}
1739
// all compaction is done in a queue, to avoid attaching
// too many listeners at once
// Processes the head of `self._compactionQueue`, then (via nextTick, to
// unwind the stack) dequeues it and recurses until the queue is drained.
function doNextCompaction(self) {
  var task = self._compactionQueue[0];
  var opts = task.opts;
  var callback = task.callback;
  // resume from the last compacted seq if a checkpoint doc exists;
  // a missing doc is treated as "start from the beginning"
  self.get('_local/compaction').catch(function () {
    return false;
  }).then(function (doc) {
    if (doc && doc.last_seq) {
      opts.last_seq = doc.last_seq;
    }
    self._compact(opts, function (err, res) {
      /* istanbul ignore if */
      if (err) {
        callback(err);
      } else {
        callback(null, res);
      }
      // dequeue only after completion, so concurrent compact() calls wait
      nextTick(function () {
        self._compactionQueue.shift();
        if (self._compactionQueue.length) {
          doNextCompaction(self);
        }
      });
    });
  });
}
1768
// Record a purge of `docId`/`rev$$1` in the '_local/purges' document:
// increments the purge sequence, appends a purge info entry, and trims the
// retained history to the database's `purged_infos_limit`. Creates the
// document on first purge (404). Returns the promise from `db.put()`.
//
// Bug fix: the limit check previously read `self.purged_infos_limit`, but no
// `self` is in scope here (in browsers that resolved to the global object,
// so the comparison was against undefined and the log was never trimmed).
// The limit lives on the database instance, i.e. the `db` parameter.
function appendPurgeSeq(db, docId, rev$$1) {
  return db.get('_local/purges').then(function (doc) {
    const purgeSeq = doc.purgeSeq + 1;
    doc.purges.push({
      docId,
      rev: rev$$1,
      purgeSeq,
    });
    // cap the retained purge infos at the database's configured limit
    if (doc.purges.length > db.purged_infos_limit) {
      doc.purges.splice(0, doc.purges.length - db.purged_infos_limit);
    }
    doc.purgeSeq = purgeSeq;
    return doc;
  }).catch(function (err) {
    if (err.status !== 404) {
      throw err;
    }
    // first purge ever: start the log at purgeSeq 0
    return {
      _id: '_local/purges',
      purges: [{
        docId,
        rev: rev$$1,
        purgeSeq: 0,
      }],
      purgeSeq: 0,
    };
  }).then(function (doc) {
    return db.put(doc);
  });
}
1799
// Attachment names must not start with '_'. Returns an error-message string
// when the name is invalid, or false when it is acceptable.
function attachmentNameError(name) {
  if (name.charAt(0) !== '_') {
    return false;
  }
  return name + ' is not a valid attachment name, attachment ' +
    'names cannot start with \'_\'';
}
1807
// A valid "single doc" argument is a non-null, non-array object;
// anything else is rejected by post/put.
function isNotSingleDoc(doc) {
  if (doc === null) {
    return true;
  }
  return typeof doc !== 'object' || Array.isArray(doc);
}
1811
// a well-formed rev is '<digits>-<hash>' with no further dashes in the hash
const validRevRegex = /^\d+-[^-]*$/;
function isValidRev(rev$$1) {
  if (typeof rev$$1 !== 'string') {
    return false;
  }
  return validRevRegex.test(rev$$1);
}
1816
1817class AbstractPouchDB extends EE {
1818 _setup() {
1819 this.post = adapterFun('post', function (doc, opts, callback) {
1820 if (typeof opts === 'function') {
1821 callback = opts;
1822 opts = {};
1823 }
1824 if (isNotSingleDoc(doc)) {
1825 return callback(createError(NOT_AN_OBJECT));
1826 }
1827 this.bulkDocs({docs: [doc]}, opts, yankError(callback, doc._id));
1828 }).bind(this);
1829
1830 this.put = adapterFun('put', function (doc, opts, cb) {
1831 if (typeof opts === 'function') {
1832 cb = opts;
1833 opts = {};
1834 }
1835 if (isNotSingleDoc(doc)) {
1836 return cb(createError(NOT_AN_OBJECT));
1837 }
1838 invalidIdError(doc._id);
1839 if ('_rev' in doc && !isValidRev(doc._rev)) {
1840 return cb(createError(INVALID_REV));
1841 }
1842 if (isLocalId(doc._id) && typeof this._putLocal === 'function') {
1843 if (doc._deleted) {
1844 return this._removeLocal(doc, cb);
1845 } else {
1846 return this._putLocal(doc, cb);
1847 }
1848 }
1849
1850 const putDoc = (next) => {
1851 if (typeof this._put === 'function' && opts.new_edits !== false) {
1852 this._put(doc, opts, next);
1853 } else {
1854 this.bulkDocs({docs: [doc]}, opts, yankError(next, doc._id));
1855 }
1856 };
1857
1858 if (opts.force && doc._rev) {
1859 transformForceOptionToNewEditsOption();
1860 putDoc(function (err) {
1861 var result = err ? null : {ok: true, id: doc._id, rev: doc._rev};
1862 cb(err, result);
1863 });
1864 } else {
1865 putDoc(cb);
1866 }
1867
1868 function transformForceOptionToNewEditsOption() {
1869 var parts = doc._rev.split('-');
1870 var oldRevId = parts[1];
1871 var oldRevNum = parseInt(parts[0], 10);
1872
1873 var newRevNum = oldRevNum + 1;
1874 var newRevId = rev();
1875
1876 doc._revisions = {
1877 start: newRevNum,
1878 ids: [newRevId, oldRevId]
1879 };
1880 doc._rev = newRevNum + '-' + newRevId;
1881 opts.new_edits = false;
1882 }
1883 }).bind(this);
1884
1885 this.putAttachment = adapterFun('putAttachment', function (docId, attachmentId, rev$$1, blob, type) {
1886 var api = this;
1887 if (typeof type === 'function') {
1888 type = blob;
1889 blob = rev$$1;
1890 rev$$1 = null;
1891 }
1892 // Lets fix in https://github.com/pouchdb/pouchdb/issues/3267
1893 /* istanbul ignore if */
1894 if (typeof type === 'undefined') {
1895 type = blob;
1896 blob = rev$$1;
1897 rev$$1 = null;
1898 }
1899 if (!type) {
1900 guardedConsole('warn', 'Attachment', attachmentId, 'on document', docId, 'is missing content_type');
1901 }
1902
1903 function createAttachment(doc) {
1904 var prevrevpos = '_rev' in doc ? parseInt(doc._rev, 10) : 0;
1905 doc._attachments = doc._attachments || {};
1906 doc._attachments[attachmentId] = {
1907 content_type: type,
1908 data: blob,
1909 revpos: ++prevrevpos
1910 };
1911 return api.put(doc);
1912 }
1913
1914 return api.get(docId).then(function (doc) {
1915 if (doc._rev !== rev$$1) {
1916 throw createError(REV_CONFLICT);
1917 }
1918
1919 return createAttachment(doc);
1920 }, function (err) {
1921 // create new doc
1922 /* istanbul ignore else */
1923 if (err.reason === MISSING_DOC.message) {
1924 return createAttachment({_id: docId});
1925 } else {
1926 throw err;
1927 }
1928 });
1929 }).bind(this);
1930
1931 this.removeAttachment = adapterFun('removeAttachment', function (docId, attachmentId, rev$$1, callback) {
1932 this.get(docId, (err, obj) => {
1933 /* istanbul ignore if */
1934 if (err) {
1935 callback(err);
1936 return;
1937 }
1938 if (obj._rev !== rev$$1) {
1939 callback(createError(REV_CONFLICT));
1940 return;
1941 }
1942 /* istanbul ignore if */
1943 if (!obj._attachments) {
1944 return callback();
1945 }
1946 delete obj._attachments[attachmentId];
1947 if (Object.keys(obj._attachments).length === 0) {
1948 delete obj._attachments;
1949 }
1950 this.put(obj, callback);
1951 });
1952 }).bind(this);
1953
1954 this.remove = adapterFun('remove', function (docOrId, optsOrRev, opts, callback) {
1955 var doc;
1956 if (typeof optsOrRev === 'string') {
1957 // id, rev, opts, callback style
1958 doc = {
1959 _id: docOrId,
1960 _rev: optsOrRev
1961 };
1962 if (typeof opts === 'function') {
1963 callback = opts;
1964 opts = {};
1965 }
1966 } else {
1967 // doc, opts, callback style
1968 doc = docOrId;
1969 if (typeof optsOrRev === 'function') {
1970 callback = optsOrRev;
1971 opts = {};
1972 } else {
1973 callback = opts;
1974 opts = optsOrRev;
1975 }
1976 }
1977 opts = opts || {};
1978 opts.was_delete = true;
1979 var newDoc = {_id: doc._id, _rev: (doc._rev || opts.rev)};
1980 newDoc._deleted = true;
1981 if (isLocalId(newDoc._id) && typeof this._removeLocal === 'function') {
1982 return this._removeLocal(doc, callback);
1983 }
1984 this.bulkDocs({docs: [newDoc]}, opts, yankError(callback, newDoc._id));
1985 }).bind(this);
1986
1987 this.revsDiff = adapterFun('revsDiff', function (req, opts, callback) {
1988 if (typeof opts === 'function') {
1989 callback = opts;
1990 opts = {};
1991 }
1992 var ids = Object.keys(req);
1993
1994 if (!ids.length) {
1995 return callback(null, {});
1996 }
1997
1998 var count = 0;
1999 var missing = new Map();
2000
2001 function addToMissing(id, revId) {
2002 if (!missing.has(id)) {
2003 missing.set(id, {missing: []});
2004 }
2005 missing.get(id).missing.push(revId);
2006 }
2007
2008 function processDoc(id, rev_tree) {
2009 // Is this fast enough? Maybe we should switch to a set simulated by a map
2010 var missingForId = req[id].slice(0);
2011 traverseRevTree(rev_tree, function (isLeaf, pos, revHash, ctx,
2012 opts) {
2013 var rev$$1 = pos + '-' + revHash;
2014 var idx = missingForId.indexOf(rev$$1);
2015 if (idx === -1) {
2016 return;
2017 }
2018
2019 missingForId.splice(idx, 1);
2020 /* istanbul ignore if */
2021 if (opts.status !== 'available') {
2022 addToMissing(id, rev$$1);
2023 }
2024 });
2025
2026 // Traversing the tree is synchronous, so now `missingForId` contains
2027 // revisions that were not found in the tree
2028 missingForId.forEach(function (rev$$1) {
2029 addToMissing(id, rev$$1);
2030 });
2031 }
2032
2033 ids.forEach(function (id) {
2034 this._getRevisionTree(id, function (err, rev_tree) {
2035 if (err && err.status === 404 && err.message === 'missing') {
2036 missing.set(id, {missing: req[id]});
2037 } else if (err) {
2038 /* istanbul ignore next */
2039 return callback(err);
2040 } else {
2041 processDoc(id, rev_tree);
2042 }
2043
2044 if (++count === ids.length) {
2045 // convert LazyMap to object
2046 var missingObj = {};
2047 missing.forEach(function (value, key) {
2048 missingObj[key] = value;
2049 });
2050 return callback(null, missingObj);
2051 }
2052 });
2053 }, this);
2054 }).bind(this);
2055
2056 // _bulk_get API for faster replication, as described in
2057 // https://github.com/apache/couchdb-chttpd/pull/33
2058 // At the "abstract" level, it will just run multiple get()s in
2059 // parallel, because this isn't much of a performance cost
2060 // for local databases (except the cost of multiple transactions, which is
2061 // small). The http adapter overrides this in order
2062 // to do a more efficient single HTTP request.
2063 this.bulkGet = adapterFun('bulkGet', function (opts, callback) {
2064 bulkGet(this, opts, callback);
2065 }).bind(this);
2066
2067 // compact one document and fire callback
2068 // by compacting we mean removing all revisions which
2069 // are further from the leaf in revision tree than max_height
2070 this.compactDocument = adapterFun('compactDocument', function (docId, maxHeight, callback) {
2071 this._getRevisionTree(docId, (err, revTree) => {
2072 /* istanbul ignore if */
2073 if (err) {
2074 return callback(err);
2075 }
2076 var height = computeHeight(revTree);
2077 var candidates = [];
2078 var revs = [];
2079 Object.keys(height).forEach(function (rev$$1) {
2080 if (height[rev$$1] > maxHeight) {
2081 candidates.push(rev$$1);
2082 }
2083 });
2084
2085 traverseRevTree(revTree, function (isLeaf, pos, revHash, ctx, opts) {
2086 var rev$$1 = pos + '-' + revHash;
2087 if (opts.status === 'available' && candidates.indexOf(rev$$1) !== -1) {
2088 revs.push(rev$$1);
2089 }
2090 });
2091 this._doCompaction(docId, revs, callback);
2092 });
2093 }).bind(this);
2094
2095 // compact the whole database using single document
2096 // compaction
2097 this.compact = adapterFun('compact', function (opts, callback) {
2098 if (typeof opts === 'function') {
2099 callback = opts;
2100 opts = {};
2101 }
2102
2103 opts = opts || {};
2104
2105 this._compactionQueue = this._compactionQueue || [];
2106 this._compactionQueue.push({opts, callback});
2107 if (this._compactionQueue.length === 1) {
2108 doNextCompaction(this);
2109 }
2110 }).bind(this);
2111
2112 /* Begin api wrappers. Specific functionality to storage belongs in the _[method] */
2113 this.get = adapterFun('get', function (id, opts, cb) {
2114 if (typeof opts === 'function') {
2115 cb = opts;
2116 opts = {};
2117 }
2118 opts = opts || {};
2119 if (typeof id !== 'string') {
2120 return cb(createError(INVALID_ID));
2121 }
2122 if (isLocalId(id) && typeof this._getLocal === 'function') {
2123 return this._getLocal(id, cb);
2124 }
2125 var leaves = [];
2126
2127 const finishOpenRevs = () => {
2128 var result = [];
2129 var count = leaves.length;
2130 /* istanbul ignore if */
2131 if (!count) {
2132 return cb(null, result);
2133 }
2134
2135 // order with open_revs is unspecified
2136 leaves.forEach((leaf) => {
2137 this.get(id, {
2138 rev: leaf,
2139 revs: opts.revs,
2140 latest: opts.latest,
2141 attachments: opts.attachments,
2142 binary: opts.binary
2143 }, function (err, doc) {
2144 if (!err) {
2145 // using latest=true can produce duplicates
2146 var existing;
2147 for (var i = 0, l = result.length; i < l; i++) {
2148 if (result[i].ok && result[i].ok._rev === doc._rev) {
2149 existing = true;
2150 break;
2151 }
2152 }
2153 if (!existing) {
2154 result.push({ok: doc});
2155 }
2156 } else {
2157 result.push({missing: leaf});
2158 }
2159 count--;
2160 if (!count) {
2161 cb(null, result);
2162 }
2163 });
2164 });
2165 };
2166
2167 if (opts.open_revs) {
2168 if (opts.open_revs === "all") {
2169 this._getRevisionTree(id, function (err, rev_tree) {
2170 /* istanbul ignore if */
2171 if (err) {
2172 return cb(err);
2173 }
2174 leaves = collectLeaves(rev_tree).map(function (leaf) {
2175 return leaf.rev;
2176 });
2177 finishOpenRevs();
2178 });
2179 } else {
2180 if (Array.isArray(opts.open_revs)) {
2181 leaves = opts.open_revs;
2182 for (var i = 0; i < leaves.length; i++) {
2183 var l = leaves[i];
2184 // looks like it's the only thing couchdb checks
2185 if (!isValidRev(l)) {
2186 return cb(createError(INVALID_REV));
2187 }
2188 }
2189 finishOpenRevs();
2190 } else {
2191 return cb(createError(UNKNOWN_ERROR, 'function_clause'));
2192 }
2193 }
2194 return; // open_revs does not like other options
2195 }
2196
2197 return this._get(id, opts, (err, result) => {
2198 if (err) {
2199 err.docId = id;
2200 return cb(err);
2201 }
2202
2203 var doc = result.doc;
2204 var metadata = result.metadata;
2205 var ctx = result.ctx;
2206
2207 if (opts.conflicts) {
2208 var conflicts = collectConflicts(metadata);
2209 if (conflicts.length) {
2210 doc._conflicts = conflicts;
2211 }
2212 }
2213
2214 if (isDeleted(metadata, doc._rev)) {
2215 doc._deleted = true;
2216 }
2217
2218 if (opts.revs || opts.revs_info) {
2219 var splittedRev = doc._rev.split('-');
2220 var revNo = parseInt(splittedRev[0], 10);
2221 var revHash = splittedRev[1];
2222
2223 var paths = rootToLeaf(metadata.rev_tree);
2224 var path = null;
2225
2226 for (var i = 0; i < paths.length; i++) {
2227 var currentPath = paths[i];
2228 const hashIndex = currentPath.ids.findIndex(x => x.id === revHash);
2229 var hashFoundAtRevPos = hashIndex === (revNo - 1);
2230
2231 if (hashFoundAtRevPos || (!path && hashIndex !== -1)) {
2232 path = currentPath;
2233 }
2234 }
2235
2236 /* istanbul ignore if */
2237 if (!path) {
2238 err = new Error('invalid rev tree');
2239 err.docId = id;
2240 return cb(err);
2241 }
2242
2243 const pathId = doc._rev.split('-')[1];
2244 const indexOfRev = path.ids.findIndex(x => x.id === pathId) + 1;
2245 var howMany = path.ids.length - indexOfRev;
2246 path.ids.splice(indexOfRev, howMany);
2247 path.ids.reverse();
2248
2249 if (opts.revs) {
2250 doc._revisions = {
2251 start: (path.pos + path.ids.length) - 1,
2252 ids: path.ids.map(function (rev$$1) {
2253 return rev$$1.id;
2254 })
2255 };
2256 }
2257 if (opts.revs_info) {
2258 var pos = path.pos + path.ids.length;
2259 doc._revs_info = path.ids.map(function (rev$$1) {
2260 pos--;
2261 return {
2262 rev: pos + '-' + rev$$1.id,
2263 status: rev$$1.opts.status
2264 };
2265 });
2266 }
2267 }
2268
2269 if (opts.attachments && doc._attachments) {
2270 var attachments = doc._attachments;
2271 var count = Object.keys(attachments).length;
2272 if (count === 0) {
2273 return cb(null, doc);
2274 }
2275 Object.keys(attachments).forEach((key) => {
2276 this._getAttachment(doc._id, key, attachments[key], {
2277 binary: opts.binary,
2278 metadata,
2279 ctx
2280 }, function (err, data) {
2281 var att = doc._attachments[key];
2282 att.data = data;
2283 delete att.stub;
2284 delete att.length;
2285 if (!--count) {
2286 cb(null, doc);
2287 }
2288 });
2289 });
2290 } else {
2291 if (doc._attachments) {
2292 for (var key in doc._attachments) {
2293 /* istanbul ignore else */
2294 if (Object.prototype.hasOwnProperty.call(doc._attachments, key)) {
2295 doc._attachments[key].stub = true;
2296 }
2297 }
2298 }
2299 cb(null, doc);
2300 }
2301 });
2302 }).bind(this);
2303
2304 // TODO: I don't like this, it forces an extra read for every
2305 // attachment read and enforces a confusing api between
2306 // adapter.js and the adapter implementation
    // Fetch a single attachment's binary data. Performs a full _get of the
    // document first so the attachment stub and read context can be handed
    // to the adapter's _getAttachment implementation.
    this.getAttachment = adapterFun('getAttachment', function (docId, attachmentId, opts, callback) {
      // opts is optional; shift arguments when omitted
      if (opts instanceof Function) {
        callback = opts;
        opts = {};
      }
      this._get(docId, opts, (err, res) => {
        if (err) {
          return callback(err);
        }
        if (res.doc._attachments && res.doc._attachments[attachmentId]) {
          // reuse the read context/metadata from the document fetch
          opts.ctx = res.ctx;
          opts.binary = true;
          opts.metadata = res.metadata;
          this._getAttachment(docId, attachmentId,
            res.doc._attachments[attachmentId], opts, callback);
        } else {
          return callback(createError(MISSING_DOC));
        }
      });
    }).bind(this);
2327
    // Fetch multiple/all docs by key range or explicit key list, mirroring
    // CouchDB's /_all_docs API (start_key/end_key aliases accepted).
    this.allDocs = adapterFun('allDocs', function (opts, callback) {
      if (typeof opts === 'function') {
        callback = opts;
        opts = {};
      }
      opts.skip = typeof opts.skip !== 'undefined' ? opts.skip : 0;
      // accept CouchDB-style underscore aliases
      if (opts.start_key) {
        opts.startkey = opts.start_key;
      }
      if (opts.end_key) {
        opts.endkey = opts.end_key;
      }
      if ('keys' in opts) {
        if (!Array.isArray(opts.keys)) {
          return callback(new TypeError('options.keys must be an array'));
        }
        // an explicit key list cannot be combined with range options
        var incompatibleOpt =
          ['startkey', 'endkey', 'key'].filter(function (incompatibleOpt) {
            return incompatibleOpt in opts;
          })[0];
        if (incompatibleOpt) {
          callback(createError(QUERY_PARSE_ERROR,
            'Query parameter `' + incompatibleOpt +
            '` is not compatible with multi-get'
          ));
          return;
        }
        if (!isRemote(this)) {
          // local adapters get the key list preprocessed; CouchDB handles
          // this server-side
          allDocsKeysParse(opts);
          if (opts.keys.length === 0) {
            return this._allDocs({limit: 0}, callback);
          }
        }
      }

      return this._allDocs(opts, callback);
    }).bind(this);
2365
    // Mark the database closed, notify listeners, and delegate to the
    // adapter's _close implementation.
    this.close = adapterFun('close', function (callback) {
      this._closed = true;
      this.emit('closed');
      return this._close(callback);
    }).bind(this);
2371
    // Adapter info, normalized: db_name/auto_compaction/adapter are filled
    // in here rather than trusting each adapter to report them.
    this.info = adapterFun('info', function (callback) {
      this._info((err, info) => {
        if (err) {
          return callback(err);
        }
        // assume we know better than the adapter, unless it informs us
        info.db_name = info.db_name || this.name;
        info.auto_compaction = !!(this.auto_compaction && !isRemote(this));
        info.adapter = this.adapter;
        callback(null, info);
      });
    }).bind(this);
2384
    // Stable unique identifier for this database (adapter-provided).
    this.id = adapterFun('id', function (callback) {
      return this._id(callback);
    }).bind(this);
2388
    // Validate and write a batch of documents. Accepts either an array of
    // docs or a {docs: [...]} request object; `new_edits: false` performs a
    // replication-style write that preserves the supplied revisions.
    this.bulkDocs = adapterFun('bulkDocs', function (req, opts, callback) {
      if (typeof opts === 'function') {
        callback = opts;
        opts = {};
      }

      opts = opts || {};

      // normalize the bare-array form to {docs: [...]}
      if (Array.isArray(req)) {
        req = {
          docs: req
        };
      }

      if (!req || !req.docs || !Array.isArray(req.docs)) {
        return callback(createError(MISSING_BULK_DOCS));
      }

      // each entry must be a plain single document with a well-formed _rev
      for (var i = 0; i < req.docs.length; ++i) {
        const doc = req.docs[i];
        if (isNotSingleDoc(doc)) {
          return callback(createError(NOT_AN_OBJECT));
        }
        if ('_rev' in doc && !isValidRev(doc._rev)) {
          return callback(createError(INVALID_REV));
        }
      }

      // validate attachment names; a missing content_type only warns
      var attachmentError;
      req.docs.forEach(function (doc) {
        if (doc._attachments) {
          Object.keys(doc._attachments).forEach(function (name) {
            attachmentError = attachmentError || attachmentNameError(name);
            if (!doc._attachments[name].content_type) {
              guardedConsole('warn', 'Attachment', name, 'on document', doc._id, 'is missing content_type');
            }
          });
        }
      });

      if (attachmentError) {
        return callback(createError(BAD_REQUEST, attachmentError));
      }

      // new_edits may come from opts or the request body; defaults to true
      if (!('new_edits' in opts)) {
        if ('new_edits' in req) {
          opts.new_edits = req.new_edits;
        } else {
          opts.new_edits = true;
        }
      }

      var adapter = this;
      if (!opts.new_edits && !isRemote(adapter)) {
        // ensure revisions of the same doc are sorted, so that
        // the local adapter processes them correctly (#2935)
        req.docs.sort(compareByIdThenRev);
      }

      cleanDocs(req.docs);

      // in the case of conflicts, we want to return the _ids to the user
      // however, the underlying adapter may destroy the docs array, so
      // create a copy here
      var ids = req.docs.map(function (doc) {
        return doc._id;
      });

      this._bulkDocs(req, opts, function (err, res) {
        if (err) {
          return callback(err);
        }
        if (!opts.new_edits) {
          // this is what couch does when new_edits is false
          res = res.filter(function (x) {
            return x.error;
          });
        }
        // add ids for error/conflict responses (not required for CouchDB)
        if (!isRemote(adapter)) {
          for (var i = 0, l = res.length; i < l; i++) {
            res[i].id = res[i].id || ids[i];
          }
        }

        callback(null, res);
      });
    }).bind(this);
2477
    // Create a dependent database (e.g. a map/reduce view db) and record it
    // in _local/_pouch_dependentDbs so destroy() can clean it up later.
    this.registerDependentDatabase = adapterFun('registerDependentDatabase', function (dependentDb, callback) {
      var dbOptions = clone(this.__opts);
      // views may be stored by a different adapter than the main database
      if (this.__opts.view_adapter) {
        dbOptions.adapter = this.__opts.view_adapter;
      }

      var depDB = new this.constructor(dependentDb, dbOptions);

      // upsert diff: returns false when the dependency is already recorded
      function diffFun(doc) {
        doc.dependentDbs = doc.dependentDbs || {};
        if (doc.dependentDbs[dependentDb]) {
          return false; // no update required
        }
        doc.dependentDbs[dependentDb] = true;
        return doc;
      }
      upsert(this, '_local/_pouch_dependentDbs', diffFun).then(function () {
        callback(null, {db: depDB});
      }).catch(callback);
    }).bind(this);
2498
    // Destroy this database. For local adapters, any dependent databases
    // recorded in _local/_pouch_dependentDbs are destroyed first.
    this.destroy = adapterFun('destroy', function (opts, callback) {

      if (typeof opts === 'function') {
        callback = opts;
        opts = {};
      }

      var usePrefix = 'use_prefix' in this ? this.use_prefix : true;

      const destroyDb = () => {
        // call destroy method of the particular adaptor
        this._destroy(opts, (err, resp) => {
          if (err) {
            return callback(err);
          }
          this._destroyed = true;
          this.emit('destroyed');
          callback(null, resp || { 'ok': true });
        });
      };

      if (isRemote(this)) {
        // no need to check for dependent DBs if it's a remote DB
        return destroyDb();
      }

      this.get('_local/_pouch_dependentDbs', (err, localDoc) => {
        if (err) {
          /* istanbul ignore if */
          if (err.status !== 404) {
            return callback(err);
          } else { // no dependencies
            return destroyDb();
          }
        }
        var dependentDbs = localDoc.dependentDbs;
        var PouchDB = this.constructor;
        // destroy every dependent db in parallel, then this one
        var deletedMap = Object.keys(dependentDbs).map((name) => {
          // use_prefix is only false in the browser
          /* istanbul ignore next */
          var trueName = usePrefix ?
            name.replace(new RegExp('^' + PouchDB.prefix), '') : name;
          return new PouchDB(trueName, this.__opts).destroy();
        });
        Promise.all(deletedMap).then(destroyDb, callback);
      });
    }).bind(this);
2546 }
2547
  // Compact the database: for every doc changed since opts.last_seq, drop
  // non-leaf revision bodies (compactDocument), then checkpoint progress in
  // _local/compaction. Progress is reported through this.activeTasks.
  _compact(opts, callback) {
    var changesOpts = {
      return_docs: false,
      last_seq: opts.last_seq || 0,
      since: opts.last_seq || 0
    };
    var promises = [];

    var taskId;
    var compactedDocs = 0;

    const onChange = (row) => {
      this.activeTasks.update(taskId, {
        completed_items: ++compactedDocs
      });
      promises.push(this.compactDocument(row.id, 0));
    };
    const onError = (err) => {
      this.activeTasks.remove(taskId, err);
      callback(err);
    };
    const onComplete = (resp) => {
      var lastSeq = resp.last_seq;
      // wait for every per-document compaction before checkpointing
      Promise.all(promises).then(() => {
        return upsert(this, '_local/compaction', (doc) => {
          if (!doc.last_seq || doc.last_seq < lastSeq) {
            doc.last_seq = lastSeq;
            return doc;
          }
          return false; // somebody else got here first, don't update
        });
      }).then(() => {
        this.activeTasks.remove(taskId);
        callback(null, {ok: true});
      }).catch(onError);
    };

    this.info().then((info) => {
      taskId = this.activeTasks.add({
        name: 'database_compaction',
        total_items: info.update_seq - changesOpts.last_seq,
      });

      this.changes(changesOpts)
        .on('change', onChange)
        .on('complete', onComplete)
        .on('error', onError);
    });
  }
2597
2598 changes(opts, callback) {
2599 if (typeof opts === 'function') {
2600 callback = opts;
2601 opts = {};
2602 }
2603
2604 opts = opts || {};
2605
2606 // By default set return_docs to false if the caller has opts.live = true,
2607 // this will prevent us from collecting the set of changes indefinitely
2608 // resulting in growing memory
2609 opts.return_docs = ('return_docs' in opts) ? opts.return_docs : !opts.live;
2610
2611 return new Changes$1(this, opts, callback);
2612 }
2613
2614 type() {
2615 return (typeof this._type === 'function') ? this._type() : this.adapter;
2616 }
2617}
2618
2619// The abstract purge implementation expects a doc id and the rev of a leaf node in that doc.
2620// It will return errors if the rev doesn’t exist or isn’t a leaf.
AbstractPouchDB.prototype.purge = adapterFun('_purge', function (docId, rev$$1, callback) {
  // not every adapter implements purge
  if (typeof this._purge === 'undefined') {
    return callback(createError(UNKNOWN_ERROR, 'Purge is not implemented in the ' + this.adapter + ' adapter.'));
  }
  var self = this;

  self._getRevisionTree(docId, (error, revs) => {
    if (error) {
      return callback(error);
    }
    if (!revs) {
      return callback(createError(MISSING_DOC));
    }
    let path;
    try {
      // throws when rev$$1 is absent from the tree or is not a leaf
      path = findPathToLeaf(revs, rev$$1);
    } catch (error) {
      // NOTE(review): this passes a bare string (error.message) as the
      // callback's error argument rather than an Error object — confirm
      // callers tolerate that.
      return callback(error.message || error);
    }
    self._purge(docId, path, (error, result) => {
      if (error) {
        return callback(error);
      } else {
        // record the purge in the purge-seq log before reporting success
        appendPurgeSeq(self, docId, rev$$1).then(function () {
          return callback(null, result);
        });
      }
    });
  });
});
2651
/**
 * Queue of operations issued against an adapter that has not finished
 * initialising. Tasks are flushed once the adapter becomes ready, or
 * flushed with the failure reason if initialisation failed.
 */
class TaskQueue {
  constructor() {
    this.isReady = false;
    this.failed = false;
    this.queue = [];
  }

  // Drain the queue. When initialisation failed, every pending task is
  // invoked with the failure; otherwise tasks are invoked with no args.
  execute() {
    let task;
    if (this.failed) {
      while ((task = this.queue.shift())) {
        task(this.failed);
      }
      return;
    }
    while ((task = this.queue.shift())) {
      task();
    }
  }

  // Record an initialisation failure and flush pending tasks with it.
  fail(err) {
    this.failed = err;
    this.execute();
  }

  // Mark the adapter ready and flush pending tasks.
  ready(db) {
    this.isReady = true;
    this.db = db;
    this.execute();
  }

  // Enqueue a task; if we already failed, it is invoked immediately.
  addTask(fun) {
    this.queue.push(fun);
    if (this.failed) {
      this.execute();
    }
  }
}
2690
// Resolve a database name + options to the {name, adapter} pair actually
// used for storage. URL-style names ("http://...", "memory://...") select
// the adapter by scheme; otherwise the first valid preferred adapter wins.
function parseAdapter(name, opts) {
  var match = name.match(/([a-z-]*):\/\/(.*)/);
  if (match) {
    // the http adapter expects the fully qualified name
    return {
      name: /https?/.test(match[1]) ? match[1] + '://' + match[2] : match[2],
      adapter: match[1]
    };
  }

  var adapters = PouchDB.adapters;
  var preferredAdapters = PouchDB.preferredAdapters;
  var prefix = PouchDB.prefix;
  var adapterName = opts.adapter;

  if (!adapterName) { // automatically determine adapter
    for (var i = 0; i < preferredAdapters.length; ++i) {
      adapterName = preferredAdapters[i];
      // check for browsers that have been upgraded from websql-only to websql+idb
      /* istanbul ignore if */
      if (adapterName === 'idb' && 'websql' in adapters &&
          hasLocalStorage() && localStorage['_pouch__websqldb_' + prefix + name]) {
        // log it, because this can be confusing during development
        guardedConsole('log', 'PouchDB is downgrading "' + name + '" to WebSQL to' +
          ' avoid data loss, because it was already opened with WebSQL.');
        continue; // keep using websql to avoid user data loss
      }
      break;
    }
  }

  var adapter = adapters[adapterName];

  // if adapter is invalid, then an error will be thrown later
  var usePrefix = (adapter && 'use_prefix' in adapter) ?
    adapter.use_prefix : true;

  return {
    name: usePrefix ? (prefix + name) : name,
    adapter: adapterName
  };
}
2733
// Classic prototype inheritance: make A's instances inherit from
// B.prototype while keeping A as the reported constructor.
function inherits(A, B) {
  const proto = Object.create(B.prototype, {
    constructor: { value: A }
  });
  A.prototype = proto;
}
2739
// Build a callable class: invoking `klass(...)` without `new` still
// constructs an instance. The parent's prototype chain is attached via
// inherits(), and `init` runs as the constructor body.
function createClass(parent, init) {
  const klass = function (...args) {
    if (this instanceof klass) {
      init.apply(this, args);
    } else {
      return new klass(...args);
    }
  };
  inherits(klass, parent);
  return klass;
}
2750
2751// OK, so here's the deal. Consider this code:
2752// var db1 = new PouchDB('foo');
2753// var db2 = new PouchDB('foo');
2754// db1.destroy();
2755// ^ these two both need to emit 'destroyed' events,
2756// as well as the PouchDB constructor itself.
2757// So we have one db object (whichever one got destroy() called on it)
2758// responsible for emitting the initial event, which then gets emitted
2759// by the constructor, which then broadcasts it to any other dbs
2760// that may have been created with the same name.
function prepareForDestruction(self) {

  // Invoked via this db's own 'destroyed' event; from_constructor is true
  // when the constructor is re-broadcasting the event to sibling handles,
  // in which case we must not emit again (that would loop).
  function onDestroyed(from_constructor) {
    self.removeListener('closed', onClosed);
    if (!from_constructor) {
      self.constructor.emit('destroyed', self.name);
    }
  }

  // A closed db no longer needs destruction bookkeeping; unregister it.
  function onClosed() {
    self.removeListener('destroyed', onDestroyed);
    self.constructor.emit('unref', self);
  }

  self.once('destroyed', onDestroyed);
  self.once('closed', onClosed);
  // register this handle in the constructor's destruction-listener map
  self.constructor.emit('ref', self);
}
2779
// Concrete PouchDB implementation: resolves and validates the adapter,
// then kicks off asynchronous adapter initialisation through the task
// queue so operations issued immediately are deferred until ready.
class PouchInternal extends AbstractPouchDB {
  constructor(name, opts) {
    super();
    this._setup(name, opts);
  }

  _setup(name, opts) {
    super._setup();
    opts = opts || {};

    // support the `new PouchDB({name: 'foo', ...})` form
    if (name && typeof name === 'object') {
      opts = name;
      name = opts.name;
      delete opts.name;
    }

    if (opts.deterministic_revs === undefined) {
      opts.deterministic_revs = true;
    }

    this.__opts = opts = clone(opts);

    this.auto_compaction = opts.auto_compaction;
    this.purged_infos_limit = opts.purged_infos_limit || 1000;
    this.prefix = PouchDB.prefix;

    if (typeof name !== 'string') {
      throw new Error('Missing/invalid DB name');
    }

    var prefixedName = (opts.prefix || '') + name;
    var backend = parseAdapter(prefixedName, opts);

    opts.name = backend.name;
    opts.adapter = opts.adapter || backend.adapter;

    this.name = name;
    this._adapter = opts.adapter;
    PouchDB.emit('debug', ['adapter', 'Picked adapter: ', opts.adapter]);

    if (!PouchDB.adapters[opts.adapter] ||
        !PouchDB.adapters[opts.adapter].valid()) {
      throw new Error('Invalid Adapter: ' + opts.adapter);
    }

    // views may live in a different adapter than the main database
    if (opts.view_adapter) {
      if (!PouchDB.adapters[opts.view_adapter] ||
          !PouchDB.adapters[opts.view_adapter].valid()) {
        throw new Error('Invalid View Adapter: ' + opts.view_adapter);
      }
    }

    this.taskqueue = new TaskQueue();

    this.adapter = opts.adapter;

    // adapter init is async; queued operations run once ready() fires
    PouchDB.adapters[opts.adapter].call(this, opts, (err) => {
      if (err) {
        return this.taskqueue.fail(err);
      }
      prepareForDestruction(this);

      this.emit('created', this);
      PouchDB.emit('created', this.name);
      this.taskqueue.ready(this);
    });
  }
}
2848
// Public constructor: callable with or without `new` (see createClass).
const PouchDB = createClass(PouchInternal, function (name, opts) {
  PouchInternal.prototype._setup.call(this, name, opts);
});

// capture the global fetch/Headers pair for use by PouchDB.fetch and adapters
var f$1 = fetch;
var h = Headers;
2855
/**
 * Registry of long-running tasks (e.g. database compaction, indexing)
 * keyed by a generated UUID, mirroring CouchDB's /_active_tasks
 * bookkeeping.
 */
class ActiveTasks {
  constructor() {
    this.tasks = {};
  }

  /** @returns {Array<object>} all currently-registered tasks */
  list() {
    return Object.values(this.tasks);
  }

  /**
   * Register a new task.
   * @param {{name: string, total_items: number}} task
   * @returns {string} the generated task id
   */
  add(task) {
    const id = v4();
    this.tasks[id] = {
      id,
      name: task.name,
      total_items: task.total_items,
      created_at: new Date().toJSON()
    };
    return id;
  }

  /** Look up a task by id (undefined when absent). */
  get(id) {
    return this.tasks[id];
  }

  /* eslint-disable no-unused-vars */
  // `reason` is accepted for API symmetry but not recorded.
  remove(id, reason) {
    delete this.tasks[id];
    return this.tasks;
  }

  /**
   * Merge progress fields into an existing task and stamp updated_at.
   * Uses ?? rather than || so an explicit 0 for total_items or
   * completed_items is preserved instead of silently falling back to the
   * previous value.
   */
  update(id, updatedTask) {
    const task = this.tasks[id];
    if (typeof task !== 'undefined') {
      const mergedTask = {
        id: task.id,
        name: task.name,
        created_at: task.created_at,
        total_items: updatedTask.total_items ?? task.total_items,
        completed_items: updatedTask.completed_items ?? task.completed_items,
        updated_at: new Date().toJSON()
      };
      this.tasks[id] = mergedTask;
    }
    return this.tasks;
  }
}
2902
// Registry of available storage adapters and their automatic-selection order.
PouchDB.adapters = {};
PouchDB.preferredAdapters = [];

// Prefix applied to on-disk database names (see parseAdapter).
PouchDB.prefix = '_pouch_';

// Shared emitter backing the static PouchDB event API.
var eventEmitter = new EE();
2909
// Graft a shared EventEmitter API onto the constructor (PouchDB.on etc.)
// and maintain a name -> [db] map so destroying one handle notifies every
// other open handle to the same database.
function setUpEventEmitter(Pouch) {
  Object.keys(EE.prototype).forEach(function (key) {
    if (typeof EE.prototype[key] === 'function') {
      Pouch[key] = eventEmitter[key].bind(eventEmitter);
    }
  });

  // these are created in constructor.js, and allow us to notify each DB with
  // the same name that it was destroyed, via the constructor object
  var destructListeners = Pouch._destructionListeners = new Map();

  Pouch.on('ref', function onConstructorRef(db) {
    if (!destructListeners.has(db.name)) {
      destructListeners.set(db.name, []);
    }
    destructListeners.get(db.name).push(db);
  });

  Pouch.on('unref', function onConstructorUnref(db) {
    if (!destructListeners.has(db.name)) {
      return;
    }
    var dbList = destructListeners.get(db.name);
    var pos = dbList.indexOf(db);
    if (pos < 0) {
      /* istanbul ignore next */
      return;
    }
    dbList.splice(pos, 1);
    // NOTE(review): `> 1` looks like it should be `> 0`; as written, a list
    // holding exactly one remaining db gets deleted from the map — confirm
    // intended. (The set() itself is a no-op since dbList is the stored
    // array.)
    if (dbList.length > 1) {
      /* istanbul ignore next */
      destructListeners.set(db.name, dbList);
    } else {
      destructListeners.delete(db.name);
    }
  });

  Pouch.on('destroyed', function onConstructorDestroyed(name) {
    if (!destructListeners.has(name)) {
      return;
    }
    // notify every other open handle to the destroyed database
    var dbList = destructListeners.get(name);
    destructListeners.delete(name);
    dbList.forEach(function (db) {
      db.emit('destroyed',true);
    });
  });
}
2958
// install the static event API on the exported constructor
setUpEventEmitter(PouchDB);
2960
// Register a storage adapter under `id`, optionally appending it to the
// list tried automatically when no adapter is specified.
PouchDB.adapter = function (id, obj, addToPreferredAdapters) {
  /* istanbul ignore else */
  if (obj.valid()) {
    PouchDB.adapters[id] = obj;
    if (!addToPreferredAdapters) {
      return;
    }
    PouchDB.preferredAdapters.push(id);
  }
};
2970
// Install a plugin: either a function receiving the constructor, or an
// object whose members are copied onto PouchDB.prototype.
PouchDB.plugin = function (obj) {
  if (typeof obj === 'function') { // function style for plugins
    obj(PouchDB);
  } else if (typeof obj !== 'object' || Object.keys(obj).length === 0) {
    throw new Error('Invalid plugin: got "' + obj + '", expected an object or a function');
  } else {
    // object style for plugins
    for (const id of Object.keys(obj)) {
      PouchDB.prototype[id] = obj[id];
    }
  }
  // keep defaults when plugging into a PouchDB.defaults() subclass
  if (this.__defaults) {
    PouchDB.__defaults = Object.assign({}, this.__defaults);
  }
  return PouchDB;
};
2986
// Create a subclass of PouchDB whose constructor merges `defaultOpts` into
// every instantiation. Defaults accumulate across chained defaults() calls.
PouchDB.defaults = function (defaultOpts) {
  let PouchWithDefaults = createClass(PouchDB, function (name, opts) {
    opts = opts || {};

    // support the object-only constructor form
    if (name && typeof name === 'object') {
      opts = name;
      name = opts.name;
      delete opts.name;
    }

    opts = Object.assign({}, PouchWithDefaults.__defaults, opts);
    PouchDB.call(this, name, opts);
  });

  // copy static state (adapters, event API, ...) onto the subclass
  PouchWithDefaults.preferredAdapters = PouchDB.preferredAdapters.slice();
  Object.keys(PouchDB).forEach(function (key) {
    if (!(key in PouchWithDefaults)) {
      PouchWithDefaults[key] = PouchDB[key];
    }
  });

  // make default options transitive
  // https://github.com/pouchdb/pouchdb/issues/5922
  PouchWithDefaults.__defaults = Object.assign({}, this.__defaults, defaultOpts);

  return PouchWithDefaults;
};
3014
// Expose fetch so adapters/plugins issue HTTP requests through one place.
PouchDB.fetch = function (url, opts) {
  return f$1(url, opts);
};

// one shared ActiveTasks registry across all database instances
PouchDB.prototype.activeTasks = PouchDB.activeTasks = new ActiveTasks();

// managed automatically by set-version.js
var version = "9.0.0";
3023
3024// this would just be "return doc[field]", but fields
3025// can be "deep" due to dot notation
/**
 * Resolve a possibly-deep field (already split on dots by parseField)
 * against a document, e.g. ['a','b'] -> doc.a.b.
 * Returns undefined when a path step is missing.
 */
function getFieldFromDoc(doc, parsedField) {
  var value = doc;
  for (var i = 0, len = parsedField.length; i < len; i++) {
    var key = parsedField[i];
    value = value[key];
    // Stop only on null/undefined: falsy-but-valid intermediates (0, '',
    // false) must still let the remaining path steps resolve (previously
    // `if (!value)` made {a: 0} at path "a.b" wrongly return 0).
    if (value == null) {
      break;
    }
  }
  return value;
}
3037
// Three-way comparison helper: -1, 0 or 1.
function compare(left, right) {
  if (left < right) {
    return -1;
  }
  return left > right ? 1 : 0;
}
3041
3042// Converts a string in dot notation to an array of its components, with backslash escaping
function parseField(fieldName) {
  // Fields may be deep (e.g. "foo.bar.baz"), so split on unescaped dots;
  // "\." and "\$" are kept as literal characters.
  const fields = [];
  let current = '';
  for (let i = 0; i < fieldName.length; i++) {
    const ch = fieldName[i];
    const escaped =
      i > 0 && fieldName[i - 1] === '\\' && (ch === '$' || ch === '.');
    if (escaped) {
      // drop the backslash already accumulated and keep the literal char
      current = current.substring(0, current.length - 1) + ch;
    } else if (ch === '.') {
      // an unescaped `.` delimits path components
      fields.push(current);
      current = '';
    } else {
      current += ch;
    }
  }
  fields.push(current);
  return fields;
}
3063
// Top-level operators that combine whole sub-selectors.
var combinationFields = ['$or', '$nor', '$not'];
function isCombinationalField(field) {
  return combinationFields.includes(field);
}

// First key of a single-entry object (e.g. the operator of `{$gt: 5}`)...
function getKey(obj) {
  return Object.keys(obj)[0];
}

// ...and the value stored under that first key.
function getValue(obj) {
  return obj[getKey(obj)];
}
3076
3077
3078// flatten an array of selectors joined by an $and operator
// Collapse an array of $and-ed selectors into a single selector object.
function mergeAndedSelectors(selectors) {

  // sort to ensure that e.g. if the user specified
  // $and: [{$gt: 'a'}, {$gt: 'b'}], then it's collapsed into
  // just {$gt: 'b'}
  var res = {};
  // tracks whether each combinational field is being seen for the first time
  var first = {$or: true, $nor: true};

  selectors.forEach(function (selector) {
    Object.keys(selector).forEach(function (field) {
      var matcher = selector[field];
      // bare values are shorthand for {$eq: value}
      if (typeof matcher !== 'object') {
        matcher = {$eq: matcher};
      }

      if (isCombinationalField(field)) {
        // or, nor
        if (matcher instanceof Array) {
          if (first[field]) {
            first[field] = false;
            res[field] = matcher;
            return;
          }

          // AND of two OR-lists: distribute by merging each pair of branches
          var entries = [];
          res[field].forEach(function (existing) {
            Object.keys(matcher).forEach(function (key) {
              var m = matcher[key];
              var longest = Math.max(Object.keys(existing).length, Object.keys(m).length);
              var merged = mergeAndedSelectors([existing, m]);
              if (Object.keys(merged).length <= longest) {
                // we have a situation like: (a :{$eq :1} || ...) && (a {$eq: 2} || ...)
                // merging would produce a $eq 2 when actually we shouldn't ever match against these merged conditions
                // merged should always contain more values to be valid
                return;
              }
              entries.push(merged);
            });
          });
          res[field] = entries;
        } else {
          // not
          res[field] = mergeAndedSelectors([matcher]);
        }
      } else {
        // plain field: fold each operator into the accumulated matchers
        var fieldMatchers = res[field] = res[field] || {};
        Object.keys(matcher).forEach(function (operator) {
          var value = matcher[operator];

          if (operator === '$gt' || operator === '$gte') {
            return mergeGtGte(operator, value, fieldMatchers);
          } else if (operator === '$lt' || operator === '$lte') {
            return mergeLtLte(operator, value, fieldMatchers);
          } else if (operator === '$ne') {
            return mergeNe(value, fieldMatchers);
          } else if (operator === '$eq') {
            return mergeEq(value, fieldMatchers);
          } else if (operator === "$regex") {
            return mergeRegex(value, fieldMatchers);
          }
          // any other operator is taken as-is (last writer wins)
          fieldMatchers[operator] = value;
        });
      }
    });
  });

  return res;
}
3147
3148
3149
3150// collapse logically equivalent gt/gte values
// Fold a $gt/$gte lower bound into fieldMatchers, keeping only the
// tightest bound.
function mergeGtGte(operator, value, fieldMatchers) {
  // an existing $eq is already stricter than any lower bound
  if (typeof fieldMatchers.$eq !== 'undefined') {
    return; // do nothing
  }
  if (typeof fieldMatchers.$gte !== 'undefined') {
    if (operator === '$gte') {
      if (value > fieldMatchers.$gte) { // more specificity
        fieldMatchers.$gte = value;
      }
    } else { // operator === '$gt'
      if (value >= fieldMatchers.$gte) { // more specificity
        delete fieldMatchers.$gte;
        fieldMatchers.$gt = value;
      }
    }
  } else if (typeof fieldMatchers.$gt !== 'undefined') {
    if (operator === '$gte') {
      if (value > fieldMatchers.$gt) { // more specificity
        delete fieldMatchers.$gt;
        fieldMatchers.$gte = value;
      }
    } else { // operator === '$gt'
      if (value > fieldMatchers.$gt) { // more specificity
        fieldMatchers.$gt = value;
      }
    }
  } else {
    // no lower bound recorded yet
    fieldMatchers[operator] = value;
  }
}
3181
3182// collapse logically equivalent lt/lte values
// Fold a $lt/$lte upper bound into fieldMatchers, keeping only the
// tightest bound. (Mirror image of mergeGtGte.)
function mergeLtLte(operator, value, fieldMatchers) {
  // an existing $eq is already stricter than any upper bound
  if (typeof fieldMatchers.$eq !== 'undefined') {
    return; // do nothing
  }
  if (typeof fieldMatchers.$lte !== 'undefined') {
    if (operator === '$lte') {
      if (value < fieldMatchers.$lte) { // more specificity
        fieldMatchers.$lte = value;
      }
    } else { // operator === '$lt'
      if (value <= fieldMatchers.$lte) { // more specificity
        delete fieldMatchers.$lte;
        fieldMatchers.$lt = value;
      }
    }
  } else if (typeof fieldMatchers.$lt !== 'undefined') {
    if (operator === '$lte') {
      if (value < fieldMatchers.$lt) { // more specificity
        delete fieldMatchers.$lt;
        fieldMatchers.$lte = value;
      }
    } else { // operator === '$lt'
      if (value < fieldMatchers.$lt) { // more specificity
        fieldMatchers.$lt = value;
      }
    }
  } else {
    // no upper bound recorded yet
    fieldMatchers[operator] = value;
  }
}
3213
3214// combine $ne values into one array
// Collect $ne values into one array ("not any of these").
function mergeNe(value, fieldMatchers) {
  if ('$ne' in fieldMatchers) {
    // there are many things this could "not" be
    fieldMatchers.$ne.push(value);
    return;
  }
  // first $ne seen for this field
  fieldMatchers.$ne = [value];
}
3223
3224// add $eq into the mix
// An exact $eq match supersedes every range/inequality matcher on a field.
function mergeEq(value, fieldMatchers) {
  // TODO: check for user errors here
  for (const op of ['$gt', '$gte', '$lt', '$lte', '$ne']) {
    delete fieldMatchers[op];
  }
  fieldMatchers.$eq = value;
}
3235
3236// combine $regex values into one array
// Collect $regex values into one array (a value may need to match several).
function mergeRegex(value, fieldMatchers) {
  if (!('$regex' in fieldMatchers)) {
    // first $regex seen for this field
    fieldMatchers.$regex = [value];
    return;
  }
  fieldMatchers.$regex.push(value);
}
3245
3246//#7458: execute function mergeAndedSelectors on nested $and
// Walk the selector tree and collapse every nested {$and: [...]} found in
// an array element via mergeAndedSelectors. Mutates and returns obj.
function mergeAndedSelectorsNested(obj) {
  for (var prop in obj) {
    // NOTE(review): this array check runs once per enumerated property,
    // which is redundant (obj doesn't change mid-loop) but harmless.
    if (Array.isArray(obj)) {
      for (var i in obj) {
        if (obj[i]['$and']) {
          obj[i] = mergeAndedSelectors(obj[i]['$and']);
        }
      }
    }
    var value = obj[prop];
    if (typeof value === 'object') {
      mergeAndedSelectorsNested(value); // <- recursive call
    }
  }
  return obj;
}
3263
3264//#7458: determine id $and is present in selector (at any level)
function isAndInSelector(obj, isAnd) {
  // Walk every nested object/array; flip the flag once a `$and` key is
  // seen at any depth. for-in is used deliberately: it tolerates null and
  // primitive values reached through the recursion (typeof null is
  // 'object').
  for (var prop in obj) {
    if (!isAnd && prop === '$and') {
      isAnd = true;
    }
    var value = obj[prop];
    if (typeof value === 'object') {
      isAnd = isAndInSelector(value, isAnd);
    }
  }
  return isAnd;
}
3277
3278//
3279// normalize the selector
3280//
// Normalize a Mango selector into canonical form: nested $and merged, bare
// values wrapped as {$eq: ...}, and repeated operators ($ne/$regex) stored
// as arrays. Works on a clone; the input is not mutated.
function massageSelector(input) {
  var result = clone(input);

  //#7458: if $and is present in selector (at any level) merge nested $and
  if (isAndInSelector(result, false)) {
    result = mergeAndedSelectorsNested(result);
    if ('$and' in result) {
      result = mergeAndedSelectors(result['$and']);
    }
  }

  // normalize the sub-selectors of $or/$nor
  ['$or', '$nor'].forEach(function (orOrNor) {
    if (orOrNor in result) {
      // message each individual selector
      // e.g. {foo: 'bar'} becomes {foo: {$eq: 'bar'}}
      result[orOrNor].forEach(function (subSelector) {
        var fields = Object.keys(subSelector);
        for (var i = 0; i < fields.length; i++) {
          var field = fields[i];
          var matcher = subSelector[field];
          if (typeof matcher !== 'object' || matcher === null) {
            subSelector[field] = {$eq: matcher};
          }
        }
      });
    }
  });

  if ('$not' in result) {
    //This feels a little like forcing, but it will work for now,
    //I would like to come back to this and make the merging of selectors a little more generic
    result['$not'] = mergeAndedSelectors([result['$not']]);
  }

  // top level: wrap bare values in {$eq: ...}
  var fields = Object.keys(result);

  for (var i = 0; i < fields.length; i++) {
    var field = fields[i];
    var matcher = result[field];

    if (typeof matcher !== 'object' || matcher === null) {
      matcher = {$eq: matcher};
    }
    result[field] = matcher;
  }

  // ensure repeated-operator values ($ne/$regex) are arrays
  normalizeArrayOperators(result);

  return result;
}
3331
3332//
3333// The $ne and $regex values must be placed in an array because these operators can be used multiple times on the same field.
3334// When $and is used, mergeAndedSelectors takes care of putting some of them into arrays, otherwise it's done here.
3335//
function normalizeArrayOperators(selector) {
  // Wrap lone $ne/$regex values in arrays (these operators may repeat on a
  // field) and recurse into nested matchers so deep selectors are covered.
  for (const field of Object.keys(selector)) {
    const matcher = selector[field];

    if (Array.isArray(matcher)) {
      for (const item of matcher) {
        if (item && typeof item === 'object') {
          normalizeArrayOperators(item);
        }
      }
    } else if (field === '$ne') {
      selector.$ne = [matcher];
    } else if (field === '$regex') {
      selector.$regex = [matcher];
    } else if (matcher && typeof matcher === 'object') {
      normalizeArrayOperators(matcher);
    }
  }
}
3355
// Build a padding string of `padWith` repeated until `str` plus the padding
// reaches upToLength. Returns only the padding, not the padded string; may
// overshoot by a fraction of padWith, matching repeated concatenation.
function pad(str, padWith, upToLength) {
  const deficit = upToLength - str.length;
  /* istanbul ignore next */
  if (deficit <= 0) {
    return '';
  }
  return padWith.repeat(Math.ceil(deficit / padWith.length));
}

// Left-pad `str` with `padWith` up to `upToLength` characters.
function padLeft(str, padWith, upToLength) {
  return pad(str, padWith, upToLength) + str;
}
3370
// Constants for encoding numbers as collation-safe strings: the smallest
// base-10 exponent of a double, the digits needed to encode a magnitude,
// and the separator between encoded parts.
var MIN_MAGNITUDE = -324; // verified by -Number.MIN_VALUE
var MAGNITUDE_DIGITS = 3; // ditto
var SEP = ''; // set to '_' for easier debugging
3374
// Total-order comparison of two CouchDB keys. Keys are normalized first;
// the type rank from collationIndex decides between different types, and
// same-type values are compared by the appropriate per-type routine.
function collate(a, b) {

  if (a === b) {
    return 0;
  }

  a = normalizeKey(a);
  b = normalizeKey(b);

  var ai = collationIndex(a);
  var bi = collationIndex(b);
  if ((ai - bi) !== 0) {
    // different type ranks: the rank alone decides
    return ai - bi;
  }
  switch (typeof a) {
    case 'number':
      return a - b;
    case 'boolean':
      return a < b ? -1 : 1;
    case 'string':
      return stringCollate(a, b);
  }
  // remaining same-rank cases: arrays and generic objects
  return Array.isArray(a) ? arrayCollate(a, b) : objectCollate(a, b);
}
3399
// couch considers null/NaN/Infinity/-Infinity === undefined,
// for the purposes of mapreduce indexes. also, dates get stringified.
function normalizeKey(key) {
  if (typeof key === 'undefined') {
    return null;
  }
  if (typeof key === 'number') {
    // NaN / +-Infinity all collapse to null
    return isFinite(key) ? key : null;
  }
  if (typeof key !== 'object' || key === null) {
    // strings, booleans and null pass through untouched
    return key;
  }
  if (Array.isArray(key)) {
    return key.map(function (element) {
      return normalizeKey(element);
    });
  }
  /* istanbul ignore next */
  if (key instanceof Date) {
    return key.toJSON();
  }
  // generic object: shallow-rebuild, normalizing values and
  // dropping properties whose value is undefined
  var copy = {};
  for (var k in key) {
    if (Object.prototype.hasOwnProperty.call(key, k)) {
      var val = key[k];
      if (typeof val !== 'undefined') {
        copy[k] = normalizeKey(val);
      }
    }
  }
  return copy;
}
3436
// Encode a single (already-normalized) key as its sortable body; the
// collation-class prefix and terminator are added by toIndexableString.
function indexify(key) {
  if (key === null) {
    return '';
  }
  switch (typeof key) {
    case 'boolean':
      return key ? 1 : 0;
    case 'number':
      return numToIndexableString(key);
    case 'string':
      // We've to be sure that key does not contain \u0000
      // Do order-preserving replacements:
      // 0 -> 1, 1
      // 1 -> 1, 2
      // 2 -> 2, 2
      /* eslint-disable no-control-regex */
      return key
        .replace(/\u0002/g, '\u0002\u0002')
        .replace(/\u0001/g, '\u0001\u0002')
        .replace(/\u0000/g, '\u0001\u0001');
      /* eslint-enable no-control-regex */
    case 'object': {
      if (Array.isArray(key)) {
        var arrOut = '';
        for (var i = 0; i < key.length; i++) {
          arrOut += toIndexableString(key[i]);
        }
        return arrOut;
      }
      // plain object: alternate encoded key / encoded value
      var objOut = '';
      var objKeys = Object.keys(key);
      for (var j = 0; j < objKeys.length; j++) {
        objOut += toIndexableString(objKeys[j]) +
          toIndexableString(key[objKeys[j]]);
      }
      return objOut;
    }
  }
  return '';
}
3478
3479// convert the given key to a string that would be appropriate
3480// for lexical sorting, e.g. within a database, where the
3481// sorting is the same given by the collate() function.
function toIndexableString(key) {
  var normalized = normalizeKey(key);
  // class prefix, then the encoded body, then a \u0000 terminator
  return collationIndex(normalized) + SEP + indexify(normalized) + '\u0000';
}
3487
// Decode a number from the indexable-string encoding produced by
// numToIndexableString, starting at offset `i` of `str`.
// Returns {num, length} where `length` is the count of characters consumed.
function parseNumber(str, i) {
  var originalIdx = i;
  var num;
  // a leading '1' marks the special-cased value zero
  var isZero = str[i] === '1';
  if (isZero) {
    num = 0;
    i++;
  } else {
    var neg = str[i] === '0';
    i++;
    // fixed-width shifted exponent comes first
    var magAsString = str.substring(i, i + MAGNITUDE_DIGITS);
    var magnitude = parseInt(magAsString, 10) + MIN_MAGNITUDE;
    /* istanbul ignore next */
    if (neg) {
      magnitude = -magnitude;
    }
    i += MAGNITUDE_DIGITS;
    // the factor (mantissa) runs until the \u0000 terminator
    var numAsString = '';
    while (true) {
      var ch = str[i];
      if (ch === '\u0000') {
        break;
      } else {
        numAsString += ch;
      }
      i++;
    }
    // fix: parse the string itself, not the array returned by split()
    // (the old code passed the array to parseInt, relying on coercion)
    var parts = numAsString.split('.');
    if (parts.length === 1) {
      num = parseInt(parts[0], 10);
    } else {
      /* istanbul ignore next */
      num = parseFloat(parts[0] + '.' + parts[1]);
    }
    /* istanbul ignore next */
    if (neg) {
      // negative factors were stored as (10 - factor) for reverse ordering
      num = num - 10;
    }
    /* istanbul ignore next */
    if (magnitude !== 0) {
      // parseFloat is more reliable than pow due to rounding errors
      // e.g. Number.MAX_VALUE would return Infinity if we did
      // num * Math.pow(10, magnitude);
      num = parseFloat(num + 'e' + magnitude);
    }
  }
  return {num, length : i - originalIdx};
}
3536
3537// move up the stack while parsing
3538// this function moved outside of parseIndexableString for performance
function pop(stack, metaStack) {
  var obj = stack.pop();

  if (!metaStack.length) {
    return;
  }

  var lastMetaElement = metaStack[metaStack.length - 1];
  if (obj === lastMetaElement.element) {
    // popping a meta-element, e.g. an object whose value is another object
    metaStack.pop();
    lastMetaElement = metaStack[metaStack.length - 1];
  }

  var container = lastMetaElement.element;
  if (Array.isArray(container)) {
    container.push(obj);
  } else if (lastMetaElement.index === stack.length - 2) {
    // obj with key+value: the key sits just below obj on the stack
    container[stack.pop()] = obj;
  } else {
    stack.push(obj); // obj with key only
  }
}
3561
// Inverse of toIndexableString(): decode a collation-encoded string back
// into the original JS value (null, boolean, number, string, array, object).
// Driven by one-character collation-class prefixes; '\u0000' terminates
// the current value (and, at the top level, the whole input).
function parseIndexableString(str) {
  var stack = [];
  var metaStack = []; // stack for arrays and objects
  var i = 0;

  /*eslint no-constant-condition: ["error", { "checkLoops": false }]*/
  while (true) {
    var collationIndex = str[i++];
    if (collationIndex === '\u0000') {
      if (stack.length === 1) {
        // terminator at depth 0: the single stack entry is the result
        return stack.pop();
      } else {
        // terminator inside a container: fold the finished value upward
        pop(stack, metaStack);
        continue;
      }
    }
    switch (collationIndex) {
      case '1': // null
        stack.push(null);
        break;
      case '2': // boolean, encoded as a single '1' (true) or '0' (false)
        stack.push(str[i] === '1');
        i++;
        break;
      case '3': // number
        var parsedNum = parseNumber(str, i);
        stack.push(parsedNum.num);
        i += parsedNum.length;
        break;
      case '4': // string, runs until the next '\u0000'
        var parsedStr = '';
        /*eslint no-constant-condition: ["error", { "checkLoops": false }]*/
        while (true) {
          var ch = str[i];
          if (ch === '\u0000') {
            break;
          }
          parsedStr += ch;
          i++;
        }
        // perform the reverse of the order-preserving replacement
        // algorithm (see above)
        /* eslint-disable no-control-regex */
        parsedStr = parsedStr.replace(/\u0001\u0001/g, '\u0000')
          .replace(/\u0001\u0002/g, '\u0001')
          .replace(/\u0002\u0002/g, '\u0002');
        /* eslint-enable no-control-regex */
        stack.push(parsedStr);
        break;
      case '5': // array: open a container; elements follow until '\u0000'
        var arrayElement = { element: [], index: stack.length };
        stack.push(arrayElement.element);
        metaStack.push(arrayElement);
        break;
      case '6': // object: keys and values alternate until '\u0000'
        var objElement = { element: {}, index: stack.length };
        stack.push(objElement.element);
        metaStack.push(objElement);
        break;
      /* istanbul ignore next */
      default:
        throw new Error(
          'bad collationIndex or unexpectedly reached end of input: ' +
          collationIndex);
    }
  }
}
3629
// Element-wise comparison; if all shared elements tie, shorter sorts first.
function arrayCollate(a, b) {
  var shared = Math.min(a.length, b.length);
  for (var idx = 0; idx < shared; idx++) {
    var cmp = collate(a[idx], b[idx]);
    if (cmp !== 0) {
      return cmp;
    }
  }
  if (a.length === b.length) {
    return 0;
  }
  return a.length > b.length ? 1 : -1;
}
function stringCollate(a, b) {
  // See: https://github.com/daleharvey/pouchdb/issues/40
  // Plain code-unit ordering: incompatible with CouchDB's ICU collation,
  // but it's the best we can do for now.
  if (a === b) {
    return 0;
  }
  return a > b ? 1 : -1;
}
function objectCollate(a, b) {
  var aKeys = Object.keys(a);
  var bKeys = Object.keys(b);
  var shared = Math.min(aKeys.length, bKeys.length);
  for (var i = 0; i < shared; i++) {
    // First sort the keys
    var cmp = collate(aKeys[i], bKeys[i]);
    if (cmp !== 0) {
      return cmp;
    }
    // if the keys are equal sort the values
    cmp = collate(a[aKeys[i]], b[bKeys[i]]);
    if (cmp !== 0) {
      return cmp;
    }
  }
  // all shared pairs tie: the object with fewer keys sorts first
  if (aKeys.length === bKeys.length) {
    return 0;
  }
  return aKeys.length > bKeys.length ? 1 : -1;
}
// The collation is defined by erlang's ordered terms: the atoms null,
// true, false come first, then numbers, strings, arrays, then objects.
// null/undefined/NaN/Infinity/-Infinity are all considered null.
function collationIndex(x) {
  switch (typeof x) {
    case 'boolean':
      return 2;
    case 'number':
      return 3;
    case 'string':
      return 4;
    case 'object':
      if (x === null) {
        return 1;
      }
      return Array.isArray(x) ? 5 : 6;
  }
  /* istanbul ignore next */
  if (Array.isArray(x)) {
    return 5;
  }
}
3688
// conversion:
// x yyy zz...zz
// x = 0 for negative, 1 for 0, 2 for positive
// y = exponent (for negative numbers negated) moved so that it's >= 0
// z = mantisse
function numToIndexableString(num) {
  if (num === 0) {
    return '1';
  }

  // exponential notation separates the magnitude from the factor
  var expParts = num.toExponential().split(/e\+?/);
  var magnitude = parseInt(expParts[1], 10);
  var isNegative = num < 0;

  // leading discriminator: '0' puts negatives before '2' positives
  var encoded = isNegative ? '0' : '2';

  // first sort by magnitude
  // it's easier if all magnitudes are positive
  var magForComparison = (isNegative ? -magnitude : magnitude) - MIN_MAGNITUDE;
  encoded += SEP + padLeft(magForComparison.toString(), '0', MAGNITUDE_DIGITS);

  // then sort by the factor
  var factor = Math.abs(parseFloat(expParts[0])); // [1..10)
  /* istanbul ignore next */
  if (isNegative) { // for negative reverse ordering
    factor = 10 - factor;
  }

  // fixed form with trailing zeros (and any bare trailing '.') stripped
  var factorStr = factor.toFixed(20).replace(/\.?0+$/, '');

  return encoded + SEP + factorStr;
}
3732
// Build a row comparator from a mango `sort` specification.
function createFieldSorter(sort) {

  // extract each sort field's value from the doc, in sort order
  function getFieldValuesAsArray(doc) {
    return sort.map(function (sorting) {
      return getFieldFromDoc(doc, parseField(getKey(sorting)));
    });
  }

  return function (aRow, bRow) {
    var collation = collate(
      getFieldValuesAsArray(aRow.doc),
      getFieldValuesAsArray(bRow.doc)
    );
    if (collation !== 0) {
      return collation;
    }
    // this is what mango seems to do: break ties on _id
    return compare(aRow.doc._id, bRow.doc._id);
  };
}
3756
// Apply selector filtering, sorting, and skip/limit to rows in memory.
function filterInMemoryFields(rows, requestDef, inMemoryFields) {
  var filtered = rows.filter(function (row) {
    return rowFilter(row.doc, requestDef.selector, inMemoryFields);
  });

  if (requestDef.sort) {
    // in-memory sort
    filtered = filtered.sort(createFieldSorter(requestDef.sort));
    var firstSort = requestDef.sort[0];
    // a descending first field means the whole ordering is reversed
    if (typeof firstSort !== 'string' && getValue(firstSort) === 'desc') {
      filtered = filtered.reverse();
    }
  }

  if ('limit' in requestDef || 'skip' in requestDef) {
    // have to do the limit in-memory
    var skip = requestDef.skip || 0;
    var limit = ('limit' in requestDef ? requestDef.limit : filtered.length) + skip;
    filtered = filtered.slice(skip, limit);
  }
  return filtered;
}
3780
// True iff `doc` satisfies every in-memory field of the selector.
function rowFilter(doc, selector, inMemoryFields) {
  return inMemoryFields.every(function (field) {
    var matcher = selector[field];
    var parsedField = parseField(field);
    var docFieldValue = getFieldFromDoc(doc, parsedField);
    return isCombinationalField(field)
      ? matchCominationalSelector(field, matcher, doc)
      : matchSelector(matcher, doc, parsedField, docFieldValue);
  });
}
3793
// Match a (possibly nested) field matcher against a doc field value.
function matchSelector(matcher, doc, parsedField, docFieldValue) {
  if (!matcher) {
    // no filtering necessary; this field is just needed for sorting
    return true;
  }

  if (typeof matcher !== 'object') {
    // scalar matcher: plain equality, no deeper recursion possible
    return matcher === docFieldValue;
  }

  return Object.keys(matcher).every(function (maybeUserOperator) {
    var userValue = matcher[maybeUserOperator];

    if (maybeUserOperator.indexOf("$") === 0) {
      // explicit operator
      return match(maybeUserOperator, doc, userValue, parsedField, docFieldValue);
    }

    // nested field name rather than an operator
    var subParsedField = parseField(maybeUserOperator);

    if (
      docFieldValue === undefined &&
      typeof userValue !== "object" &&
      subParsedField.length > 0
    ) {
      // the field does not exist, return or getFieldFromDoc will throw
      return false;
    }

    var subDocFieldValue = getFieldFromDoc(docFieldValue, subParsedField);

    if (typeof userValue === "object") {
      // field value is an object that might contain more operators
      return matchSelector(userValue, doc, parsedField, subDocFieldValue);
    }

    // implicit operator
    return match("$eq", doc, userValue, subParsedField, subDocFieldValue);
  });
}
3835
// Handle the combination operators $or, $not and $nor.
function matchCominationalSelector(field, matcher, doc) {
  switch (field) {
    case '$or':
      return matcher.some(function (orMatchers) {
        return rowFilter(doc, orMatchers, Object.keys(orMatchers));
      });
    case '$not':
      return !rowFilter(doc, matcher, Object.keys(matcher));
    default:
      // `$nor`: no sub-selector may match
      return !matcher.some(function (norMatchers) {
        return rowFilter(doc, norMatchers, Object.keys(norMatchers));
      });
  }
}
3854
// Dispatch an explicit operator (e.g. '$eq') to its matcher implementation.
function match(userOperator, doc, userValue, parsedField, docFieldValue) {
  var operatorMatcher = matchers[userOperator];
  if (!operatorMatcher) {
    /* istanbul ignore next */
    throw new Error('unknown operator "' + userOperator +
      '" - should be one of $eq, $lte, $lt, $gt, $gte, $exists, $ne, $in, ' +
      '$nin, $size, $mod, $regex, $elemMatch, $type, $allMatch or $all');
  }
  return operatorMatcher(doc, userValue, parsedField, docFieldValue);
}
3864
// A field "exists" when it is neither undefined nor null.
function fieldExists(docFieldValue) {
  return docFieldValue !== undefined && docFieldValue !== null;
}
3868
// Weaker than fieldExists: null still counts as present.
function fieldIsNotUndefined(docFieldValue) {
  return docFieldValue !== undefined;
}
3872
// $mod: true when the (integer) field value leaves remainder
// userValue[1] when divided by userValue[0].
function modField(docFieldValue, userValue) {
  // only integers can match; parseInt round-trips exactly for them
  if (typeof docFieldValue !== "number" ||
    parseInt(docFieldValue, 10) !== docFieldValue) {
    return false;
  }
  var [divisor, expectedRemainder] = userValue;
  return docFieldValue % divisor === expectedRemainder;
}
3884
// $in/$nin helper: does any user-supplied value collate-equal the field
// (or, for array fields, any of its elements)?
function arrayContainsValue(docFieldValue, userValue) {
  return userValue.some(function (val) {
    if (Array.isArray(docFieldValue)) {
      return docFieldValue.some(function (item) {
        return collate(val, item) === 0;
      });
    }
    return collate(val, docFieldValue) === 0;
  });
}
3896
// $all helper: every user value must collate-equal some array element.
function arrayContainsAllValues(docFieldValue, userValue) {
  for (var i = 0; i < userValue.length; i++) {
    var found = docFieldValue.some(function (item) {
      return collate(userValue[i], item) === 0;
    });
    if (!found) {
      return false;
    }
  }
  return true;
}
3904
// $size helper: exact length comparison.
function arraySize(docFieldValue, userValue) {
  return userValue === docFieldValue.length;
}
3908
// $regex helper: interpret the user value as a RegExp source string.
function regexMatch(docFieldValue, userValue) {
  return new RegExp(userValue).test(docFieldValue);
}
3914
// $type helper: CouchDB-style type names. Returns undefined (falsy)
// for an unrecognized type name, like the original switch did.
function typeMatch(docFieldValue, userValue) {
  if (userValue === 'null') {
    return docFieldValue === null;
  }
  if (userValue === 'boolean' ||
      userValue === 'number' ||
      userValue === 'string') {
    return typeof docFieldValue === userValue;
  }
  if (userValue === 'array') {
    return docFieldValue instanceof Array;
  }
  if (userValue === 'object') {
    return Object.prototype.toString.call(docFieldValue) === '[object Object]';
  }
}
3932
// Operator implementations for mango selectors. Every matcher receives
// (doc, userValue, parsedField, docFieldValue) and returns a boolean.
var matchers = {

  $elemMatch(doc, userValue, parsedField, docFieldValue) {
    if (!Array.isArray(docFieldValue) || docFieldValue.length === 0) {
      return false;
    }

    if (typeof docFieldValue[0] === 'object' && docFieldValue[0] !== null) {
      // array of sub-documents: run the sub-selector against each element
      return docFieldValue.some(function (val) {
        return rowFilter(val, userValue, Object.keys(userValue));
      });
    }

    // array of scalars: match each element's value directly
    return docFieldValue.some(function (val) {
      return matchSelector(userValue, doc, parsedField, val);
    });
  },

  $allMatch(doc, userValue, parsedField, docFieldValue) {
    if (!Array.isArray(docFieldValue)) {
      return false;
    }

    /* istanbul ignore next */
    if (docFieldValue.length === 0) {
      return false;
    }

    if (typeof docFieldValue[0] === 'object' && docFieldValue[0] !== null) {
      // array of sub-documents: every element must satisfy the sub-selector
      return docFieldValue.every(function (val) {
        return rowFilter(val, userValue, Object.keys(userValue));
      });
    }

    return docFieldValue.every(function (val) {
      return matchSelector(userValue, doc, parsedField, val);
    });
  },

  $eq(doc, userValue, parsedField, docFieldValue) {
    return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) === 0;
  },

  $gte(doc, userValue, parsedField, docFieldValue) {
    return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) >= 0;
  },

  $gt(doc, userValue, parsedField, docFieldValue) {
    return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) > 0;
  },

  $lte(doc, userValue, parsedField, docFieldValue) {
    return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) <= 0;
  },

  $lt(doc, userValue, parsedField, docFieldValue) {
    return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) < 0;
  },

  $exists(doc, userValue, parsedField, docFieldValue) {
    // a field that is null is still considered to exist
    if (userValue) {
      return fieldIsNotUndefined(docFieldValue);
    }
    return !fieldIsNotUndefined(docFieldValue);
  },

  $mod(doc, userValue, parsedField, docFieldValue) {
    return fieldExists(docFieldValue) && modField(docFieldValue, userValue);
  },

  $ne(doc, userValue, parsedField, docFieldValue) {
    // userValue is always an array (see normalizeArrayOperators)
    return userValue.every(function (neValue) {
      return collate(docFieldValue, neValue) !== 0;
    });
  },

  $in(doc, userValue, parsedField, docFieldValue) {
    return fieldExists(docFieldValue) && arrayContainsValue(docFieldValue, userValue);
  },

  $nin(doc, userValue, parsedField, docFieldValue) {
    return fieldExists(docFieldValue) && !arrayContainsValue(docFieldValue, userValue);
  },

  $size(doc, userValue, parsedField, docFieldValue) {
    return fieldExists(docFieldValue) &&
      Array.isArray(docFieldValue) &&
      arraySize(docFieldValue, userValue);
  },

  $all(doc, userValue, parsedField, docFieldValue) {
    return Array.isArray(docFieldValue) && arrayContainsAllValues(docFieldValue, userValue);
  },

  $regex(doc, userValue, parsedField, docFieldValue) {
    // userValue is always an array (see normalizeArrayOperators)
    return fieldExists(docFieldValue) &&
      typeof docFieldValue === "string" &&
      userValue.every(function (regexValue) {
        return regexMatch(docFieldValue, regexValue);
      });
  },

  $type(doc, userValue, parsedField, docFieldValue) {
    return typeMatch(docFieldValue, userValue);
  }
};
4044
// return true if the given doc matches the supplied selector
function matchesSelector(doc, selector) {
  /* istanbul ignore if */
  if (typeof selector !== 'object') {
    // match the CouchDB error message
    throw new Error('Selector error: expected a JSON object');
  }

  var massaged = massageSelector(selector);
  var rowsMatched = filterInMemoryFields(
    [{ doc }],
    { selector: massaged },
    Object.keys(massaged)
  );
  return rowsMatched && rowsMatched.length === 1;
}
4061
// Turn a stringified filter function from a design doc into a callable.
function evalFilter(input) {
  return scopeEval(`"use strict";\nreturn ${input};`, {});
}
4065
// Build a filter-like predicate from a design-doc map function source:
// the generated function returns true iff the map function calls emit()
// at least once for the given doc (the emitted key/value are discarded).
function evalView(input) {
  var code = [
    'return function(doc) {',
    '  "use strict";',
    '  var emitted = false;',
    '  var emit = function (a, b) {',
    '    emitted = true;',
    '  };',
    '  var view = ' + input + ';',
    '  view(doc);',
    '  if (emitted) {',
    '    return true;',
    '  }',
    '};'
  ].join('\n');

  return scopeEval(code, {});
}
4084
// Reject a changes request that combines `selector` with any filter
// other than the implicit '_selector' one; otherwise succeed.
function validate(opts, callback) {
  if (opts.selector && opts.filter && opts.filter !== '_selector') {
    var filterName = typeof opts.filter === 'string' ? opts.filter : 'function';
    return callback(new Error('selector invalid for filter "' + filterName + '"'));
  }
  callback();
}
4095
// Fill in an implicit filter name ('_view' / '_selector') and normalize
// design-doc function names like 'ddoc/name'. Mutates opts in place.
function normalize(opts) {
  if (!opts.filter) {
    if (opts.view) {
      opts.filter = '_view';
    } else if (opts.selector) {
      opts.filter = '_selector';
    }
  }

  if (opts.filter && typeof opts.filter === 'string') {
    if (opts.filter === '_view') {
      opts.view = normalizeDesignDocFunctionName(opts.view);
    } else {
      opts.filter = normalizeDesignDocFunctionName(opts.filter);
    }
  }
}
4113
// Client-side filtering applies only for named (string) filters on a
// local database, and never when doc_ids are given.
function shouldFilter(changesHandler, opts) {
  var namedFilter = opts.filter && typeof opts.filter === 'string';
  return namedFilter && !opts.doc_ids && !isRemote(changesHandler.db);
}
4118
// Resolve opts.filter into a callable predicate, then start the changes
// feed via changesHandler.doChanges(opts). Three cases are handled:
// '_view' (wrap a design-doc map function), a selector (wrap
// matchesSelector), and a named 'ddoc/filtername' design-doc filter.
// Errors are reported through opts.complete.
function filter(changesHandler, opts) {
  var callback = opts.complete;
  if (opts.filter === '_view') {
    if (!opts.view || typeof opts.view !== 'string') {
      var err = createError(BAD_REQUEST,
        '`view` filter parameter not found or invalid.');
      return callback(err);
    }
    // fetch a view from a design doc, make it behave like a filter
    var viewName = parseDesignDocFunctionName(opts.view);
    changesHandler.db.get('_design/' + viewName[0], function (err, ddoc) {
      /* istanbul ignore if */
      if (changesHandler.isCancelled) {
        return callback(null, {status: 'cancelled'});
      }
      /* istanbul ignore next */
      if (err) {
        return callback(generateErrorFromResponse(err));
      }
      var mapFun = ddoc && ddoc.views && ddoc.views[viewName[1]] &&
        ddoc.views[viewName[1]].map;
      if (!mapFun) {
        return callback(createError(MISSING_DOC,
          (ddoc.views ? 'missing json key: ' + viewName[1] :
            'missing json key: views')));
      }
      opts.filter = evalView(mapFun);
      changesHandler.doChanges(opts);
    });
  } else if (opts.selector) {
    // selector-based filtering: wrap matchesSelector as a plain filter
    opts.filter = function (doc) {
      return matchesSelector(doc, opts.selector);
    };
    changesHandler.doChanges(opts);
  } else {
    // fetch a filter from a design doc
    var filterName = parseDesignDocFunctionName(opts.filter);
    changesHandler.db.get('_design/' + filterName[0], function (err, ddoc) {
      /* istanbul ignore if */
      if (changesHandler.isCancelled) {
        return callback(null, {status: 'cancelled'});
      }
      /* istanbul ignore next */
      if (err) {
        return callback(generateErrorFromResponse(err));
      }
      var filterFun = ddoc && ddoc.filters && ddoc.filters[filterName[1]];
      if (!filterFun) {
        return callback(createError(MISSING_DOC,
          ((ddoc && ddoc.filters) ? 'missing json key: ' + filterName[1]
            : 'missing json key: filters')));
      }
      opts.filter = evalFilter(filterFun);
      changesHandler.doChanges(opts);
    });
  }
}
4176
// Attach the changes-filter helpers to PouchDB under a single namespace.
function applyChangesFilterPlugin(PouchDB) {
  PouchDB._changesFilterPlugin = { validate, normalize, shouldFilter, filter };
}
4185
// Register the changes-filter helpers on the core constructor.
// TODO: remove from pouchdb-core (breaking)
PouchDB.plugin(applyChangesFilterPlugin);

// Expose the library version string on the constructor.
PouchDB.version = version;
4190
4191//
4192// Blobs are not supported in all versions of IndexedDB, notably
4193// Chrome <37, Android <5 and (some?) webkit-based browsers.
4194// In those versions, storing a blob will throw.
4195//
4196// Example Webkit error:
4197// > DataCloneError: Failed to store record in an IDBObjectStore: BlobURLs are not yet supported.
4198//
4199// Various other blob bugs exist in Chrome v37-42 (inclusive).
4200// Detecting them is expensive and confusing to users, and Chrome 37-42
4201// is at very low usage worldwide, so we do a hacky userAgent check instead.
4202//
4203// content-type bug: https://code.google.com/p/chromium/issues/detail?id=408120
4204// 404 bug: https://code.google.com/p/chromium/issues/detail?id=447916
4205// FileReader bug: https://code.google.com/p/chromium/issues/detail?id=447836
4206//
// Probe whether this IndexedDB implementation can store Blobs by writing a
// tiny blob inside the given transaction. Resolves to true when blobs appear
// supported, false otherwise; never rejects (errors resolve to false).
// `docIdOrCreateDoc` is either a key to put the blob under, or a function
// that wraps the blob in a document (for stores with a key path).
function checkBlobSupport(txn, store, docIdOrCreateDoc) {
  return new Promise(function (resolve) {
    var blob$$1 = createBlob(['']);

    let req;
    if (typeof docIdOrCreateDoc === 'function') {
      // Store may require a specific key path, in which case we can't store the
      // blob directly in the store.
      const createDoc = docIdOrCreateDoc;
      const doc = createDoc(blob$$1);
      req = txn.objectStore(store).put(doc);
    } else {
      const docId = docIdOrCreateDoc;
      req = txn.objectStore(store).put(blob$$1, docId);
    }

    req.onsuccess = function () {
      // the write succeeded, but Chrome 37-42 still has blob bugs,
      // so fall back to a userAgent version check (see comment above)
      var matchedChrome = navigator.userAgent.match(/Chrome\/(\d+)/);
      var matchedEdge = navigator.userAgent.match(/Edge\//);
      // MS Edge pretends to be Chrome 42:
      // https://msdn.microsoft.com/en-us/library/hh869301%28v=vs.85%29.aspx
      resolve(matchedEdge || !matchedChrome ||
        parseInt(matchedChrome[1], 10) >= 43);
    };

    req.onerror = txn.onabort = function (e) {
      // If the transaction aborts now its due to not being able to
      // write to the database, likely due to the disk being full
      e.preventDefault();
      e.stopPropagation();
      resolve(false);
    };
  }).catch(function () {
    return false; // error, so assume unsupported
  });
}
4243
// Turn a list of strings into a lookup object (each key mapped to true).
function toObject(array) {
  var lookup = {};
  array.forEach(function (item) {
    lookup[item] = true;
  });
  return lookup;
}
// List of top level reserved words for doc: underscore-prefixed keys that
// are accepted in a document; any other _-prefixed key is rejected by
// parseDoc with a DOC_VALIDATION error.
var reservedWords = toObject([
  '_id',
  '_rev',
  '_access',
  '_attachments',
  '_deleted',
  '_revisions',
  '_revs_info',
  '_conflicts',
  '_deleted_conflicts',
  '_local_seq',
  '_rev_tree',
  // replication documents
  '_replication_id',
  '_replication_state',
  '_replication_state_time',
  '_replication_state_reason',
  '_replication_stats',
  // Specific to Couchbase Sync Gateway
  '_removed'
]);

// List of reserved words that should end up in the document: reserved keys
// that parseDoc keeps in result.data rather than stripping into metadata.
var dataWords = toObject([
  '_access',
  '_attachments',
  // replication documents
  '_replication_id',
  '_replication_state',
  '_replication_state_time',
  '_replication_state_reason',
  '_replication_stats'
]);
4284
// Split a revision string 'N-hash' into {prefix: N, id: hash}.
// Returns an INVALID_REV error object when the format doesn't match.
function parseRevisionInfo(rev$$1) {
  if (!/^\d+-/.test(rev$$1)) {
    return createError(INVALID_REV);
  }
  var separatorIdx = rev$$1.indexOf('-');
  return {
    prefix: parseInt(rev$$1.substring(0, separatorIdx), 10),
    id: rev$$1.substring(separatorIdx + 1)
  };
}
4297
// Rebuild a rev tree branch from a _revisions object ({start, ids}).
// ids[0] is the leaf (it gets `opts`); ancestors are marked 'missing'.
function makeRevTreeFromRevisions(revisions, opts) {
  var pos = revisions.start - revisions.ids.length + 1;
  var revisionIds = revisions.ids;

  // start at the leaf and wrap each ancestor around it
  var ids = [revisionIds[0], opts, []];
  for (var i = 1; i < revisionIds.length; i++) {
    ids = [revisionIds[i], {status: 'missing'}, [ids]];
  }

  return [{pos, ids}];
}
4313
// Preprocess documents, parse their revisions, assign an id and a
// revision for new writes that are missing them, etc
// Returns {metadata, data}; may instead return an INVALID_REV error object
// (from parseRevisionInfo) or throw a DOC_VALIDATION error for an
// unrecognized underscore-prefixed key.
function parseDoc(doc, newEdits, dbOpts) {
  if (!dbOpts) {
    dbOpts = {
      deterministic_revs: true
    };
  }

  var nRevNum;
  var newRevId;
  var revInfo;
  var opts = {status: 'available'};
  if (doc._deleted) {
    opts.deleted = true;
  }

  if (newEdits) {
    // regular write: generate a fresh rev id, chaining onto any
    // existing doc._rev as the parent
    if (!doc._id) {
      doc._id = uuid();
    }
    newRevId = rev(doc, dbOpts.deterministic_revs);
    if (doc._rev) {
      revInfo = parseRevisionInfo(doc._rev);
      if (revInfo.error) {
        return revInfo;
      }
      // parent marked 'missing': only the new leaf's data is stored here
      doc._rev_tree = [{
        pos: revInfo.prefix,
        ids: [revInfo.id, {status: 'missing'}, [[newRevId, opts, []]]]
      }];
      nRevNum = revInfo.prefix + 1;
    } else {
      doc._rev_tree = [{
        pos: 1,
        ids : [newRevId, opts, []]
      }];
      nRevNum = 1;
    }
  } else {
    // new_edits:false (replication-style) write: trust the caller's revs
    if (doc._revisions) {
      doc._rev_tree = makeRevTreeFromRevisions(doc._revisions, opts);
      nRevNum = doc._revisions.start;
      newRevId = doc._revisions.ids[0];
    }
    if (!doc._rev_tree) {
      revInfo = parseRevisionInfo(doc._rev);
      if (revInfo.error) {
        return revInfo;
      }
      nRevNum = revInfo.prefix;
      newRevId = revInfo.id;
      doc._rev_tree = [{
        pos: nRevNum,
        ids: [newRevId, opts, []]
      }];
    }
  }

  // presumably throws for reserved/malformed ids — defined elsewhere in
  // this bundle; TODO confirm
  invalidIdError(doc._id);

  doc._rev = nRevNum + '-' + newRevId;

  // split the doc: _-prefixed keys are reserved metadata unless listed in
  // dataWords (which travel with the document data); anything else
  // _-prefixed is invalid
  var result = {metadata : {}, data : {}};
  for (var key in doc) {
    /* istanbul ignore else */
    if (Object.prototype.hasOwnProperty.call(doc, key)) {
      var specialKey = key[0] === '_';
      if (specialKey && !reservedWords[key]) {
        var error = createError(DOC_VALIDATION, key);
        error.message = DOC_VALIDATION.message + ': ' + key;
        throw error;
      } else if (specialKey && !dataWords[key]) {
        // stored as metadata with the underscore stripped (e.g. 'rev_tree')
        result.metadata[key.slice(1)] = doc[key];
      } else {
        result.data[key] = doc[key];
      }
    }
  }
  return result;
}
4395
// Decode a base64 attachment body; on failure return {error} rather
// than throwing, so callers can report it through their callback.
function parseBase64(data) {
  try {
    return thisAtob(data);
  } catch (e) {
    return {
      error: createError(BAD_ARG, 'Attachment is not a valid base64 string')
    };
  }
}
4405
// Normalize a base64-string attachment into the adapter's preferred
// representation (`blobType`), filling in length and md5 digest.
function preprocessString(att, blobType, callback) {
  var asBinary = parseBase64(att.data);
  if (asBinary.error) {
    return callback(asBinary.error);
  }

  att.length = asBinary.length;
  switch (blobType) {
    case 'blob':
      att.data = binStringToBluffer(asBinary, att.content_type);
      break;
    case 'base64':
      att.data = thisBtoa(asBinary);
      break;
    default: // binary
      att.data = asBinary;
  }
  binaryMd5(asBinary, function (result) {
    att.digest = 'md5-' + result;
    callback();
  });
}
4425
// Normalize a blob/buffer attachment into the adapter's preferred
// representation (`blobType`), filling in length and md5 digest.
function preprocessBlob(att, blobType, callback) {
  binaryMd5(att.data, function (md5) {
    att.digest = 'md5-' + md5;
    // size is for blobs (browser), length is for buffers (node)
    att.length = att.data.size || att.data.length || 0;

    if (blobType === 'binary') {
      blobToBinaryString(att.data, function (binString) {
        att.data = binString;
        callback();
      });
      return;
    }
    if (blobType === 'base64') {
      blobToBase64(att.data, function (b64) {
        att.data = b64;
        callback();
      });
      return;
    }
    callback();
  });
}
4446
// Route one attachment to the right preprocessor; stubs pass through.
function preprocessAttachment(att, blobType, callback) {
  if (att.stub) {
    return callback();
  }
  var isBase64String = typeof att.data === 'string';
  if (isBase64String) {
    preprocessString(att, blobType, callback);
  } else {
    // anything else is treated as a blob/buffer
    preprocessBlob(att, blobType, callback);
  }
}
4457
// Preprocess the attachments of every docInfo in parallel, converting
// each to `blobType` and computing digests/lengths; calls `callback`
// once (with the first-reported error, if any) when all docs are done.
function preprocessAttachments(docInfos, blobType, callback) {

  if (!docInfos.length) {
    return callback();
  }

  var docv = 0;
  var overallErr;

  docInfos.forEach(function (docInfo) {
    var attachments = docInfo.data && docInfo.data._attachments ?
      Object.keys(docInfo.data._attachments) : [];
    var recv = 0;

    if (!attachments.length) {
      return done();
    }

    function processedAttachment(err) {
      // fix: only record real errors — previously a later successful
      // attachment (err === undefined) clobbered an earlier failure
      if (err) {
        overallErr = err;
      }
      recv++;
      if (recv === attachments.length) {
        done();
      }
    }

    for (var key in docInfo.data._attachments) {
      if (Object.prototype.hasOwnProperty.call(docInfo.data._attachments, key)) {
        preprocessAttachment(docInfo.data._attachments[key],
          blobType, processedAttachment);
      }
    }
  });

  // fires once per doc; the last doc to finish reports the result
  function done() {
    docv++;
    if (docInfos.length === docv) {
      if (overallErr) {
        callback(overallErr);
      } else {
        callback();
      }
    }
  }
}
4503
// Merge an incoming revision of an EXISTING document into its stored
// revision tree, detect conflicts, and hand the result to `writeDoc`.
//
// revLimit - max rev-tree depth; revs beyond it are stemmed by merge()
// prev     - previously stored metadata for this doc id
// docInfo  - parsed incoming doc ({data, metadata})
// results  - per-doc outcome array, written at index `i`
// cb       - called once this doc is handled (also on conflict)
// writeDoc - adapter-provided writer
// newEdits - false during replication (revisions are taken as-is)
function updateDoc(revLimit, prev, docInfo, results,
  i, cb, writeDoc, newEdits) {

  if (revExists(prev.rev_tree, docInfo.metadata.rev) && !newEdits) {
    // replication sent a revision we already have; report it unchanged
    results[i] = docInfo;
    return cb();
  }

  // sometimes this is pre-calculated. historically not always
  var previousWinningRev = prev.winningRev || winningRev(prev);
  var previouslyDeleted = 'deleted' in prev ? prev.deleted :
    isDeleted(prev, previousWinningRev);
  var deleted = 'deleted' in docInfo.metadata ? docInfo.metadata.deleted :
    isDeleted(docInfo.metadata);
  var isRoot = /^1-/.test(docInfo.metadata.rev);

  if (previouslyDeleted && !deleted && newEdits && isRoot) {
    // a brand-new (1-x) edit that revives a deleted doc is re-parented
    // onto the deleted winning rev so it extends the existing tree
    var newDoc = docInfo.data;
    newDoc._rev = previousWinningRev;
    newDoc._id = docInfo.metadata.id;
    docInfo = parseDoc(newDoc, newEdits);
  }

  var merged = merge(prev.rev_tree, docInfo.metadata.rev_tree[0], revLimit);

  // with new_edits, anything other than extending a leaf (or reviving a
  // deleted doc on a new branch) is a 409 conflict
  var inConflict = newEdits && ((
    (previouslyDeleted && deleted && merged.conflicts !== 'new_leaf') ||
    (!previouslyDeleted && merged.conflicts !== 'new_leaf') ||
    (previouslyDeleted && !deleted && merged.conflicts === 'new_branch')));

  if (inConflict) {
    var err = createError(REV_CONFLICT);
    results[i] = err;
    return cb();
  }

  var newRev = docInfo.metadata.rev;
  docInfo.metadata.rev_tree = merged.tree;
  docInfo.stemmedRevs = merged.stemmedRevs || [];
  /* istanbul ignore else */
  if (prev.rev_map) {
    docInfo.metadata.rev_map = prev.rev_map; // used only by leveldb
  }

  // recalculate
  var winningRev$$1 = winningRev(docInfo.metadata);
  var winningRevIsDeleted = isDeleted(docInfo.metadata, winningRev$$1);

  // calculate the total number of documents that were added/removed,
  // from the perspective of total_rows/doc_count
  var delta = (previouslyDeleted === winningRevIsDeleted) ? 0 :
    previouslyDeleted < winningRevIsDeleted ? -1 : 1;

  var newRevIsDeleted;
  if (newRev === winningRev$$1) {
    // if the new rev is the same as the winning rev, we can reuse that value
    newRevIsDeleted = winningRevIsDeleted;
  } else {
    // if they're not the same, then we need to recalculate
    newRevIsDeleted = isDeleted(docInfo.metadata, newRev);
  }

  writeDoc(docInfo, winningRev$$1, winningRevIsDeleted, newRevIsDeleted,
    true, delta, i, cb);
}
4569
// True when the first node of the doc's rev tree is flagged 'missing',
// i.e. the doc was inserted with a _rev whose history we don't have.
function rootIsMissing(docInfo) {
  var rootNode = docInfo.metadata.rev_tree[0].ids;
  return rootNode[1].status === 'missing';
}
4573
// Core write loop shared by adapters: groups the batch by doc id,
// processes duplicates of the same id sequentially (later writes see the
// metadata the earlier ones produced), and routes each doc to
// updateDoc() (id already stored) or insertDoc() (brand-new id).
// Local (_local/) docs bypass the rev-tree machinery and go straight
// through api._putLocal/_removeLocal.
function processDocs(revLimit, docInfos, api, fetchedDocs, tx, results,
  writeDoc, opts, overallCallback) {

  // Default to 1000 locally
  revLimit = revLimit || 1000;

  function insertDoc(docInfo, resultsIdx, callback) {
    // Cant insert new deleted documents
    var winningRev$$1 = winningRev(docInfo.metadata);
    var deleted = isDeleted(docInfo.metadata, winningRev$$1);
    if ('was_delete' in opts && deleted) {
      results[resultsIdx] = createError(MISSING_DOC, 'deleted');
      return callback();
    }

    // 4712 - detect whether a new document was inserted with a _rev
    var inConflict = newEdits && rootIsMissing(docInfo);

    if (inConflict) {
      var err = createError(REV_CONFLICT);
      results[resultsIdx] = err;
      return callback();
    }

    // a new non-deleted doc increments the db's doc count by one
    var delta = deleted ? 0 : 1;

    writeDoc(docInfo, winningRev$$1, deleted, deleted, false,
      delta, resultsIdx, callback);
  }

  var newEdits = opts.new_edits;
  var idsToDocs = new Map();

  var docsDone = 0;
  var docsToDo = docInfos.length;

  // overallCallback fires once per unique id group plus once per local doc
  function checkAllDocsDone() {
    if (++docsDone === docsToDo && overallCallback) {
      overallCallback();
    }
  }

  docInfos.forEach(function (currentDoc, resultsIdx) {

    if (currentDoc._id && isLocalId(currentDoc._id)) {
      var fun = currentDoc._deleted ? '_removeLocal' : '_putLocal';
      api[fun](currentDoc, {ctx: tx}, function (err, res) {
        results[resultsIdx] = err || res;
        checkAllDocsDone();
      });
      return;
    }

    var id = currentDoc.metadata.id;
    if (idsToDocs.has(id)) {
      docsToDo--; // duplicate
      idsToDocs.get(id).push([currentDoc, resultsIdx]);
    } else {
      idsToDocs.set(id, [[currentDoc, resultsIdx]]);
    }
  });

  // in the case of new_edits, the user can provide multiple docs
  // with the same id. these need to be processed sequentially
  idsToDocs.forEach(function (docs, id) {
    var numDone = 0;

    function docWritten() {
      if (++numDone < docs.length) {
        nextDoc();
      } else {
        checkAllDocsDone();
      }
    }
    function nextDoc() {
      var value = docs[numDone];
      var currentDoc = value[0];
      var resultsIdx = value[1];

      if (fetchedDocs.has(id)) {
        updateDoc(revLimit, fetchedDocs.get(id), currentDoc, results,
          resultsIdx, docWritten, writeDoc, newEdits);
      } else {
        // Ensure stemming applies to new writes as well
        var merged = merge([], currentDoc.metadata.rev_tree[0], revLimit);
        currentDoc.metadata.rev_tree = merged.tree;
        currentDoc.stemmedRevs = merged.stemmedRevs || [];
        insertDoc(currentDoc, resultsIdx, docWritten);
      }
    }
    nextDoc();
  });
}
4667
// IndexedDB requires a versioned database structure, so we use the
// version here to manage migrations.
var ADAPTER_VERSION = 5;

// The object stores created for each database
// DOC_STORE stores the document meta data, its revision history and state
// Keyed by document id
var DOC_STORE = 'document-store';
// BY_SEQ_STORE stores a particular version of a document, keyed by its
// sequence id
var BY_SEQ_STORE = 'by-sequence';
// Where we store attachments
var ATTACH_STORE = 'attach-store';
// Where we store many-to-many relations
// between attachment digests and seqs
var ATTACH_AND_SEQ_STORE = 'attach-seq-store';

// Where we store database-wide meta data in a single record
// keyed by id: META_STORE (the store name doubles as the record key;
// see `metaStore.get(META_STORE)` in idbBulkDocs/idbAllDocs)
var META_STORE = 'meta-store';
// Where we store local documents
var LOCAL_STORE = 'local-store';
// Where we detect blob support
var DETECT_BLOB_SUPPORT_STORE = 'detect-blob-support';
4692
// Parse JSON without risking a stack overflow: JSON.parse() is faster,
// but on deeply-nested input (huge rev trees) it can blow the stack,
// in which case we fall back to the non-recursive vuvuzela parser.
function safeJsonParse(str) {
  var parsed;
  try {
    parsed = JSON.parse(str);
  } catch (e) {
    /* istanbul ignore next */
    parsed = vuvuzela.parse(str);
  }
  return parsed;
}
4704
// Stringify without risking a stack overflow on deeply-nested input;
// falls back to the non-recursive vuvuzela serializer on failure.
function safeJsonStringify(json) {
  var str;
  try {
    str = JSON.stringify(json);
  } catch (e) {
    /* istanbul ignore next */
    str = vuvuzela.stringify(json);
  }
  return str;
}
4713
// Build an IDB event handler that translates the event into a PouchDB
// error and forwards it to `callback`.
function idbError(callback) {
  return function (evt) {
    var message = 'unknown_error';
    var target = evt.target;
    if (target && target.error) {
      message = target.error.name || target.error.message;
    }
    callback(createError(IDB_ERROR, message, evt.type));
  };
}
4723
// The metadata has to be stringified before it goes into IndexedDB,
// because deeply-nested objects (big rev trees) can make IDB throw.
// safeJsonStringify falls back to the non-recursive vuvuzela
// serializer; the fields needed for indexing and fast lookup are
// stored alongside the JSON blob.
function encodeMetadata(metadata, winningRev, deleted) {
  var encoded = {
    data: safeJsonStringify(metadata),
    winningRev,
    deletedOrLocal: deleted ? '1' : '0',
    seq: metadata.seq, // highest seq for this doc
    id: metadata.id
  };
  return encoded;
}
4740
// Inverse of encodeMetadata(): parse the JSON blob and re-attach the
// fields that were stored alongside it.
function decodeMetadata(storedObject) {
  if (!storedObject) {
    return null;
  }
  return Object.assign(safeJsonParse(storedObject.data), {
    winningRev: storedObject.winningRev,
    deleted: storedObject.deletedOrLocal === '1',
    seq: storedObject.seq
  });
}
4751
// Reconstruct _id/_rev from the stored _doc_id_rev key ("id::rev").
// lastIndexOf(':') lands on the second colon of the '::' separator, so
// ids containing ':' are handled correctly (revs never contain one).
function decodeDoc(doc) {
  if (!doc) {
    return doc;
  }
  var sep = doc._doc_id_rev.lastIndexOf(':');
  doc._id = doc._doc_id_rev.substring(0, sep - 1);
  doc._rev = doc._doc_id_rev.substring(sep + 1);
  delete doc._doc_id_rev;
  return doc;
}
4764
// Read an attachment body out of the database, converting between Blob
// and base64 as needed. A stored body is a Blob when the IDB supports
// native Blobs, otherwise a base64 string; `asBlob` says what the
// caller wants back.
function readBlobData(body, type, asBlob, callback) {
  if (asBlob) {
    if (!body) {
      // empty attachment
      callback(createBlob([''], {type}));
    } else if (typeof body === 'string') {
      // stored as base64 (no blob support); convert
      callback(b64ToBluffer(body, type));
    } else {
      // already a Blob
      callback(body);
    }
    return;
  }
  // caller wants a base64 string
  if (!body) {
    callback('');
  } else if (typeof body === 'string') {
    callback(body);
  } else {
    readAsBinaryString(body, function (binary) {
      callback(thisBtoa(binary));
    });
  }
}
4789
// For a doc about to be returned to the user: either fetch each
// attachment body from ATTACH_STORE (when opts.attachments and
// opts.include_docs are both set) or mark each attachment as a stub.
// `cb` is optional and fires once every attachment has been handled.
function fetchAttachmentsIfNecessary(doc, opts, txn, cb) {
  var attNames = Object.keys(doc._attachments || {});
  if (!attNames.length) {
    return cb && cb();
  }
  var handled = 0;

  function checkDone() {
    if (++handled === attNames.length && cb) {
      cb();
    }
  }

  function fetchAttachment(name) {
    var attObj = doc._attachments[name];
    var req = txn.objectStore(ATTACH_STORE).get(attObj.digest);
    req.onsuccess = function (e) {
      attObj.body = e.target.result.body;
      checkDone();
    };
  }

  attNames.forEach(function (name) {
    if (opts.attachments && opts.include_docs) {
      fetchAttachment(name);
    } else {
      doc._attachments[name].stub = true;
      checkDone();
    }
  });
}
4822
// IDB-specific postprocessing: we don't know whether a stored body is a
// true Blob or a base64 string, and a Blob has to be read outside the
// transaction context — so after the txn completes, convert every
// fetched attachment body (attObj.body) into its final `data` form.
// Returns a Promise that resolves when every row has been processed.
function postProcessAttachments(results, asBlob) {
  return Promise.all(results.map(function (row) {
    if (!row.doc || !row.doc._attachments) {
      return;
    }
    var attNames = Object.keys(row.doc._attachments);
    return Promise.all(attNames.map(function (att) {
      var attObj = row.doc._attachments[att];
      if (!('body' in attObj)) { // already processed
        return;
      }
      var body = attObj.body;
      var type = attObj.content_type;
      return new Promise(function (resolve) {
        readBlobData(body, type, asBlob, function (data) {
          // replace body with data, keeping only the public fields
          row.doc._attachments[att] = Object.assign(
            pick(attObj, ['digest', 'content_type']),
            {data}
          );
          resolve();
        });
      });
    }));
  }));
}
4851
// Delete the given old revisions of a document from BY_SEQ_STORE and
// garbage-collect any attachments no longer referenced by any seq once
// those revisions are gone. All work happens inside the caller's `txn`.
function compactRevs(revs, docId, txn) {

  var possiblyOrphanedDigests = [];
  var seqStore = txn.objectStore(BY_SEQ_STORE);
  var attStore = txn.objectStore(ATTACH_STORE);
  var attAndSeqStore = txn.objectStore(ATTACH_AND_SEQ_STORE);
  var count = revs.length;

  // called once per rev; when all revs are handled, check for orphans
  function checkDone() {
    count--;
    if (!count) { // done processing all revs
      deleteOrphanedAttachments();
    }
  }

  function deleteOrphanedAttachments() {
    if (!possiblyOrphanedDigests.length) {
      return;
    }
    possiblyOrphanedDigests.forEach(function (digest) {
      // count the remaining "digest::seq" mappings for this digest;
      // zero means no surviving revision references the attachment
      var countReq = attAndSeqStore.index('digestSeq').count(
        IDBKeyRange.bound(
          digest + '::', digest + '::\uffff', false, false));
      countReq.onsuccess = function (e) {
        var count = e.target.result;
        if (!count) {
          // orphaned
          attStore.delete(digest);
        }
      };
    });
  }

  revs.forEach(function (rev$$1) {
    var index = seqStore.index('_doc_id_rev');
    var key = docId + "::" + rev$$1;
    index.getKey(key).onsuccess = function (e) {
      var seq = e.target.result;
      if (typeof seq !== 'number') {
        // rev not present in the by-seq store; nothing to delete
        return checkDone();
      }
      seqStore.delete(seq);

      // remove this seq's digest mappings, remembering each digest so
      // it can be checked for orphanhood after all revs are processed
      var cursor = attAndSeqStore.index('seq')
        .openCursor(IDBKeyRange.only(seq));

      cursor.onsuccess = function (event) {
        var cursor = event.target.result;
        if (cursor) {
          var digest = cursor.value.digestSeq.split('::')[0];
          possiblyOrphanedDigests.push(digest);
          attAndSeqStore.delete(cursor.primaryKey);
          cursor.continue();
        } else { // done
          checkDone();
        }
      };
    };
  });
}
4912
// idb.transaction() throws synchronously (e.g. if the connection is
// closing); wrap it so callers get a {txn} or {error} result object
// instead of an exception.
function openTransactionSafely(idb, stores, mode) {
  var result;
  try {
    result = { txn: idb.transaction(stores, mode) };
  } catch (err) {
    result = { error: err };
  }
  return result;
}
4924
// Shared changes notifier for this adapter; complete() in idbBulkDocs
// calls changesHandler.notify(dbName) after a successful write batch.
var changesHandler = new Changes();
4926
// Write a batch of documents (and their attachments) inside a single
// 'readwrite' IndexedDB transaction. Flow:
//   1. parseDoc() each non-local doc; fail fast on the first parse error
//   2. preprocessAttachments(): compute digests, convert attachment data
//   3. open the transaction, load the meta doc, verify attachment stubs
//   4. fetch existing metadata for every doc id, then run processDocs()
//      with this adapter's writeDoc implementation
//   5. txn.oncomplete fires complete(), which notifies change listeners
//      and returns `results` to the caller
function idbBulkDocs(dbOpts, req, opts, api, idb, callback) {
  var docInfos = req.docs;
  var txn;
  var docStore;
  var bySeqStore;
  var attachStore;
  var attachAndSeqStore;
  var metaStore;
  var docInfoError;
  var metaDoc;

  // parse all docs up front (local docs are passed through untouched);
  // only the first parse error is reported
  for (var i = 0, len = docInfos.length; i < len; i++) {
    var doc = docInfos[i];
    if (doc._id && isLocalId(doc._id)) {
      continue;
    }
    doc = docInfos[i] = parseDoc(doc, opts.new_edits, dbOpts);
    if (doc.error && !docInfoError) {
      docInfoError = doc;
    }
  }

  if (docInfoError) {
    return callback(docInfoError);
  }

  var allDocsProcessed = false;
  var docCountDelta = 0;
  var results = new Array(docInfos.length);
  var fetchedDocs = new Map();
  var preconditionErrored = false;
  var blobType = api._meta.blobSupport ? 'blob' : 'base64';

  preprocessAttachments(docInfos, blobType, function (err) {
    if (err) {
      return callback(err);
    }
    startTransaction();
  });

  function startTransaction() {

    var stores = [
      DOC_STORE, BY_SEQ_STORE,
      ATTACH_STORE,
      LOCAL_STORE, ATTACH_AND_SEQ_STORE,
      META_STORE
    ];
    var txnResult = openTransactionSafely(idb, stores, 'readwrite');
    if (txnResult.error) {
      return callback(txnResult.error);
    }
    txn = txnResult.txn;
    txn.onabort = idbError(callback);
    txn.ontimeout = idbError(callback);
    txn.oncomplete = complete;
    docStore = txn.objectStore(DOC_STORE);
    bySeqStore = txn.objectStore(BY_SEQ_STORE);
    attachStore = txn.objectStore(ATTACH_STORE);
    attachAndSeqStore = txn.objectStore(ATTACH_AND_SEQ_STORE);
    metaStore = txn.objectStore(META_STORE);

    // the meta record is keyed by the store's own name
    metaStore.get(META_STORE).onsuccess = function (e) {
      metaDoc = e.target.result;
      updateDocCountIfReady();
    };

    verifyAttachments(function (err) {
      if (err) {
        preconditionErrored = true;
        return callback(err);
      }
      fetchExistingDocs();
    });
  }

  function onAllDocsProcessed() {
    allDocsProcessed = true;
    updateDocCountIfReady();
  }

  function idbProcessDocs() {
    processDocs(dbOpts.revs_limit, docInfos, api, fetchedDocs,
      txn, results, writeDoc, opts, onAllDocsProcessed);
  }

  // runs only after BOTH the meta doc has loaded and all docs are
  // processed, so the delta is applied exactly once
  function updateDocCountIfReady() {
    if (!metaDoc || !allDocsProcessed) {
      return;
    }
    // caching the docCount saves a lot of time in allDocs() and
    // info(), which is why we go to all the trouble of doing this
    metaDoc.docCount += docCountDelta;
    metaStore.put(metaDoc);
  }

  function fetchExistingDocs() {

    if (!docInfos.length) {
      return;
    }

    var numFetched = 0;

    function checkDone() {
      if (++numFetched === docInfos.length) {
        idbProcessDocs();
      }
    }

    function readMetadata(event) {
      var metadata = decodeMetadata(event.target.result);

      if (metadata) {
        fetchedDocs.set(metadata.id, metadata);
      }
      checkDone();
    }

    for (var i = 0, len = docInfos.length; i < len; i++) {
      var docInfo = docInfos[i];
      if (docInfo._id && isLocalId(docInfo._id)) {
        checkDone(); // skip local docs
        continue;
      }
      var req = docStore.get(docInfo.metadata.id);
      req.onsuccess = readMetadata;
    }
  }

  // txn.oncomplete handler: the whole batch committed
  function complete() {
    if (preconditionErrored) {
      return;
    }

    changesHandler.notify(api._meta.name);
    callback(null, results);
  }

  // a stub attachment must already exist in ATTACH_STORE; otherwise the
  // write is rejected with a 412 missing_stub error
  function verifyAttachment(digest, callback) {

    var req = attachStore.get(digest);
    req.onsuccess = function (e) {
      if (!e.target.result) {
        var err = createError(MISSING_STUB,
          'unknown stub attachment with digest ' +
          digest);
        err.status = 412;
        callback(err);
      } else {
        callback();
      }
    };
  }

  function verifyAttachments(finish) {


    var digests = [];
    docInfos.forEach(function (docInfo) {
      if (docInfo.data && docInfo.data._attachments) {
        Object.keys(docInfo.data._attachments).forEach(function (filename) {
          var att = docInfo.data._attachments[filename];
          if (att.stub) {
            digests.push(att.digest);
          }
        });
      }
    });
    if (!digests.length) {
      return finish();
    }
    var numDone = 0;
    var err;

    function checkDone() {
      if (++numDone === digests.length) {
        finish(err);
      }
    }
    digests.forEach(function (digest) {
      verifyAttachment(digest, function (attErr) {
        // keep only the first error
        if (attErr && !err) {
          err = attErr;
        }
        checkDone();
      });
    });
  }

  // adapter-specific writer invoked by processDocs(); saves attachments
  // first (if any), then the doc revision and its metadata
  function writeDoc(docInfo, winningRev$$1, winningRevIsDeleted, newRevIsDeleted,
                    isUpdate, delta, resultsIdx, callback) {

    docInfo.metadata.winningRev = winningRev$$1;
    docInfo.metadata.deleted = winningRevIsDeleted;

    var doc = docInfo.data;
    doc._id = docInfo.metadata.id;
    doc._rev = docInfo.metadata.rev;

    if (newRevIsDeleted) {
      doc._deleted = true;
    }

    var hasAttachments = doc._attachments &&
      Object.keys(doc._attachments).length;
    if (hasAttachments) {
      return writeAttachments(docInfo, winningRev$$1, winningRevIsDeleted,
        isUpdate, resultsIdx, callback);
    }

    docCountDelta += delta;
    updateDocCountIfReady();

    finishDoc(docInfo, winningRev$$1, winningRevIsDeleted,
      isUpdate, resultsIdx, callback);
  }

  // store the doc body in BY_SEQ_STORE, then its metadata in DOC_STORE,
  // then the seq<->digest mappings; reports {ok, id, rev} into results
  function finishDoc(docInfo, winningRev$$1, winningRevIsDeleted,
                     isUpdate, resultsIdx, callback) {

    var doc = docInfo.data;
    var metadata = docInfo.metadata;

    doc._doc_id_rev = metadata.id + '::' + metadata.rev;
    delete doc._id;
    delete doc._rev;

    function afterPutDoc(e) {
      var revsToDelete = docInfo.stemmedRevs || [];

      if (isUpdate && api.auto_compaction) {
        revsToDelete = revsToDelete.concat(compactTree(docInfo.metadata));
      }

      if (revsToDelete && revsToDelete.length) {
        compactRevs(revsToDelete, docInfo.metadata.id, txn);
      }

      metadata.seq = e.target.result;
      // Current _rev is calculated from _rev_tree on read
      // delete metadata.rev;
      var metadataToStore = encodeMetadata(metadata, winningRev$$1,
        winningRevIsDeleted);
      var metaDataReq = docStore.put(metadataToStore);
      metaDataReq.onsuccess = afterPutMetadata;
    }

    function afterPutDocError(e) {
      // ConstraintError, need to update, not put (see #1638 for details)
      e.preventDefault(); // avoid transaction abort
      e.stopPropagation(); // avoid transaction onerror
      var index = bySeqStore.index('_doc_id_rev');
      var getKeyReq = index.getKey(doc._doc_id_rev);
      getKeyReq.onsuccess = function (e) {
        var putReq = bySeqStore.put(doc, e.target.result);
        putReq.onsuccess = afterPutDoc;
      };
    }

    function afterPutMetadata() {
      results[resultsIdx] = {
        ok: true,
        id: metadata.id,
        rev: metadata.rev
      };
      // keep fetchedDocs current so a later duplicate id in the same
      // batch sees this write
      fetchedDocs.set(docInfo.metadata.id, docInfo.metadata);
      insertAttachmentMappings(docInfo, metadata.seq, callback);
    }

    var putReq = bySeqStore.put(doc);

    putReq.onsuccess = afterPutDoc;
    putReq.onerror = afterPutDocError;
  }

  function writeAttachments(docInfo, winningRev$$1, winningRevIsDeleted,
                            isUpdate, resultsIdx, callback) {


    var doc = docInfo.data;

    var numDone = 0;
    var attachments = Object.keys(doc._attachments);

    function collectResults() {
      if (numDone === attachments.length) {
        finishDoc(docInfo, winningRev$$1, winningRevIsDeleted,
          isUpdate, resultsIdx, callback);
      }
    }

    function attachmentSaved() {
      numDone++;
      collectResults();
    }

    attachments.forEach(function (key) {
      var att = docInfo.data._attachments[key];
      if (!att.stub) {
        var data = att.data;
        delete att.data;
        att.revpos = parseInt(winningRev$$1, 10);
        var digest = att.digest;
        saveAttachment(digest, data, attachmentSaved);
      } else {
        numDone++;
        collectResults();
      }
    });
  }

  // map seqs to attachment digests, which
  // we will need later during compaction
  function insertAttachmentMappings(docInfo, seq, callback) {

    var attsAdded = 0;
    var attsToAdd = Object.keys(docInfo.data._attachments || {});

    if (!attsToAdd.length) {
      return callback();
    }

    function checkDone() {
      if (++attsAdded === attsToAdd.length) {
        callback();
      }
    }

    function add(att) {
      var digest = docInfo.data._attachments[att].digest;
      var req = attachAndSeqStore.put({
        seq,
        digestSeq: digest + '::' + seq
      });

      req.onsuccess = checkDone;
      req.onerror = function (e) {
        // this callback is for a constraint error, which we ignore
        // because this docid/rev has already been associated with
        // the digest (e.g. when new_edits == false)
        e.preventDefault(); // avoid transaction abort
        e.stopPropagation(); // avoid transaction onerror
        checkDone();
      };
    }
    for (var i = 0; i < attsToAdd.length; i++) {
      add(attsToAdd[i]); // do in parallel
    }
  }

  // attachments are deduplicated by digest: only store the body if no
  // record for this digest exists yet
  function saveAttachment(digest, data, callback) {


    var getKeyReq = attachStore.count(digest);
    getKeyReq.onsuccess = function (e) {
      var count = e.target.result;
      if (count) {
        return callback(); // already exists
      }
      var newAtt = {
        digest,
        body: data
      };
      var putReq = attachStore.put(newAtt);
      putReq.onsuccess = callback;
    };
  }
}
5296
5297// Abstraction over IDBCursor and getAll()/getAllKeys() that allows us to batch our operations
5298// while falling back to a normal IDBCursor operation on browsers that don't support getAll() or
5299// getAllKeys(). This allows for a much faster implementation than just straight-up cursors, because
5300// we're not processing each document one-at-a-time.
// Iterate `objectStore` over `keyRange` in batches of `batchSize`,
// calling onBatch(keys, values, cursor) per batch; onBatch() with no
// arguments signals completion. Uses getAll()/getAllKeys() when both
// are available (faster), otherwise a plain IDBCursor (batch size 1).
// batchSize === -1 means "pick a default" (1000).
function runBatchedCursor(objectStore, keyRange, descending, batchSize, onBatch) {

  if (batchSize === -1) {
    batchSize = 1000;
  }

  // Bail out of getAll()/getAllKeys() in the following cases:
  // 1) either method is unsupported - we need both
  // 2) batchSize is 1 (might as well use IDBCursor)
  // 3) descending – no real way to do this via getAll()/getAllKeys()

  var useGetAll = typeof objectStore.getAll === 'function' &&
    typeof objectStore.getAllKeys === 'function' &&
    batchSize > 1 && !descending;

  var keysBatch;
  var valuesBatch;
  var pseudoCursor;

  // getAll() and getAllKeys() results arrive independently; whichever
  // lands second fires onBatch with the completed pair
  function onGetAll(e) {
    valuesBatch = e.target.result;
    if (keysBatch) {
      onBatch(keysBatch, valuesBatch, pseudoCursor);
    }
  }

  function onGetAllKeys(e) {
    keysBatch = e.target.result;
    if (valuesBatch) {
      onBatch(keysBatch, valuesBatch, pseudoCursor);
    }
  }

  // "continue" implementation for the getAll-based pseudo cursor:
  // request the next batch starting just after the last key seen
  function continuePseudoCursor() {
    if (!keysBatch.length) { // no more results
      return onBatch();
    }
    // fetch next batch, exclusive start
    var lastKey = keysBatch[keysBatch.length - 1];
    var newKeyRange;
    if (keyRange && keyRange.upper) {
      try {
        newKeyRange = IDBKeyRange.bound(lastKey, keyRange.upper,
          true, keyRange.upperOpen);
      } catch (e) {
        if (e.name === "DataError" && e.code === 0) {
          return onBatch(); // we're done, startkey and endkey are equal
        }
      }
    } else {
      newKeyRange = IDBKeyRange.lowerBound(lastKey, true);
    }
    keyRange = newKeyRange;
    keysBatch = null;
    valuesBatch = null;
    objectStore.getAll(keyRange, batchSize).onsuccess = onGetAll;
    objectStore.getAllKeys(keyRange, batchSize).onsuccess = onGetAllKeys;
  }

  function onCursor(e) {
    var cursor = e.target.result;
    if (!cursor) { // done
      return onBatch();
    }
    // regular IDBCursor acts like a batch where batch size is always 1
    onBatch([cursor.key], [cursor.value], cursor);
  }

  if (useGetAll) {
    pseudoCursor = {"continue": continuePseudoCursor};
    objectStore.getAll(keyRange, batchSize).onsuccess = onGetAll;
    objectStore.getAllKeys(keyRange, batchSize).onsuccess = onGetAllKeys;
  } else if (descending) {
    objectStore.openCursor(keyRange, 'prev').onsuccess = onCursor;
  } else {
    objectStore.openCursor(keyRange).onsuccess = onCursor;
  }
}
5379
// Simple shim for objectStore.getAll(): use the native method when the
// browser has it, otherwise collect all values with an IDBCursor and
// deliver them through a synthesized {target: {result}} event.
function getAll(objectStore, keyRange, onSuccess) {
  if (typeof objectStore.getAll === 'function') {
    // use native getAll
    objectStore.getAll(keyRange).onsuccess = onSuccess;
    return;
  }
  // fall back to cursors
  var collected = [];

  function onCursor(e) {
    var cursor = e.target.result;
    if (!cursor) {
      onSuccess({
        target: {
          result: collected
        }
      });
      return;
    }
    collected.push(cursor.value);
    cursor.continue();
  }

  objectStore.openCursor(keyRange).onsuccess = onCursor;
}
5406
// Fetch one record per requested key for a "keys" allDocs request.
// Results can arrive in any order, so each one is slotted by index;
// missing keys become {key, error: 'not_found'} placeholders.
function allDocsKeys(keys, docStore, onBatch) {
  var valuesBatch = new Array(keys.length);
  var received = 0;
  keys.forEach(function (key, index) {
    docStore.get(key).onsuccess = function (event) {
      var result = event.target.result;
      valuesBatch[index] = result ? result : {key, error: 'not_found'};
      received++;
      if (received === keys.length) {
        onBatch(keys, valuesBatch, {});
      }
    };
  });
}
5425
// Build the IDBKeyRange for an allDocs query from start/end/key options
// (each `false` when absent). For descending queries the bounds are
// swapped since the cursor walks in 'prev' order. Returns null when no
// constraint applies, or {error} if IDBKeyRange rejects the bounds.
function createKeyRange(start, end, inclusiveEnd, key, descending) {
  try {
    if (start && end) {
      return descending
        ? IDBKeyRange.bound(end, start, !inclusiveEnd, false)
        : IDBKeyRange.bound(start, end, false, !inclusiveEnd);
    }
    if (start) {
      return descending
        ? IDBKeyRange.upperBound(start)
        : IDBKeyRange.lowerBound(start);
    }
    if (end) {
      return descending
        ? IDBKeyRange.lowerBound(end, !inclusiveEnd)
        : IDBKeyRange.upperBound(end, !inclusiveEnd);
    }
    if (key) {
      return IDBKeyRange.only(key);
    }
  } catch (e) {
    return {error: e};
  }
  return null;
}
5454
5455function idbAllDocs(opts, idb, callback) {
5456 var start = 'startkey' in opts ? opts.startkey : false;
5457 var end = 'endkey' in opts ? opts.endkey : false;
5458 var key = 'key' in opts ? opts.key : false;
5459 var keys = 'keys' in opts ? opts.keys : false;
5460 var skip = opts.skip || 0;
5461 var limit = typeof opts.limit === 'number' ? opts.limit : -1;
5462 var inclusiveEnd = opts.inclusive_end !== false;
5463
5464 var keyRange ;
5465 var keyRangeError;
5466 if (!keys) {
5467 keyRange = createKeyRange(start, end, inclusiveEnd, key, opts.descending);
5468 keyRangeError = keyRange && keyRange.error;
5469 if (keyRangeError &&
5470 !(keyRangeError.name === "DataError" && keyRangeError.code === 0)) {
5471 // DataError with error code 0 indicates start is less than end, so
5472 // can just do an empty query. Else need to throw
5473 return callback(createError(IDB_ERROR,
5474 keyRangeError.name, keyRangeError.message));
5475 }
5476 }
5477
5478 var stores = [DOC_STORE, BY_SEQ_STORE, META_STORE];
5479
5480 if (opts.attachments) {
5481 stores.push(ATTACH_STORE);
5482 }
5483 var txnResult = openTransactionSafely(idb, stores, 'readonly');
5484 if (txnResult.error) {
5485 return callback(txnResult.error);
5486 }
5487 var txn = txnResult.txn;
5488 txn.oncomplete = onTxnComplete;
5489 txn.onabort = idbError(callback);
5490 var docStore = txn.objectStore(DOC_STORE);
5491 var seqStore = txn.objectStore(BY_SEQ_STORE);
5492 var metaStore = txn.objectStore(META_STORE);
5493 var docIdRevIndex = seqStore.index('_doc_id_rev');
5494 var results = [];
5495 var docCount;
5496 var updateSeq;
5497
5498 metaStore.get(META_STORE).onsuccess = function (e) {
5499 docCount = e.target.result.docCount;
5500 };
5501
5502 /* istanbul ignore if */
5503 if (opts.update_seq) {
5504 // get max updateSeq
5505 seqStore.openKeyCursor(null, 'prev').onsuccess = e => {
5506 var cursor = e.target.result;
5507 if (cursor && cursor.key) {
5508 updateSeq = cursor.key;
5509 }
5510 };
5511 }
5512
5513 // if the user specifies include_docs=true, then we don't
5514 // want to block the main cursor while we're fetching the doc
5515 function fetchDocAsynchronously(metadata, row, winningRev$$1) {
5516 var key = metadata.id + "::" + winningRev$$1;
5517 docIdRevIndex.get(key).onsuccess = function onGetDoc(e) {
5518 row.doc = decodeDoc(e.target.result) || {};
5519 if (opts.conflicts) {
5520 var conflicts = collectConflicts(metadata);
5521 if (conflicts.length) {
5522 row.doc._conflicts = conflicts;
5523 }
5524 }
5525 fetchAttachmentsIfNecessary(row.doc, opts, txn);
5526 };
5527 }
5528
5529 function allDocsInner(winningRev$$1, metadata) {
5530 var row = {
5531 id: metadata.id,
5532 key: metadata.id,
5533 value: {
5534 rev: winningRev$$1
5535 }
5536 };
5537 var deleted = metadata.deleted;
5538 if (deleted) {
5539 if (keys) {
5540 results.push(row);
5541 // deleted docs are okay with "keys" requests
5542 row.value.deleted = true;
5543 row.doc = null;
5544 }
5545 } else if (skip-- <= 0) {
5546 results.push(row);
5547 if (opts.include_docs) {
5548 fetchDocAsynchronously(metadata, row, winningRev$$1);
5549 }
5550 }
5551 }
5552
5553 function processBatch(batchValues) {
5554 for (var i = 0, len = batchValues.length; i < len; i++) {
5555 if (results.length === limit) {
5556 break;
5557 }
5558 var batchValue = batchValues[i];
5559 if (batchValue.error && keys) {
5560 // key was not found with "keys" requests
5561 results.push(batchValue);
5562 continue;
5563 }
5564 var metadata = decodeMetadata(batchValue);
5565 var winningRev$$1 = metadata.winningRev;
5566 allDocsInner(winningRev$$1, metadata);
5567 }
5568 }
5569
5570 function onBatch(batchKeys, batchValues, cursor) {
5571 if (!cursor) {
5572 return;
5573 }
5574 processBatch(batchValues);
5575 if (results.length < limit) {
5576 cursor.continue();
5577 }
5578 }
5579
5580 function onGetAll(e) {
5581 var values = e.target.result;
5582 if (opts.descending) {
5583 values = values.reverse();
5584 }
5585 processBatch(values);
5586 }
5587
5588 function onResultsReady() {
5589 var returnVal = {
5590 total_rows: docCount,
5591 offset: opts.skip,
5592 rows: results
5593 };
5594
5595 /* istanbul ignore if */
5596 if (opts.update_seq && updateSeq !== undefined) {
5597 returnVal.update_seq = updateSeq;
5598 }
5599 callback(null, returnVal);
5600 }
5601
5602 function onTxnComplete() {
5603 if (opts.attachments) {
5604 postProcessAttachments(results, opts.binary).then(onResultsReady);
5605 } else {
5606 onResultsReady();
5607 }
5608 }
5609
  // don't bother doing any requests if start > end or limit === 0
  if (keyRangeError || limit === 0) {
    return;
  }
  if (keys) {
    // explicit list of keys: fetch each requested id directly
    return allDocsKeys(keys, docStore, onBatch);
  }
  if (limit === -1) { // just fetch everything
    return getAll(docStore, keyRange, onGetAll);
  }
  // else do a cursor
  // choose a batch size based on the skip, since we'll need to skip that many
  runBatchedCursor(docStore, keyRange, opts.descending, limit + skip, onBatch);
}
5624
// Count the live (non-deleted, non-local) docs via the 'deletedOrLocal'
// index — '0' marks live entries — and report the total through `cb`.
function countDocs(txn, cb) {
  var liveIndex = txn.objectStore(DOC_STORE).index('deletedOrLocal');
  var countReq = liveIndex.count(IDBKeyRange.only('0'));
  countReq.onsuccess = function (e) {
    cb(e.target.result);
  };
}
5631
// This task queue ensures that IDB open calls are done in their own tick

// true while a queued task is currently executing
var running = false;
// FIFO of pending tasks; drained one at a time by applyNext()
var queue = [];
5636
// Invoke a user-supplied callback with (err, res), routing any synchronous
// throw to the PouchDB 'error' event instead of letting it escape.
function tryCode(fun, err, res, PouchDB) {
  try {
    fun(err, res);
  } catch (thrown) {
    // Shouldn't happen, but in some odd cases IndexedDB implementations
    // might throw a sync error, in which case this will at least log it.
    PouchDB.emit('error', thrown);
  }
}
5647
// Start the next queued task, but only when nothing is currently running
// and there is actually work to do.
function applyNext() {
  if (!running && queue.length) {
    running = true;
    queue.shift()();
  }
}
5655
// Queue `action` so that IDB open calls run one at a time, each starting in
// its own tick; `callback` receives the action's result via tryCode.
function enqueueTask(action, callback, PouchDB) {
  function runAction() {
    action(function runCallback(err, res) {
      tryCode(callback, err, res, PouchDB);
      running = false;
      // hand control back to the event loop before starting the next task
      nextTick(function runNext() {
        applyNext(PouchDB);
      });
    });
  }
  queue.push(runAction);
  applyNext();
}
5668
// _changes implementation for the idb adapter. A continuous feed only
// registers a listener on the shared changesHandler and returns a cancel
// handle; a one-shot feed walks the by-sequence store with a batched cursor.
function changes(opts, api, dbName, idb) {
  opts = clone(opts);

  if (opts.continuous) {
    var id = dbName + ':' + uuid();
    changesHandler.addListener(dbName, id, api, opts);
    changesHandler.notify(dbName);
    return {
      cancel: function () {
        changesHandler.removeListener(dbName, id);
      }
    };
  }

  // optional whitelist of doc ids to report
  var docIds = opts.doc_ids && new Set(opts.doc_ids);

  opts.since = opts.since || 0;
  var lastSeq = opts.since;

  var limit = 'limit' in opts ? opts.limit : -1;
  if (limit === 0) {
    limit = 1; // per CouchDB _changes spec
  }

  var results = [];
  var numResults = 0;
  var filter = filterChange(opts);
  // cache of docId -> metadata so repeated seqs for one doc hit IDB once
  var docIdsToMetadata = new Map();

  var txn;
  var bySeqStore;
  var docStore;
  var docIdRevIndex;

  function onBatch(batchKeys, batchValues, cursor) {
    if (!cursor || !batchKeys.length) { // done
      return;
    }

    var winningDocs = new Array(batchKeys.length);
    var metadatas = new Array(batchKeys.length);

    // Apply the change filter and (optionally) attachment inlining to one
    // winning doc; resolves with the change object, or undefined if filtered.
    function processMetadataAndWinningDoc(metadata, winningDoc) {
      var change = opts.processChange(winningDoc, metadata, opts);
      lastSeq = change.seq = metadata.seq;

      var filtered = filter(change);
      if (typeof filtered === 'object') { // anything but true/false indicates error
        return Promise.reject(filtered);
      }

      if (!filtered) {
        return Promise.resolve();
      }
      numResults++;
      if (opts.return_docs) {
        results.push(change);
      }
      // process the attachment immediately
      // for the benefit of live listeners
      if (opts.attachments && opts.include_docs) {
        return new Promise(function (resolve) {
          fetchAttachmentsIfNecessary(winningDoc, opts, txn, function () {
            postProcessAttachments([change], opts.binary).then(function () {
              resolve(change);
            });
          });
        });
      } else {
        return Promise.resolve(change);
      }
    }

    // All winning docs and metadatas for the batch are collected; emit the
    // surviving changes in order and advance the cursor if under the limit.
    function onBatchDone() {
      var promises = [];
      for (var i = 0, len = winningDocs.length; i < len; i++) {
        if (numResults === limit) {
          break;
        }
        var winningDoc = winningDocs[i];
        if (!winningDoc) {
          continue;
        }
        var metadata = metadatas[i];
        promises.push(processMetadataAndWinningDoc(metadata, winningDoc));
      }

      Promise.all(promises).then(function (changes) {
        for (var i = 0, len = changes.length; i < len; i++) {
          if (changes[i]) {
            opts.onChange(changes[i]);
          }
        }
      }).catch(opts.complete);

      if (numResults !== limit) {
        cursor.continue();
      }
    }

    // Fetch all metadatas/winningdocs from this batch in parallel, then process
    // them all only once all data has been collected. This is done in parallel
    // because it's faster than doing it one-at-a-time.
    var numDone = 0;
    batchValues.forEach(function (value, i) {
      var doc = decodeDoc(value);
      var seq = batchKeys[i];
      fetchWinningDocAndMetadata(doc, seq, function (metadata, winningDoc) {
        metadatas[i] = metadata;
        winningDocs[i] = winningDoc;
        if (++numDone === batchKeys.length) {
          onBatchDone();
        }
      });
    });
  }

  // Given the doc stored at `seq`, report (metadata, winningDoc) to `cb`,
  // or call cb() with no args if this seq should be skipped.
  function onGetMetadata(doc, seq, metadata, cb) {
    if (metadata.seq !== seq) {
      // some other seq is later
      return cb();
    }

    if (metadata.winningRev === doc._rev) {
      // this is the winning doc
      return cb(metadata, doc);
    }

    // fetch winning doc in separate request
    var docIdRev = doc._id + '::' + metadata.winningRev;
    var req = docIdRevIndex.get(docIdRev);
    req.onsuccess = function (e) {
      cb(metadata, decodeDoc(e.target.result));
    };
  }

  function fetchWinningDocAndMetadata(doc, seq, cb) {
    if (docIds && !docIds.has(doc._id)) {
      return cb();
    }

    var metadata = docIdsToMetadata.get(doc._id);
    if (metadata) { // cached
      return onGetMetadata(doc, seq, metadata, cb);
    }
    // metadata not cached, have to go fetch it
    docStore.get(doc._id).onsuccess = function (e) {
      metadata = decodeMetadata(e.target.result);
      docIdsToMetadata.set(doc._id, metadata);
      onGetMetadata(doc, seq, metadata, cb);
    };
  }

  function finish() {
    opts.complete(null, {
      results,
      last_seq: lastSeq
    });
  }

  function onTxnComplete() {
    if (!opts.continuous && opts.attachments) {
      // cannot guarantee that postProcessing was already done,
      // so do it again
      // NOTE(review): unlike the _all_docs path, opts.binary is not passed
      // here — confirm whether binary output is intended for this case
      postProcessAttachments(results).then(finish);
    } else {
      finish();
    }
  }

  var objectStores = [DOC_STORE, BY_SEQ_STORE];
  if (opts.attachments) {
    objectStores.push(ATTACH_STORE);
  }
  var txnResult = openTransactionSafely(idb, objectStores, 'readonly');
  if (txnResult.error) {
    return opts.complete(txnResult.error);
  }
  txn = txnResult.txn;
  txn.onabort = idbError(opts.complete);
  txn.oncomplete = onTxnComplete;

  bySeqStore = txn.objectStore(BY_SEQ_STORE);
  docStore = txn.objectStore(DOC_STORE);
  docIdRevIndex = bySeqStore.index('_doc_id_rev');

  // resume strictly after opts.since, unless iterating backwards
  var keyRange = (opts.since && !opts.descending) ?
    IDBKeyRange.lowerBound(opts.since, true) : null;

  runBatchedCursor(bySeqStore, keyRange, opts.descending, limit, onBatch);
}
5860
// open IDB handles shared across instances, keyed by database name
var cachedDBs = new Map();
// memoized blob-support detection (checked at most once per process)
var blobSupportPromise;
// in-flight indexedDB.open requests by db name; used by _destroy
var openReqList = new Map();
5864
// Adapter constructor: defers the actual database open through the task
// queue so that each IDB open call runs in its own tick.
function IdbPouch(opts, callback) {
  var api = this;

  function openTask(thisCallback) {
    init(api, opts, thisCallback);
  }
  enqueueTask(openTask, callback, api.constructor);
}
5872
// Open (and if necessary create/migrate) the IndexedDB database backing
// `api`, wiring up all adapter methods before invoking `callback`.
function init(api, opts, callback) {

  var dbName = opts.name;

  var idb = null;
  // remembers a global (transaction-aborting) failure so later errors can
  // be annotated with the root cause; see enrichCallbackError
  var idbGlobalFailureError = null;
  api._meta = null;
5880
5881 function enrichCallbackError(callback) {
5882 return function (error, result) {
5883 if (error && error instanceof Error && !error.reason) {
5884 if (idbGlobalFailureError) {
5885 error.reason = idbGlobalFailureError;
5886 }
5887 }
5888
5889 callback(error, result);
5890 };
5891 }
5892
  // called when creating a fresh new database: builds the full v5 schema
  // in one shot (the per-version migrations below are for existing DBs)
  function createSchema(db) {
    var docStore = db.createObjectStore(DOC_STORE, {keyPath : 'id'});
    db.createObjectStore(BY_SEQ_STORE, {autoIncrement: true})
      .createIndex('_doc_id_rev', '_doc_id_rev', {unique: true});
    db.createObjectStore(ATTACH_STORE, {keyPath: 'digest'});
    db.createObjectStore(META_STORE, {keyPath: 'id', autoIncrement: false});
    db.createObjectStore(DETECT_BLOB_SUPPORT_STORE);

    // added in v2
    docStore.createIndex('deletedOrLocal', 'deletedOrLocal', {unique : false});

    // added in v3
    db.createObjectStore(LOCAL_STORE, {keyPath: '_id'});

    // added in v4
    var attAndSeqStore = db.createObjectStore(ATTACH_AND_SEQ_STORE,
      {autoIncrement: true});
    attAndSeqStore.createIndex('seq', 'seq');
    attAndSeqStore.createIndex('digestSeq', 'digestSeq', {unique: true});
  }
5914
  // migration to version 2
  // unfortunately "deletedOrLocal" is a misnomer now that we no longer
  // store local docs in the main doc-store, but whaddyagonnado
  function addDeletedOrLocalIndex(txn, callback) {
    var docStore = txn.objectStore(DOC_STORE);
    docStore.createIndex('deletedOrLocal', 'deletedOrLocal', {unique : false});

    docStore.openCursor().onsuccess = function (event) {
      var cursor = event.target.result;
      if (cursor) {
        var metadata = cursor.value;
        var deleted = isDeleted(metadata);
        // back-fill the new index key on every existing metadata row
        metadata.deletedOrLocal = deleted ? "1" : "0";
        docStore.put(metadata);
        cursor.continue();
      } else {
        // cursor exhausted: this migration step is complete
        callback();
      }
    };
  }
5935
  // migration to version 3 (part 1): create the dedicated store for
  // _local docs, with its own doc-id::rev index
  function createLocalStoreSchema(db) {
    db.createObjectStore(LOCAL_STORE, {keyPath: '_id'})
      .createIndex('_doc_id_rev', '_doc_id_rev', {unique: true});
  }
5941
  // migration to version 3 (part 2): move _local docs out of the main
  // doc-store and by-seq store into the new LOCAL_STORE
  function migrateLocalStore(txn, cb) {
    var localStore = txn.objectStore(LOCAL_STORE);
    var docStore = txn.objectStore(DOC_STORE);
    var seqStore = txn.objectStore(BY_SEQ_STORE);

    var cursor = docStore.openCursor();
    cursor.onsuccess = function (event) {
      var cursor = event.target.result;
      if (cursor) {
        var metadata = cursor.value;
        var docId = metadata.id;
        var local = isLocalId(docId);
        var rev$$1 = winningRev(metadata);
        if (local) {
          var docIdRev = docId + "::" + rev$$1;
          // remove all seq entries
          // associated with this docId
          var start = docId + "::";
          var end = docId + "::~";
          var index = seqStore.index('_doc_id_rev');
          var range = IDBKeyRange.bound(start, end, false, false);
          var seqCursor = index.openCursor(range);
          seqCursor.onsuccess = function (e) {
            seqCursor = e.target.result;
            if (!seqCursor) {
              // done
              docStore.delete(cursor.primaryKey);
              cursor.continue();
            } else {
              var data = seqCursor.value;
              // only the winning rev's data is copied into the local store;
              // every seq entry for this doc is deleted either way
              if (data._doc_id_rev === docIdRev) {
                localStore.put(data);
              }
              seqStore.delete(seqCursor.primaryKey);
              seqCursor.continue();
            }
          };
        } else {
          cursor.continue();
        }
      } else if (cb) {
        cb();
      }
    };
  }
5988
  // migration to version 4 (part 1): create the attachment-to-seq mapping
  // store (same shape as in createSchema above)
  function addAttachAndSeqStore(db) {
    var attAndSeqStore = db.createObjectStore(ATTACH_AND_SEQ_STORE,
      {autoIncrement: true});
    attAndSeqStore.createIndex('seq', 'seq');
    attAndSeqStore.createIndex('digestSeq', 'digestSeq', {unique: true});
  }
5996
  // migration to version 4 (part 2): populate ATTACH_AND_SEQ_STORE from the
  // attachments referenced by every doc in the by-seq store
  function migrateAttsAndSeqs(txn, callback) {
    var seqStore = txn.objectStore(BY_SEQ_STORE);
    var attStore = txn.objectStore(ATTACH_STORE);
    var attAndSeqStore = txn.objectStore(ATTACH_AND_SEQ_STORE);

    // need to actually populate the table. this is the expensive part,
    // so as an optimization, check first that this database even
    // contains attachments
    var req = attStore.count();
    req.onsuccess = function (e) {
      var count = e.target.result;
      if (!count) {
        return callback(); // done
      }

      seqStore.openCursor().onsuccess = function (e) {
        var cursor = e.target.result;
        if (!cursor) {
          return callback(); // done
        }
        var doc = cursor.value;
        var seq = cursor.primaryKey;
        var atts = Object.keys(doc._attachments || {});
        var digestMap = {};
        for (var j = 0; j < atts.length; j++) {
          var att = doc._attachments[atts[j]];
          digestMap[att.digest] = true; // uniq digests, just in case
        }
        var digests = Object.keys(digestMap);
        for (j = 0; j < digests.length; j++) {
          var digest = digests[j];
          attAndSeqStore.put({
            seq,
            digestSeq: digest + '::' + seq
          });
        }
        cursor.continue();
      };
    };
  }
6038
  // migration to version 5
  // Instead of relying on on-the-fly migration of metadata,
  // this brings the doc-store to its modern form:
  // - metadata.winningrev
  // - metadata.seq
  // - stringify the metadata when storing it
  function migrateMetadata(txn) {

    // handles both pre-stringified and stringified stored metadata
    function decodeMetadataCompat(storedObject) {
      if (!storedObject.data) {
        // old format, when we didn't store it stringified
        storedObject.deleted = storedObject.deletedOrLocal === '1';
        return storedObject;
      }
      return decodeMetadata(storedObject);
    }

    // ensure that every metadata has a winningRev and seq,
    // which was previously created on-the-fly but better to migrate
    var bySeqStore = txn.objectStore(BY_SEQ_STORE);
    var docStore = txn.objectStore(DOC_STORE);
    var cursor = docStore.openCursor();
    cursor.onsuccess = function (e) {
      var cursor = e.target.result;
      if (!cursor) {
        return; // done
      }
      var metadata = decodeMetadataCompat(cursor.value);

      metadata.winningRev = metadata.winningRev ||
        winningRev(metadata);

      function fetchMetadataSeq() {
        // metadata.seq was added post-3.2.0, so if it's missing,
        // we need to fetch it manually
        var start = metadata.id + '::';
        var end = metadata.id + '::\uffff';
        var req = bySeqStore.index('_doc_id_rev').openCursor(
          IDBKeyRange.bound(start, end));

        // highest seq found for this doc id wins
        var metadataSeq = 0;
        req.onsuccess = function (e) {
          var cursor = e.target.result;
          if (!cursor) {
            metadata.seq = metadataSeq;
            return onGetMetadataSeq();
          }
          var seq = cursor.primaryKey;
          if (seq > metadataSeq) {
            metadataSeq = seq;
          }
          cursor.continue();
        };
      }

      function onGetMetadataSeq() {
        // re-store in the modern (stringified) format, then advance
        var metadataToStore = encodeMetadata(metadata,
          metadata.winningRev, metadata.deleted);

        var req = docStore.put(metadataToStore);
        req.onsuccess = function () {
          cursor.continue();
        };
      }

      if (metadata.seq) {
        return onGetMetadataSeq();
      }

      fetchMetadataSeq();
    };

  }
6112
  // this is a local (non-HTTP) adapter
  api._remote = false;
  api.type = function () {
    return 'idb';
  };

  // instance id is read off the meta doc populated during setup below
  api._id = toPromise(function (callback) {
    callback(null, api._meta.instanceId);
  });

  api._bulkDocs = function idb_bulkDocs(req, reqOpts, callback) {
    idbBulkDocs(opts, req, reqOpts, api, idb, enrichCallbackError(callback));
  };
6125
  // First we look up the metadata in the ids database, then we fetch the
  // current revision(s) from the by sequence store
  api._get = function idb_get(id, opts, callback) {
    var doc;
    var metadata;
    var err;
    // reuse a caller-provided transaction (opts.ctx) when available
    var txn = opts.ctx;
    if (!txn) {
      var txnResult = openTransactionSafely(idb,
        [DOC_STORE, BY_SEQ_STORE, ATTACH_STORE], 'readonly');
      if (txnResult.error) {
        return callback(txnResult.error);
      }
      txn = txnResult.txn;
    }

    // single exit point: reports whatever err/doc/metadata were set above
    function finish() {
      callback(err, {doc, metadata, ctx: txn});
    }

    txn.objectStore(DOC_STORE).get(id).onsuccess = function (e) {
      metadata = decodeMetadata(e.target.result);
      // we can determine the result here if:
      // 1. there is no such document
      // 2. the document is deleted and we don't ask about specific rev
      // When we ask with opts.rev we expect the answer to be either
      // doc (possibly with _deleted=true) or missing error
      if (!metadata) {
        err = createError(MISSING_DOC, 'missing');
        return finish();
      }

      var rev$$1;
      if (!opts.rev) {
        rev$$1 = metadata.winningRev;
        var deleted = isDeleted(metadata);
        if (deleted) {
          err = createError(MISSING_DOC, "deleted");
          return finish();
        }
      } else {
        rev$$1 = opts.latest ? latest(opts.rev, metadata) : opts.rev;
      }

      var objectStore = txn.objectStore(BY_SEQ_STORE);
      var key = metadata.id + '::' + rev$$1;

      objectStore.index('_doc_id_rev').get(key).onsuccess = function (e) {
        doc = e.target.result;
        if (doc) {
          doc = decodeDoc(doc);
        }
        if (!doc) {
          err = createError(MISSING_DOC, 'missing');
          return finish();
        }
        finish();
      };
    };
  };
6186
6187 api._getAttachment = function (docId, attachId, attachment, opts, callback) {
6188 var txn;
6189 if (opts.ctx) {
6190 txn = opts.ctx;
6191 } else {
6192 var txnResult = openTransactionSafely(idb,
6193 [DOC_STORE, BY_SEQ_STORE, ATTACH_STORE], 'readonly');
6194 if (txnResult.error) {
6195 return callback(txnResult.error);
6196 }
6197 txn = txnResult.txn;
6198 }
6199 var digest = attachment.digest;
6200 var type = attachment.content_type;
6201
6202 txn.objectStore(ATTACH_STORE).get(digest).onsuccess = function (e) {
6203 var body = e.target.result.body;
6204 readBlobData(body, type, opts.binary, function (blobData) {
6205 callback(null, blobData);
6206 });
6207 };
6208 };
6209
  // Report doc_count (from the meta doc) and update_seq (highest by-seq key)
  api._info = function idb_info(callback) {
    var updateSeq;
    var docCount;

    var txnResult = openTransactionSafely(idb, [META_STORE, BY_SEQ_STORE], 'readonly');
    if (txnResult.error) {
      return callback(txnResult.error);
    }
    var txn = txnResult.txn;
    txn.objectStore(META_STORE).get(META_STORE).onsuccess = function (e) {
      docCount = e.target.result.docCount;
    };
    // scan the by-seq store backwards: the first key is the latest seq
    txn.objectStore(BY_SEQ_STORE).openKeyCursor(null, 'prev').onsuccess = function (e) {
      var cursor = e.target.result;
      updateSeq = cursor ? cursor.key : 0;
    };

    // both requests above have fired by the time the txn completes
    txn.oncomplete = function () {
      callback(null, {
        doc_count: docCount,
        update_seq: updateSeq,
        // for debugging
        idb_attachment_format: (api._meta.blobSupport ? 'binary' : 'base64')
      });
    };
  };
6236
  api._allDocs = function idb_allDocs(opts, callback) {
    idbAllDocs(opts, idb, enrichCallbackError(callback));
  };

  // delegates to the module-level changes() above
  api._changes = function idbChanges(opts) {
    return changes(opts, api, dbName, idb);
  };
6244
  // Close the underlying IDB handle and evict it from the shared cache.
  api._close = function (callback) {
    // https://developer.mozilla.org/en-US/docs/IndexedDB/IDBDatabase#close
    // "Returns immediately and closes the connection in a separate thread..."
    idb.close();
    cachedDBs.delete(dbName);
    callback();
  };
6252
6253 api._getRevisionTree = function (docId, callback) {
6254 var txnResult = openTransactionSafely(idb, [DOC_STORE], 'readonly');
6255 if (txnResult.error) {
6256 return callback(txnResult.error);
6257 }
6258 var txn = txnResult.txn;
6259 var req = txn.objectStore(DOC_STORE).get(docId);
6260 req.onsuccess = function (event) {
6261 var doc = decodeMetadata(event.target.result);
6262 if (!doc) {
6263 callback(createError(MISSING_DOC));
6264 } else {
6265 callback(null, doc.rev_tree);
6266 }
6267 };
6268 };
6269
  // This function removes revisions of document docId
  // which are listed in revs and sets this document
  // revision to to rev_tree
  api._doCompaction = function (docId, revs, callback) {
    var stores = [
      DOC_STORE,
      BY_SEQ_STORE,
      ATTACH_STORE,
      ATTACH_AND_SEQ_STORE
    ];
    var txnResult = openTransactionSafely(idb, stores, 'readwrite');
    if (txnResult.error) {
      return callback(txnResult.error);
    }
    var txn = txnResult.txn;

    var docStore = txn.objectStore(DOC_STORE);

    docStore.get(docId).onsuccess = function (event) {
      var metadata = decodeMetadata(event.target.result);
      // mark every compacted revision as 'missing' in the rev tree
      traverseRevTree(metadata.rev_tree, function (isLeaf, pos,
                                                   revHash, ctx, opts) {
        var rev$$1 = pos + '-' + revHash;
        if (revs.indexOf(rev$$1) !== -1) {
          opts.status = 'missing';
        }
      });
      // delete the doc bodies and orphaned attachments for those revs
      compactRevs(revs, docId, txn);
      var winningRev$$1 = metadata.winningRev;
      var deleted = metadata.deleted;
      txn.objectStore(DOC_STORE).put(
        encodeMetadata(metadata, winningRev$$1, deleted));
    };
    txn.onabort = idbError(callback);
    txn.oncomplete = function () {
      callback();
    };
  };
6308
6309
6310 api._getLocal = function (id, callback) {
6311 var txnResult = openTransactionSafely(idb, [LOCAL_STORE], 'readonly');
6312 if (txnResult.error) {
6313 return callback(txnResult.error);
6314 }
6315 var tx = txnResult.txn;
6316 var req = tx.objectStore(LOCAL_STORE).get(id);
6317
6318 req.onerror = idbError(callback);
6319 req.onsuccess = function (e) {
6320 var doc = e.target.result;
6321 if (!doc) {
6322 callback(createError(MISSING_DOC));
6323 } else {
6324 delete doc['_doc_id_rev']; // for backwards compat
6325 callback(null, doc);
6326 }
6327 };
6328 };
6329
  // Write a _local doc. Revisions are a simple '0-N' counter; conflicts are
  // detected by comparing the stored rev against the incoming one.
  api._putLocal = function (doc, opts, callback) {
    if (typeof opts === 'function') {
      callback = opts;
      opts = {};
    }
    delete doc._revisions; // ignore this, trust the rev
    var oldRev = doc._rev;
    var id = doc._id;
    if (!oldRev) {
      doc._rev = '0-1';
    } else {
      doc._rev = '0-' + (parseInt(oldRev.split('-')[1], 10) + 1);
    }

    var tx = opts.ctx;
    var ret;
    if (!tx) {
      var txnResult = openTransactionSafely(idb, [LOCAL_STORE], 'readwrite');
      if (txnResult.error) {
        return callback(txnResult.error);
      }
      tx = txnResult.txn;
      tx.onerror = idbError(callback);
      // when we own the transaction, only report success on commit
      tx.oncomplete = function () {
        if (ret) {
          callback(null, ret);
        }
      };
    }

    var oStore = tx.objectStore(LOCAL_STORE);
    var req;
    if (oldRev) {
      req = oStore.get(id);
      req.onsuccess = function (e) {
        var oldDoc = e.target.result;
        if (!oldDoc || oldDoc._rev !== oldRev) {
          callback(createError(REV_CONFLICT));
        } else { // update
          var req = oStore.put(doc);
          req.onsuccess = function () {
            ret = {ok: true, id: doc._id, rev: doc._rev};
            if (opts.ctx) { // return immediately
              callback(null, ret);
            }
          };
        }
      };
    } else { // new doc
      req = oStore.add(doc);
      req.onerror = function (e) {
        // constraint error, already exists
        callback(createError(REV_CONFLICT));
        e.preventDefault(); // avoid transaction abort
        e.stopPropagation(); // avoid transaction onerror
      };
      req.onsuccess = function () {
        ret = {ok: true, id: doc._id, rev: doc._rev};
        if (opts.ctx) { // return immediately
          callback(null, ret);
        }
      };
    }
  };
6394
  // Delete a _local doc; errors with MISSING_DOC if the stored rev does not
  // match the incoming doc's rev.
  api._removeLocal = function (doc, opts, callback) {
    if (typeof opts === 'function') {
      callback = opts;
      opts = {};
    }
    var tx = opts.ctx;
    if (!tx) {
      var txnResult = openTransactionSafely(idb, [LOCAL_STORE], 'readwrite');
      if (txnResult.error) {
        return callback(txnResult.error);
      }
      tx = txnResult.txn;
      // NOTE(review): unlike _putLocal, no tx.onerror handler is installed
      // here — confirm whether that asymmetry is intentional
      tx.oncomplete = function () {
        if (ret) {
          callback(null, ret);
        }
      };
    }
    // `ret` is hoisted, so the oncomplete closure above sees assignments below
    var ret;
    var id = doc._id;
    var oStore = tx.objectStore(LOCAL_STORE);
    var req = oStore.get(id);

    req.onerror = idbError(callback);
    req.onsuccess = function (e) {
      var oldDoc = e.target.result;
      if (!oldDoc || oldDoc._rev !== doc._rev) {
        callback(createError(MISSING_DOC));
      } else {
        oStore.delete(id);
        ret = {ok: true, id, rev: '0-0'};
        if (opts.ctx) { // return immediately
          callback(null, ret);
        }
      }
    };
  };
6432
  // Delete the whole database, closing any open connection first.
  api._destroy = function (opts, callback) {
    changesHandler.removeAllListeners(dbName);

    //Close open request for "dbName" database to fix ie delay.
    var openReq = openReqList.get(dbName);
    if (openReq && openReq.result) {
      openReq.result.close();
      cachedDBs.delete(dbName);
    }
    var req = indexedDB.deleteDatabase(dbName);

    req.onsuccess = function () {
      //Remove open request from the list.
      openReqList.delete(dbName);
      if (hasLocalStorage() && (dbName in localStorage)) {
        delete localStorage[dbName];
      }
      callback(null, { 'ok': true });
    };

    req.onerror = idbError(callback);
  };
6455
  // fast path: reuse an already-open handle for this db name
  var cached = cachedDBs.get(dbName);

  if (cached) {
    idb = cached.idb;
    api._meta = cached.global;
    return nextTick(function () {
      callback(null, api);
    });
  }

  var req = indexedDB.open(dbName, ADAPTER_VERSION);
  openReqList.set(dbName, req);

  req.onupgradeneeded = function (e) {
    var db = e.target.result;
    if (e.oldVersion < 1) {
      return createSchema(db); // new db, initial schema
    }
    // do migrations

    var txn = e.currentTarget.transaction;
    // these migrations have to be done in this function, before
    // control is returned to the event loop, because IndexedDB

    if (e.oldVersion < 3) {
      createLocalStoreSchema(db); // v2 -> v3
    }
    if (e.oldVersion < 4) {
      addAttachAndSeqStore(db); // v3 -> v4
    }

    var migrations = [
      addDeletedOrLocalIndex, // v1 -> v2
      migrateLocalStore, // v2 -> v3
      migrateAttsAndSeqs, // v3 -> v4
      migrateMetadata // v4 -> v5
    ];

    // run the data migrations sequentially, starting at the stored version
    var i = e.oldVersion;

    function next() {
      var migration = migrations[i - 1];
      i++;
      if (migration) {
        migration(txn, next);
      }
    }

    next();
  };

  req.onsuccess = function (e) {

    idb = e.target.result;

    // another connection wants to upgrade/delete: release our handle
    idb.onversionchange = function () {
      idb.close();
      cachedDBs.delete(dbName);
    };

    idb.onabort = function (e) {
      guardedConsole('error', 'Database has a global failure', e.target.error);
      idbGlobalFailureError = e.target.error;
      idb.close();
      cachedDBs.delete(dbName);
    };

    // Do a few setup operations (in parallel as much as possible):
    // 1. Fetch meta doc
    // 2. Check blob support
    // 3. Calculate docCount
    // 4. Generate an instanceId if necessary
    // 5. Store docCount and instanceId on meta doc

    var txn = idb.transaction([
      META_STORE,
      DETECT_BLOB_SUPPORT_STORE,
      DOC_STORE
    ], 'readwrite');

    var storedMetaDoc = false;
    var metaDoc;
    var docCount;
    var blobSupport;
    var instanceId;

    // finishes only once BOTH blob detection and the meta-doc commit are done
    function completeSetup() {
      if (typeof blobSupport === 'undefined' || !storedMetaDoc) {
        return;
      }
      api._meta = {
        name: dbName,
        instanceId,
        blobSupport
      };

      cachedDBs.set(dbName, {
        idb,
        global: api._meta
      });
      callback(null, api);
    }

    // writes the meta doc once both the fetched meta doc and docCount arrive
    function storeMetaDocIfReady() {
      if (typeof docCount === 'undefined' || typeof metaDoc === 'undefined') {
        return;
      }
      var instanceKey = dbName + '_id';
      if (instanceKey in metaDoc) {
        instanceId = metaDoc[instanceKey];
      } else {
        metaDoc[instanceKey] = instanceId = uuid();
      }
      metaDoc.docCount = docCount;
      txn.objectStore(META_STORE).put(metaDoc);
    }

    //
    // fetch or generate the instanceId
    //
    txn.objectStore(META_STORE).get(META_STORE).onsuccess = function (e) {
      metaDoc = e.target.result || { id: META_STORE };
      storeMetaDocIfReady();
    };

    //
    // countDocs
    //
    countDocs(txn, function (count) {
      docCount = count;
      storeMetaDocIfReady();
    });

    //
    // check blob support
    //
    if (!blobSupportPromise) {
      // make sure blob support is only checked once
      blobSupportPromise = checkBlobSupport(txn, DETECT_BLOB_SUPPORT_STORE, 'key');
    }

    blobSupportPromise.then(function (val) {
      blobSupport = val;
      completeSetup();
    });

    // only when the metadata put transaction has completed,
    // consider the setup done
    txn.oncomplete = function () {
      storedMetaDoc = true;
      completeSetup();
    };
    txn.onabort = idbError(callback);
  };

  req.onerror = function (e) {
    var msg = e.target.error && e.target.error.message;

    if (!msg) {
      msg = 'Failed to open indexedDB, are you in private browsing mode?';
    } else if (msg.indexOf("stored database is a higher version") !== -1) {
      // NOTE(review): `msg` becomes an Error object here while the other
      // branches keep it a string — confirm createError handles both
      msg = new Error('This DB was created with the newer "indexeddb" adapter, but you are trying to open it with the older "idb" adapter');
    }

    guardedConsole('error', msg);
    callback(createError(IDB_ERROR, msg));
  };
}
6624
// Feature-detect a usable IndexedDB implementation. Following #7085 buggy
// idb versions (typically Safari < 10.1) are considered valid.
IdbPouch.valid = function () {
  // On Firefox SecurityError is thrown while referencing indexedDB if cookies
  // are not allowed. `typeof indexedDB` also triggers the error.
  try {
    // some outdated implementations of IDB that appear on Samsung
    // and HTC Android devices <4.4 are missing IDBKeyRange
    var hasIndexedDB = typeof indexedDB !== 'undefined';
    var hasKeyRange = typeof IDBKeyRange !== 'undefined';
    return hasIndexedDB && hasKeyRange;
  } catch (e) {
    return false;
  }
};
6639
// Plugin entry point: registers this adapter under the name 'idb'; the
// third argument marks it as eligible to be used by default.
function IDBPouch (PouchDB) {
  PouchDB.adapter('idb', IdbPouch, true);
}
6643
// dead simple promise pool, inspired by https://github.com/timdp/es6-promise-pool
// but much smaller in code size. limits the number of concurrent promises that are executed


/**
 * Run an array of promise-returning factories with at most `limit` of them
 * pending at any one time.
 *
 * @param {Array<function(): Promise>} promiseFactories - zero-arg functions,
 *   each returning a promise; started in array order.
 * @param {number} limit - maximum number of concurrently pending promises.
 * @returns {Promise<void>} resolves once every factory has settled; rejects
 *   with the first error (after all factories have settled).
 */
function pool(promiseFactories, limit) {
  return new Promise(function (resolve, reject) {
    var running = 0;
    var current = 0;
    var done = 0;
    var len = promiseFactories.length;
    var err;

    // nothing to run: resolve immediately (previously the promise would
    // hang forever, since `done === len` could never be reached)
    if (len === 0) {
      return resolve();
    }

    function runNext() {
      running++;
      promiseFactories[current++]().then(onSuccess, onError);
    }

    function doNext() {
      if (++done === len) {
        /* istanbul ignore if */
        if (err) {
          reject(err);
        } else {
          resolve();
        }
      } else {
        runNextBatch();
      }
    }

    function onSuccess() {
      running--;
      doNext();
    }

    /* istanbul ignore next */
    function onError(thisErr) {
      running--;
      // only the first error is kept; later ones are dropped
      err = err || thisErr;
      doNext();
    }

    function runNextBatch() {
      while (running < limit && current < len) {
        runNext();
      }
    }

    runNextBatch();
  });
}
6695
// tuning constants for the HTTP adapter code that follows
const CHANGES_BATCH_SIZE = 25;
const MAX_SIMULTANEOUS_REVS = 50;
const CHANGES_TIMEOUT_BUFFER = 5000;
const DEFAULT_HEARTBEAT = 10000;

// cache for _bulk_get support detection (populated outside this view)
const supportsBulkGetMap = {};
6702
// Convert every base64 attachment on a row's doc (row.doc or row.ok) into a
// Blob/Buffer in place via b64ToBluffer. No-op when there are no attachments.
function readAttachmentsAsBlobOrBuffer(row) {
  const doc = row.doc || row.ok;
  if (!doc || !doc._attachments) {
    return;
  }
  const atts = doc._attachments;
  for (const filename of Object.keys(atts)) {
    const att = atts[filename];
    att.data = b64ToBluffer(att.data, att.content_type);
  }
}
6714
// Percent-encode a document id for use in a URL path, keeping the
// '_design/' and '_local/' prefixes verbatim.
function encodeDocId(id) {
  const isDesign = /^_design/.test(id);
  if (isDesign) {
    return '_design/' + encodeURIComponent(id.slice('_design/'.length));
  }
  const isLocal = id.startsWith('_local/');
  if (isLocal) {
    return '_local/' + encodeURIComponent(id.slice('_local/'.length));
  }
  return encodeURIComponent(id);
}
6724
/**
 * Convert any binary attachment data on `doc` to base64 strings, in place.
 *
 * @param {Object} doc - document whose `_attachments` (if any) are mutated.
 * @returns {Promise<void>} resolves once every attachment is a base64 string.
 */
function preprocessAttachments$1(doc) {
  // bug fix: Object.keys(...) is an array and therefore always truthy, so
  // the attachment-free fast path never fired; check .length instead
  if (!doc._attachments || !Object.keys(doc._attachments).length) {
    return Promise.resolve();
  }

  return Promise.all(Object.keys(doc._attachments).map(function (key) {
    const attachment = doc._attachments[key];
    if (attachment.data && typeof attachment.data !== 'string') {
      return new Promise(function (resolve) {
        blobToBase64(attachment.data, resolve);
      }).then(function (b64) {
        attachment.data = b64;
      });
    }
  }));
}
6741
// True when opts.prefix is an http(s) URL (as opposed to being absent
// or some other kind of prefix).
function hasUrlPrefix(opts) {
  if (!opts.prefix) {
    return false;
  }
  const { protocol } = parseUri(opts.prefix);
  return protocol === 'http' || protocol === 'https';
}
6749
// Parse `name` into a host-description object (protocol, host, port,
// path, db, auth, ...) suitable for genUrl/genDBUrl.
function getHost(name, opts) {
  // When opts.prefix is an http(s) URL, the db name must be encoded and
  // appended to that prefix (see pouchdb#5574).
  if (hasUrlPrefix(opts)) {
    const dbName = opts.name.substr(opts.prefix.length);
    // make sure the prefix ends in exactly one '/'
    const prefixWithSlash = opts.prefix.replace(/\/?$/, '/');
    name = prefixWithSlash + encodeURIComponent(dbName);
  }

  const uri = parseUri(name);
  if (uri.user || uri.password) {
    uri.auth = {username: uri.user, password: uri.password};
  }

  // Strip any leading/trailing '/' from the path, then split on '/'.
  const segments = uri.path.replace(/(^\/|\/$)/g, '').split('/');

  // The last segment is the database name; everything before it stays
  // as the host path.
  uri.db = segments.pop();
  // Avoid double-encoding a db name that is already percent-encoded.
  if (uri.db.indexOf('%') === -1) {
    uri.db = encodeURIComponent(uri.db);
  }

  uri.path = segments.join('/');

  return uri;
}
6780
// Generate a URL pointing at `path` inside the database described by `opts`.
function genDBUrl(opts, path) {
  return genUrl(opts, `${opts.db}/${path}`);
}
6785
// Generate a URL for `path` on the host described by `opts`
// (protocol, host, optional port, optional base path).
function genUrl(opts, path) {
  const portPart = opts.port ? `:${opts.port}` : '';
  // A non-empty host path needs a '/' delimiter before `path`;
  // otherwise the delimiter is the empty string.
  const pathDel = opts.path ? '/' : '';
  return `${opts.protocol}://${opts.host}${portPart}/${opts.path}${pathDel}${path}`;
}
6798
// Serialize a flat params object into a '?'-prefixed query string, or ''
// when there are no params. Values are percent-encoded; keys are assumed
// to be URL-safe.
function paramsToStr(params) {
  const entries = Object.entries(params);
  if (!entries.length) {
    return '';
  }
  const pairs = entries.map(([key, value]) => `${key}=${encodeURIComponent(value)}`);
  return `?${pairs.join('&')}`;
}
6807
// Old IE / Trident / legacy Edge aggressively cache GET requests, so
// callers append a cache-busting nonce for them. Only GET requests
// (the default method) need busting.
function shouldCacheBust(opts) {
  let ua = '';
  if (typeof navigator !== 'undefined' && navigator.userAgent) {
    ua = navigator.userAgent.toLowerCase();
  }
  const isLegacyMicrosoft =
    ua.includes('msie') || ua.includes('trident') || ua.includes('edge');
  const isGET = !('method' in opts) || opts.method === 'GET';
  return isLegacyMicrosoft && isGET;
}
6817
6818// Implements the PouchDB API for dealing with CouchDB instances over HTTP
6819function HttpPouch(opts, callback) {
6820
6821 // The functions that will be publicly available for HttpPouch
6822 const api = this;
6823
6824 const host = getHost(opts.name, opts);
6825 const dbUrl = genDBUrl(host, '');
6826
6827 opts = clone(opts);
6828
  // Shared fetch wrapper for all HTTP traffic from this adapter:
  // sends credentials, attaches Basic auth (from opts.auth or the db
  // URL) and any user-supplied headers, appends a cache-busting nonce
  // for legacy IE/Edge, then delegates to the configured fetch.
  const ourFetch = async function (url, options) {

    options = options || {};
    options.headers = options.headers || new h();

    options.credentials = 'include';

    if (opts.auth || host.auth) {
      const nAuth = opts.auth || host.auth;
      const str = nAuth.username + ':' + nAuth.password;
      // btoa only handles latin-1; the escape/encodeURIComponent dance
      // makes arbitrary-unicode credentials safe to base64-encode.
      const token = thisBtoa(unescape(encodeURIComponent(str)));
      options.headers.set('Authorization', 'Basic ' + token);
    }

    // user-supplied per-database headers are appended on every request
    const headers = opts.headers || {};
    Object.keys(headers).forEach(function (key) {
      options.headers.append(key, headers[key]);
    });

    /* istanbul ignore if */
    if (shouldCacheBust(options)) {
      url += (url.indexOf('?') === -1 ? '?' : '&') + '_nonce=' + Date.now();
    }

    const fetchFun = opts.fetch || f$1;
    return await fetchFun(url, options);
  };
6856
  // Wrap an adapter function so the remote database is set up (created
  // if missing) before `fun` runs; any setup error is routed to the
  // trailing node-style callback instead of being thrown.
  function adapterFun$$1(name, fun) {
    return adapterFun(name, function (...args) {
      setup().then(function () {
        return fun.apply(this, args);
      }).catch(function (e) {
        // by convention the last argument is the callback
        const callback = args.pop();
        callback(e);
      });
    }).bind(api);
  }
6867
  // Fetch expecting a JSON response. Resolves with {ok, status, data};
  // rejects with a PouchDB error built from the response body when the
  // HTTP status is not ok. For array responses (e.g. _bulk_docs), rows
  // carrying `error`/`missing` are converted to error objects in place.
  async function fetchJSON(url, options) {

    const result = {};

    options = options || {};
    options.headers = options.headers || new h();

    // default to JSON in both directions unless the caller overrode it
    if (!options.headers.get('Content-Type')) {
      options.headers.set('Content-Type', 'application/json');
    }
    if (!options.headers.get('Accept')) {
      options.headers.set('Accept', 'application/json');
    }

    const response = await ourFetch(url, options);
    result.ok = response.ok;
    result.status = response.status;
    const json = await response.json();

    result.data = json;
    if (!result.ok) {
      result.data.status = result.status;
      const err = generateErrorFromResponse(result.data);
      throw err;
    }

    if (Array.isArray(result.data)) {
      result.data = result.data.map(function (v) {
        if (v.error || v.missing) {
          return generateErrorFromResponse(v);
        } else {
          return v;
        }
      });
    }

    return result;
  }
6906
  // Memoized setup promise; cleared again on failure so setup is retried.
  let setupPromise;

  // Ensure the remote database exists (unless opts.skip_setup): probe it
  // with a GET, create it with a PUT on 404, and treat a 412 (creation
  // race: it already exists) as success.
  async function setup() {
    if (opts.skip_setup) {
      return Promise.resolve();
    }

    // If there is a setup in process or previous successful setup
    // done then we will use that
    // If previous setups have been rejected we will try again
    if (setupPromise) {
      return setupPromise;
    }

    setupPromise = fetchJSON(dbUrl).catch(function (err) {
      if (err && err.status && err.status === 404) {
        // Doesnt exist, create it
        explainError(404, 'PouchDB is just detecting if the remote exists.');
        return fetchJSON(dbUrl, {method: 'PUT'});
      } else {
        return Promise.reject(err);
      }
    }).catch(function (err) {
      // If we try to create a database that already exists, skipped in
      // istanbul since its catching a race condition.
      /* istanbul ignore if */
      if (err && err.status && err.status === 412) {
        return true;
      }
      return Promise.reject(err);
    });

    // a failed setup must not be cached, so the next call retries
    setupPromise.catch(function () {
      setupPromise = null;
    });

    return setupPromise;
  }
6945
  // Report the adapter as ready on the next tick, so the constructor
  // callback always fires asynchronously.
  nextTick(function () {
    callback(null, api);
  });

  // marks this adapter as talking to a remote database
  api._remote = true;

  /* istanbul ignore next */
  api.type = function () {
    return 'http';
  };
6956
  // Identifier for this remote database: the server uuid plus the db
  // name when the welcome endpoint provides one, otherwise the db URL.
  api.id = adapterFun$$1('id', async function (callback) {
    let result;
    try {
      const response = await ourFetch(genUrl(host, ''));
      result = await response.json();
    } catch (err) {
      result = {};
    }

    // Bad response or missing `uuid` should not prevent ID generation.
    const uuid$$1 = (result && result.uuid) ? (result.uuid + host.db) : genDBUrl(host, '');
    callback(null, uuid$$1);
  });
6970
  // Sends a POST request to the host calling the couchdb _compact function
  // version: The version of CouchDB it is running
  // Then polls the info endpoint until compaction has finished.
  api.compact = adapterFun$$1('compact', async function (opts, callback) {
    if (typeof opts === 'function') {
      callback = opts;
      opts = {};
    }
    opts = clone(opts);

    await fetchJSON(genDBUrl(host, '_compact'), {method: 'POST'});

    function ping() {
      api.info(function (err, res) {
        // CouchDB may send a "compact_running:true" if it's
        // already compacting. PouchDB Server doesn't.
        /* istanbul ignore else */
        if (res && !res.compact_running) {
          callback(null, {ok: true});
        } else {
          // still compacting; poll again after opts.interval ms
          setTimeout(ping, opts.interval || 200);
        }
      });
    }
    // Ping the http if it's finished compaction
    ping();
  });
6997
  // Fetch multiple docs/revisions in one round trip. Uses the server's
  // _bulk_get endpoint when available; otherwise falls back to a shim
  // that issues batched individual bulkGet requests. Support is probed
  // once per database URL and cached in supportsBulkGetMap.
  api.bulkGet = adapterFun('bulkGet', function (opts, callback) {
    const self = this;

    async function doBulkGet(cb) {
      const params = {};
      if (opts.revs) {
        params.revs = true;
      }
      if (opts.attachments) {
        /* istanbul ignore next */
        params.attachments = true;
      }
      if (opts.latest) {
        params.latest = true;
      }
      try {
        const result = await fetchJSON(genDBUrl(host, '_bulk_get' + paramsToStr(params)), {
          method: 'POST',
          body: JSON.stringify({ docs: opts.docs})
        });

        if (opts.attachments && opts.binary) {
          result.data.results.forEach(function (res) {
            res.docs.forEach(readAttachmentsAsBlobOrBuffer);
          });
        }
        cb(null, result.data);
      } catch (error) {
        cb(error);
      }
    }

    /* istanbul ignore next */
    function doBulkGetShim() {
      // avoid "url too long error" by splitting up into multiple requests
      const batchSize = MAX_SIMULTANEOUS_REVS;
      const numBatches = Math.ceil(opts.docs.length / batchSize);
      let numDone = 0;
      const results = new Array(numBatches);

      function onResult(batchNum) {
        return function (err, res) {
          // err is impossible because shim returns a list of errs in that case
          results[batchNum] = res.results;
          if (++numDone === numBatches) {
            callback(null, {results: results.flat()});
          }
        };
      }

      for (let i = 0; i < numBatches; i++) {
        const subOpts = pick(opts, ['revs', 'attachments', 'binary', 'latest']);
        subOpts.docs = opts.docs.slice(i * batchSize,
          Math.min(opts.docs.length, (i + 1) * batchSize));
        bulkGet(self, subOpts, onResult(i));
      }
    }

    // mark the whole database as either supporting or not supporting _bulk_get
    const dbUrl = genUrl(host, '');
    const supportsBulkGet = supportsBulkGetMap[dbUrl];

    /* istanbul ignore next */
    if (typeof supportsBulkGet !== 'boolean') {
      // check if this database supports _bulk_get
      doBulkGet(function (err, res) {
        if (err) {
          supportsBulkGetMap[dbUrl] = false;
          explainError(
            err.status,
            'PouchDB is just detecting if the remote ' +
            'supports the _bulk_get API.'
          );
          doBulkGetShim();
        } else {
          supportsBulkGetMap[dbUrl] = true;
          callback(null, res);
        }
      });
    } else if (supportsBulkGet) {
      doBulkGet(callback);
    } else {
      doBulkGetShim();
    }
  });
7083
  // Calls GET on the host, which gets back a JSON string containing
  // couchdb: A welcome string
  // version: The version of CouchDB it is running
  // The db URL is attached to the result as `host`.
  api._info = async function (callback) {
    try {
      await setup();
      const response = await ourFetch(genDBUrl(host, ''));
      const info = await response.json();
      info.host = genDBUrl(host, '');
      callback(null, info);
    } catch (err) {
      callback(err);
    }
  };
7098
  // Raw fetch against this database. A leading '/' addresses the server
  // root; otherwise the path is resolved relative to the database URL.
  api.fetch = async function (path, options) {
    await setup();
    const url = path.substring(0, 1) === '/' ?
      genUrl(host, path.substring(1)) :
      genDBUrl(host, path);
    return ourFetch(url, options);
  };
7106
  // Get the document with the given id from the database given by host.
  // The id could be solely the _id in the database, or it may be a
  // _design/ID or _local/ID path
  api.get = adapterFun$$1('get', async function (id, opts, callback) {
    // If no options were given, set the callback to the second parameter
    if (typeof opts === 'function') {
      callback = opts;
      opts = {};
    }
    opts = clone(opts);

    // List of parameters to add to the GET request
    const params = {};

    if (opts.revs) {
      params.revs = true;
    }

    if (opts.revs_info) {
      params.revs_info = true;
    }

    if (opts.latest) {
      params.latest = true;
    }

    if (opts.open_revs) {
      // open_revs may be "all" or an array of revs; arrays go as JSON
      if (opts.open_revs !== "all") {
        opts.open_revs = JSON.stringify(opts.open_revs);
      }
      params.open_revs = opts.open_revs;
    }

    if (opts.rev) {
      params.rev = opts.rev;
    }

    if (opts.conflicts) {
      params.conflicts = opts.conflicts;
    }

    /* istanbul ignore if */
    if (opts.update_seq) {
      params.update_seq = opts.update_seq;
    }

    id = encodeDocId(id);

    // Replace attachment stubs on `doc` with real data, fetched one
    // attachment per request (max 5 in parallel).
    function fetchAttachments(doc) {
      const atts = doc._attachments;
      const filenames = atts && Object.keys(atts);
      if (!atts || !filenames.length) {
        return;
      }
      // we fetch these manually in separate XHRs, because
      // Sync Gateway would normally send it back as multipart/mixed,
      // which we cannot parse. Also, this is more efficient than
      // receiving attachments as base64-encoded strings.
      async function fetchData(filename) {
        const att = atts[filename];
        const path = encodeDocId(doc._id) + '/' + encodeAttachmentId(filename) +
          '?rev=' + doc._rev;

        const response = await ourFetch(genDBUrl(host, path));

        let blob;
        if ('buffer' in response) {
          blob = await response.buffer();
        } else {
          /* istanbul ignore next */
          blob = await response.blob();
        }

        let data;
        if (opts.binary) {
          // only set blob.type when the prototype allows assignment
          const typeFieldDescriptor = Object.getOwnPropertyDescriptor(blob.__proto__, 'type');
          if (!typeFieldDescriptor || typeFieldDescriptor.set) {
            blob.type = att.content_type;
          }
          data = blob;
        } else {
          data = await new Promise(function (resolve) {
            blobToBase64(blob, resolve);
          });
        }

        // the stub has been resolved into inline data
        delete att.stub;
        delete att.length;
        att.data = data;
      }

      const promiseFactories = filenames.map(function (filename) {
        return function () {
          return fetchData(filename);
        };
      });

      // This limits the number of parallel xhr requests to 5 any time
      // to avoid issues with maximum browser request limits
      return pool(promiseFactories, 5);
    }

    // Handles both a single doc and the open_revs array-of-{ok: doc} shape.
    function fetchAllAttachments(docOrDocs) {
      if (Array.isArray(docOrDocs)) {
        return Promise.all(docOrDocs.map(function (doc) {
          if (doc.ok) {
            return fetchAttachments(doc.ok);
          }
        }));
      }
      return fetchAttachments(docOrDocs);
    }

    const url = genDBUrl(host, id + paramsToStr(params));
    try {
      const res = await fetchJSON(url);
      if (opts.attachments) {
        await fetchAllAttachments(res.data);
      }
      callback(null, res.data);
    } catch (error) {
      error.docId = id;
      callback(error);
    }
  });
7232
7233
  // Delete the document given by doc from the database given by host.
  // Accepts either (doc, opts, cb) or (id, rev, opts, cb) call styles.
  api.remove = adapterFun$$1('remove', async function (docOrId, optsOrRev, opts, cb) {
    let doc;
    if (typeof optsOrRev === 'string') {
      // id, rev, opts, callback style
      doc = {
        _id: docOrId,
        _rev: optsOrRev
      };
      if (typeof opts === 'function') {
        cb = opts;
        opts = {};
      }
    } else {
      // doc, opts, callback style
      doc = docOrId;
      if (typeof optsOrRev === 'function') {
        cb = optsOrRev;
        opts = {};
      } else {
        cb = opts;
        opts = optsOrRev;
      }
    }

    // the rev may live on the doc itself or be passed via opts.rev
    const rev$$1 = (doc._rev || opts.rev);
    const url = genDBUrl(host, encodeDocId(doc._id)) + '?rev=' + rev$$1;

    try {
      const result = await fetchJSON(url, {method: 'DELETE'});
      cb(null, result.data);
    } catch (error) {
      cb(error);
    }
  });
7269
7270 function encodeAttachmentId(attachmentId) {
7271 return attachmentId.split("/").map(encodeURIComponent).join("/");
7272 }
7273
  // Get the attachment
  // Resolves to a Buffer under Node (response.buffer) or a Blob in the
  // browser; the content type is copied onto the result when possible.
  api.getAttachment = adapterFun$$1('getAttachment', async function (docId, attachmentId,
                                                                     opts, callback) {
    if (typeof opts === 'function') {
      callback = opts;
      opts = {};
    }
    const params = opts.rev ? ('?rev=' + opts.rev) : '';
    const url = genDBUrl(host, encodeDocId(docId)) + '/' +
      encodeAttachmentId(attachmentId) + params;
    let contentType;
    try {
      const response = await ourFetch(url, {method: 'GET'});

      if (!response.ok) {
        throw response;
      }

      contentType = response.headers.get('content-type');
      let blob;
      if (typeof process !== 'undefined' && !process.browser && typeof response.buffer === 'function') {
        blob = await response.buffer();
      } else {
        /* istanbul ignore next */
        blob = await response.blob();
      }

      // TODO: also remove
      if (typeof process !== 'undefined' && !process.browser) {
        // only set blob.type when the prototype allows assignment
        const typeFieldDescriptor = Object.getOwnPropertyDescriptor(blob.__proto__, 'type');
        if (!typeFieldDescriptor || typeFieldDescriptor.set) {
          blob.type = contentType;
        }
      }
      callback(null, blob);
    } catch (err) {
      callback(err);
    }
  });
7313
  // Remove the attachment given by the id and rev
  api.removeAttachment = adapterFun$$1('removeAttachment', async function (
    docId,
    attachmentId,
    rev$$1,
    callback,
  ) {
    const url = genDBUrl(host, encodeDocId(docId) + '/' + encodeAttachmentId(attachmentId)) + '?rev=' + rev$$1;

    try {
      const result = await fetchJSON(url, {method: 'DELETE'});
      callback(null, result.data);
    } catch (error) {
      callback(error);
    }
  });
7330
  // Add the attachment given by blob and its contentType property
  // to the document with the given id, the revision given by rev, and
  // add it to the database given by host.
  api.putAttachment = adapterFun$$1('putAttachment', async function (
    docId,
    attachmentId,
    rev$$1,
    blob,
    type,
    callback,
  ) {
    // rev may be omitted: (docId, attachmentId, blob, type, callback)
    if (typeof type === 'function') {
      callback = type;
      type = blob;
      blob = rev$$1;
      rev$$1 = null;
    }
    const id = encodeDocId(docId) + '/' + encodeAttachmentId(attachmentId);
    let url = genDBUrl(host, id);
    if (rev$$1) {
      url += '?rev=' + rev$$1;
    }

    if (typeof blob === 'string') {
      // input is assumed to be a base64 string
      let binary;
      try {
        binary = thisAtob(blob);
      } catch (err) {
        return callback(createError(BAD_ARG,
          'Attachment is not a valid base64 string'));
      }
      blob = binary ? binStringToBluffer(binary, type) : '';
    }

    try {
      // Add the attachment
      const result = await fetchJSON(url, {
        headers: new h({'Content-Type': type}),
        method: 'PUT',
        body: blob
      });
      callback(null, result.data);
    } catch (error) {
      callback(error);
    }
  });
7378
  // Update/create multiple documents given by req in the database
  // given by host.
  api._bulkDocs = async function (req, opts, callback) {
    // If new_edits=false then it prevents the database from creating
    // new revision numbers for the documents. Instead it just uses
    // the old ones. This is used in database replication.
    req.new_edits = opts.new_edits;

    try {
      await setup();
      // binary attachment data must be base64 before JSON serialization
      await Promise.all(req.docs.map(preprocessAttachments$1));

      // Update/create the documents
      const result = await fetchJSON(genDBUrl(host, '_bulk_docs'), {
        method: 'POST',
        body: JSON.stringify(req)
      });
      callback(null, result.data);
    } catch (error) {
      callback(error);
    }
  };
7401
  // Update/create document
  api._put = async function (doc, opts, callback) {
    try {
      await setup();
      // binary attachment data must be base64 before JSON serialization
      await preprocessAttachments$1(doc);

      const result = await fetchJSON(genDBUrl(host, encodeDocId(doc._id)), {
        method: 'PUT',
        body: JSON.stringify(doc)
      });
      callback(null, result.data);
    } catch (error) {
      // tag the error with the doc it belongs to, for callers
      error.docId = doc && doc._id;
      callback(error);
    }
  };
7418
7419
  // Get a listing of the documents in the database given
  // by host and ordered by increasing id.
  // Most options become query params; `keys` switches to a POST body.
  api.allDocs = adapterFun$$1('allDocs', async function (opts, callback) {
    if (typeof opts === 'function') {
      callback = opts;
      opts = {};
    }
    opts = clone(opts);

    // List of parameters to add to the GET request
    const params = {};
    let body;
    let method = 'GET';

    if (opts.conflicts) {
      params.conflicts = true;
    }

    /* istanbul ignore if */
    if (opts.update_seq) {
      params.update_seq = true;
    }

    if (opts.descending) {
      params.descending = true;
    }

    if (opts.include_docs) {
      params.include_docs = true;
    }

    // added in CouchDB 1.6.0
    if (opts.attachments) {
      params.attachments = true;
    }

    if (opts.key) {
      params.key = JSON.stringify(opts.key);
    }

    // start_key/end_key are aliases for startkey/endkey
    if (opts.start_key) {
      opts.startkey = opts.start_key;
    }

    if (opts.startkey) {
      params.startkey = JSON.stringify(opts.startkey);
    }

    if (opts.end_key) {
      opts.endkey = opts.end_key;
    }

    if (opts.endkey) {
      params.endkey = JSON.stringify(opts.endkey);
    }

    if (typeof opts.inclusive_end !== 'undefined') {
      params.inclusive_end = !!opts.inclusive_end;
    }

    if (typeof opts.limit !== 'undefined') {
      params.limit = opts.limit;
    }

    if (typeof opts.skip !== 'undefined') {
      params.skip = opts.skip;
    }

    const paramStr = paramsToStr(params);

    // a `keys` list can be arbitrarily long, so it goes in a POST body
    if (typeof opts.keys !== 'undefined') {
      method = 'POST';
      body = {keys: opts.keys};
    }

    try {
      const result = await fetchJSON(genDBUrl(host, '_all_docs' + paramStr), {
        method,
        body: JSON.stringify(body)
      });
      if (opts.include_docs && opts.attachments && opts.binary) {
        result.data.rows.forEach(readAttachmentsAsBlobOrBuffer);
      }
      callback(null, result.data);
    } catch (error) {
      callback(error);
    }
  });
7508
  // Get a list of changes made to documents in the database given by host.
  // TODO According to the README, there should be two other methods here,
  // api.changes.addListener and api.changes.removeListener.
  // Results are fetched in pages of `batch_size`; continuous feeds use
  // longpoll and re-request after each page. Returns {cancel} to stop.
  api._changes = function (opts) {

    // We internally page the results of a changes request, this means
    // if there is a large set of changes to be returned we can start
    // processing them quicker instead of waiting on the entire
    // set of changes to return and attempting to process them at once
    const batchSize = 'batch_size' in opts ? opts.batch_size : CHANGES_BATCH_SIZE;

    opts = clone(opts);

    if (opts.continuous && !('heartbeat' in opts)) {
      opts.heartbeat = DEFAULT_HEARTBEAT;
    }

    let requestTimeout = ('timeout' in opts) ? opts.timeout : 30 * 1000;

    // ensure CHANGES_TIMEOUT_BUFFER applies
    if ('timeout' in opts && opts.timeout &&
      (requestTimeout - opts.timeout) < CHANGES_TIMEOUT_BUFFER) {
      requestTimeout = opts.timeout + CHANGES_TIMEOUT_BUFFER;
    }

    /* istanbul ignore if */
    if ('heartbeat' in opts && opts.heartbeat &&
      (requestTimeout - opts.heartbeat) < CHANGES_TIMEOUT_BUFFER) {
      requestTimeout = opts.heartbeat + CHANGES_TIMEOUT_BUFFER;
    }
    // NOTE(review): requestTimeout is computed above but never passed to
    // the fetch below — appears to be a vestige of the XHR-based
    // implementation; confirm before removing.

    const params = {};
    if ('timeout' in opts && opts.timeout) {
      params.timeout = opts.timeout;
    }

    // limit === false means "no limit"
    const limit = (typeof opts.limit !== 'undefined') ? opts.limit : false;
    let leftToFetch = limit;

    if (opts.style) {
      params.style = opts.style;
    }

    // function filters run client-side, so the docs must be included
    if (opts.include_docs || opts.filter && typeof opts.filter === 'function') {
      params.include_docs = true;
    }

    if (opts.attachments) {
      params.attachments = true;
    }

    if (opts.continuous) {
      params.feed = 'longpoll';
    }

    if (opts.seq_interval) {
      params.seq_interval = opts.seq_interval;
    }

    if (opts.conflicts) {
      params.conflicts = true;
    }

    if (opts.descending) {
      params.descending = true;
    }

    /* istanbul ignore if */
    if (opts.update_seq) {
      params.update_seq = true;
    }

    if ('heartbeat' in opts) {
      // If the heartbeat value is false, it disables the default heartbeat
      if (opts.heartbeat) {
        params.heartbeat = opts.heartbeat;
      }
    }

    if (opts.filter && typeof opts.filter === 'string') {
      params.filter = opts.filter;
    }

    if (opts.view && typeof opts.view === 'string') {
      params.filter = '_view';
      params.view = opts.view;
    }

    // If opts.query_params exists, pass it through to the changes request.
    // These parameters may be used by the filter on the source database.
    if (opts.query_params && typeof opts.query_params === 'object') {
      for (const param_name in opts.query_params) {
        /* istanbul ignore else */
        if (Object.prototype.hasOwnProperty.call(opts.query_params, param_name)) {
          params[param_name] = opts.query_params[param_name];
        }
      }
    }

    let method = 'GET';
    let body;

    if (opts.doc_ids) {
      // set this automagically for the user; it's annoying that couchdb
      // requires both a "filter" and a "doc_ids" param.
      params.filter = '_doc_ids';
      method = 'POST';
      body = {doc_ids: opts.doc_ids };
    }
    /* istanbul ignore next */
    else if (opts.selector) {
      // set this automagically for the user, similar to above
      params.filter = '_selector';
      method = 'POST';
      body = {selector: opts.selector };
    }

    // lets `cancel()` abort an in-flight request
    const controller = new AbortController();
    let lastFetchedSeq;

    // Get all the changes starting with the one immediately after the
    // sequence number given by since.
    const fetchData = async function (since, callback) {
      if (opts.aborted) {
        return;
      }
      params.since = since;
      // "since" can be any kind of json object in Cloudant/CouchDB 2.x
      /* istanbul ignore next */
      if (typeof params.since === "object") {
        params.since = JSON.stringify(params.since);
      }

      if (opts.descending) {
        if (limit) {
          params.limit = leftToFetch;
        }
      } else {
        params.limit = (!limit || leftToFetch > batchSize) ?
          batchSize : leftToFetch;
      }

      // Set the options for the ajax call
      const url = genDBUrl(host, '_changes' + paramsToStr(params));
      const fetchOpts = {
        signal: controller.signal,
        method,
        body: JSON.stringify(body)
      };
      lastFetchedSeq = since;

      /* istanbul ignore if */
      if (opts.aborted) {
        return;
      }

      // Get the changes
      try {
        await setup();
        const result = await fetchJSON(url, fetchOpts);
        callback(null, result.data);
      } catch (error) {
        callback(error);
      }
    };

    // If opts.since exists, get all the changes from the sequence
    // number given by opts.since. Otherwise, get all the changes
    // from the sequence number 0.
    const results = {results: []};

    // Handles one page of changes, dispatches onChange per row, and
    // schedules the next page (or completes).
    const fetched = function (err, res) {
      if (opts.aborted) {
        return;
      }
      let raw_results_length = 0;
      // If the result of the ajax call (res) contains changes (res.results)
      if (res && res.results) {
        raw_results_length = res.results.length;
        results.last_seq = res.last_seq;
        let pending = null;
        let lastSeq = null;
        // Attach 'pending' property if server supports it (CouchDB 2.0+)
        /* istanbul ignore if */
        if (typeof res.pending === 'number') {
          pending = res.pending;
        }
        if (typeof results.last_seq === 'string' || typeof results.last_seq === 'number') {
          lastSeq = results.last_seq;
        }
        // For each change
        const req = {};
        req.query = opts.query_params;
        res.results = res.results.filter(function (c) {
          leftToFetch--;
          const ret = filterChange(opts)(c);
          if (ret) {
            if (opts.include_docs && opts.attachments && opts.binary) {
              readAttachmentsAsBlobOrBuffer(c);
            }
            if (opts.return_docs) {
              results.results.push(c);
            }
            opts.onChange(c, pending, lastSeq);
          }
          return ret;
        });
      } else if (err) {
        // In case of an error, stop listening for changes and call
        // opts.complete
        opts.aborted = true;
        opts.complete(err);
        return;
      }

      // The changes feed may have timed out with no results
      // if so reuse last update sequence
      if (res && res.last_seq) {
        lastFetchedSeq = res.last_seq;
      }

      const finished = (limit && leftToFetch <= 0) ||
        (res && raw_results_length < batchSize) ||
        (opts.descending);

      if ((opts.continuous && !(limit && leftToFetch <= 0)) || !finished) {
        // Queue a call to fetch again with the newest sequence number
        nextTick(function () { fetchData(lastFetchedSeq, fetched); });
      } else {
        // We're done, call the callback
        opts.complete(null, results);
      }
    };

    fetchData(opts.since || 0, fetched);

    // Return a method to cancel this method from processing any more
    return {
      cancel: function () {
        opts.aborted = true;
        controller.abort();
      }
    };
  };
7753
  // Given a set of document/revision IDs (given by req), returns the subset
  // of those that do NOT correspond to revisions stored in the database.
  // See http://wiki.apache.org/couchdb/HttpPostRevsDiff
  api.revsDiff = adapterFun$$1('revsDiff', async function (req, opts, callback) {
    // If no options were given, set the callback to be the second parameter
    if (typeof opts === 'function') {
      callback = opts;
      opts = {};
    }

    try {
      // Get the missing document/revision IDs
      const result = await fetchJSON(genDBUrl(host, '_revs_diff'), {
        method: 'POST',
        body: JSON.stringify(req)
      });
      callback(null, result.data);
    } catch (error) {
      callback(error);
    }
  });
7775
  // Nothing to release for an HTTP adapter.
  api._close = function (callback) {
    callback();
  };

  // Delete the remote database. A 404 means it is already gone, which
  // counts as success.
  api._destroy = async function (options, callback) {
    try {
      const json = await fetchJSON(genDBUrl(host, ''), {method: 'DELETE'});
      callback(null, json);
    } catch (error) {
      if (error.status === 404) {
        callback(null, {ok: true});
      } else {
        callback(error);
      }
    }
  };
7792}
7793
// HttpPouch is a valid adapter in every environment.
HttpPouch.valid = () => true;
7798
// Plugin entry point: register this adapter under both the 'http' and
// 'https' adapter names (third arg false: not preferred by default).
function HttpPouch$1 (PouchDB) {
  ['http', 'https'].forEach(function (scheme) {
    PouchDB.adapter(scheme, HttpPouch, false);
  });
}
7803
// 400-level error describing a malformed map/reduce query.
class QueryParseError extends Error {
  constructor(message) {
    super();
    Object.assign(this, {
      status: 400,
      name: 'query_parse_error',
      message,
      error: true,
    });
    try {
      // captureStackTrace is V8-only; ignore where it is unavailable
      Error.captureStackTrace(this, QueryParseError);
    } catch (e) {}
  }
}
7816
// 404-level error for a missing document or view.
class NotFoundError extends Error {
  constructor(message) {
    super();
    Object.assign(this, {
      status: 404,
      name: 'not_found',
      message,
      error: true,
    });
    try {
      // captureStackTrace is V8-only; ignore where it is unavailable
      Error.captureStackTrace(this, NotFoundError);
    } catch (e) {}
  }
}
7829
// 500-level error for invalid values fed to a builtin reduce function.
class BuiltInError extends Error {
  constructor(message) {
    super();
    Object.assign(this, {
      status: 500,
      name: 'invalid_value',
      message,
      error: true,
    });
    try {
      // captureStackTrace is V8-only; ignore where it is unavailable
      Error.captureStackTrace(this, BuiltInError);
    } catch (e) {}
  }
}
7842
// Bridge a promise to an optional node-style callback. The callback is
// invoked on the next tick so callers always observe async behavior;
// the original promise is returned either way.
function promisedCallback(promise, callback) {
  if (callback) {
    promise.then(
      (res) => nextTick(() => callback(null, res)),
      (reason) => nextTick(() => callback(reason))
    );
  }
  return promise;
}
7857
// Turn a promise-returning function into one that also accepts a
// trailing node-style callback, while still returning the promise.
function callbackify(fun) {
  return function (...args) {
    const maybeCb = args.pop();
    const promise = fun.apply(this, args);
    if (typeof maybeCb === 'function') {
      promisedCallback(promise, maybeCb);
    }
    return promise;
  };
}
7868
// Promise finally util similar to Q.finally: run finalPromiseFactory after
// the promise settles, then pass through the original value or rejection.
function fin(promise, finalPromiseFactory) {
  const onFulfilled = (res) => finalPromiseFactory().then(() => res);
  const onRejected = (reason) => finalPromiseFactory().then(() => {
    throw reason;
  });
  return promise.then(onFulfilled, onRejected);
}
7881
/**
 * Funnel every invocation of promiseFactory through the given task queue,
 * preserving each call's `this` binding and arguments.
 */
function sequentialize(queue, promiseFactory) {
  return function (...args) {
    return queue.add(() => promiseFactory.apply(this, args));
  };
}
7891
// uniq an array of strings, order not guaranteed
// similar to underscore/lodash _.uniq
function uniq(arr) {
  return Array.from(new Set(arr));
}
7903
// Collect a Map's keys into a plain array (insertion order).
function mapToKeysArray(map) {
  return Array.from(map.keys());
}
7912
// Build the standard error raised when a built-in reduce (e.g. _sum) is
// fed values that are not numbers or number arrays.
function createBuiltInError(name) {
  return new BuiltInError(
    'builtin ' + name +
    ' function requires map values to be numbers' +
    ' or number arrays'
  );
}
7919
// The built-in '_sum' reduce: plain numbers accumulate into a scalar;
// number arrays are summed element-wise. Mixing the two promotes the
// scalar accumulator to an array, folding scalars into index 0.
function sum(values) {
  let acc = 0;
  for (const value of values) {
    if (typeof value === 'number') {
      if (Array.isArray(acc)) {
        acc[0] += value; // scalar joins an element-wise sum at position 0
      } else {
        acc += value;
      }
    } else if (Array.isArray(value)) {
      if (typeof acc === 'number') {
        acc = [acc]; // promote the scalar accumulator to an array
      }
      for (let j = 0; j < value.length; j++) {
        const element = value[j];
        if (typeof element !== 'number') {
          throw createBuiltInError('_sum');
        }
        if (typeof acc[j] === 'undefined') {
          acc.push(element); // longer array extends the accumulator
        } else {
          acc[j] += element;
        }
      }
    } else { // not array/number
      throw createBuiltInError('_sum');
    }
  }
  return acc;
}
7949
// Helpers exposed to user-defined map/reduce functions via
// evalFunctionWithEval below. `log` routes through guardedConsole so it is
// safe to call even when no console is available.
var log = guardedConsole.bind(null, 'log');
var isArray = Array.isArray;
var toJSON = JSON.parse;
7953
// Turn a stringified map/reduce function into a callable one, evaluated in
// a scope that exposes only the CouchDB-style helpers (emit, sum, log,
// isArray, toJSON). Stripping a trailing semicolon lets "function(){};"
// style strings parse as an expression inside the parentheses.
function evalFunctionWithEval(func, emit) {
  return scopeEval(
    "return (" + func.replace(/;\s*$/, "") + ");",
    {
      emit,
      sum,
      log,
      isArray,
      toJSON
    }
  );
}
7966
7967/*
7968 * Simple task queue to sequentialize actions. Assumes
7969 * callbacks will eventually fire (once).
7970 */
7971
7972class TaskQueue$1 {
7973 constructor() {
7974 this.promise = Promise.resolve();
7975 }
7976
7977 add(promiseFactory) {
7978 this.promise = this.promise
7979 // just recover
7980 .catch(() => { })
7981 .then(() => promiseFactory());
7982 return this.promise;
7983 }
7984
7985 finish() {
7986 return this.promise;
7987 }
7988}
7989
// Produce the string form of a map/reduce definition for view signatures.
// Functions and strings keep their source text verbatim (mapreduce compat);
// anything else (e.g. a mango selector object) is JSON-stringified.
function stringify(input) {
  if (!input) {
    return 'undefined'; // backwards compat for empty reduce
  }
  switch (typeof input) {
    case 'function':
    case 'string':
      // map functions and built-in reduce names pass through as-is
      return input.toString();
    default:
      // e.g. a JSON object in the case of mango queries
      return JSON.stringify(input);
  }
}
8008
/* create a string signature for a view so we can cache it and uniq it */
function createViewSignature(mapFun, reduceFun) {
  // the trailing "undefined" preserves signatures from older versions
  return `${stringify(mapFun)}${stringify(reduceFun)}undefined`;
}
8014
/**
 * Create (or fetch from cache) the "view" object backing a map/reduce index.
 * The index data lives in a separate dependent database named
 * `<source>-mrview-<md5 of signature>` (or `-temp` for temporary views).
 * Resolves to {name, db, sourceDB, adapter, mapFun, reduceFun, seq}.
 */
async function createView(sourceDB, viewName, mapFun, reduceFun, temporary, localDocName) {
  const viewSignature = createViewSignature(mapFun, reduceFun);

  let cachedViews;
  if (!temporary) {
    // cache this to ensure we don't try to update the same view twice
    cachedViews = sourceDB._cachedViews = sourceDB._cachedViews || {};
    if (cachedViews[viewSignature]) {
      return cachedViews[viewSignature];
    }
  }

  const promiseForView = sourceDB.info().then(async function (info) {
    const depDbName = info.db_name + '-mrview-' +
      (temporary ? 'temp' : stringMd5(viewSignature));

    // save the view name in the source db so it can be cleaned up if necessary
    // (e.g. when the _design doc is deleted, remove all associated view data)
    function diffFunction(doc) {
      doc.views = doc.views || {};
      let fullViewName = viewName;
      if (fullViewName.indexOf('/') === -1) {
        // bare view name is shorthand for 'name/name'
        fullViewName = viewName + '/' + viewName;
      }
      const depDbs = doc.views[fullViewName] = doc.views[fullViewName] || {};
      /* istanbul ignore if */
      if (depDbs[depDbName]) {
        return; // no update necessary
      }
      depDbs[depDbName] = true;
      return doc;
    }
    await upsert(sourceDB, '_local/' + localDocName, diffFunction);
    const res = await sourceDB.registerDependentDatabase(depDbName);
    const db = res.db;
    db.auto_compaction = true;
    const view = {
      name: depDbName,
      db,
      sourceDB,
      adapter: sourceDB.adapter,
      mapFun,
      reduceFun
    };

    // resume indexing from where we left off, tracked in _local/lastSeq
    let lastSeqDoc;
    try {
      lastSeqDoc = await view.db.get('_local/lastSeq');
    } catch (err) {
      /* istanbul ignore if */
      if (err.status !== 404) {
        throw err;
      }
    }

    view.seq = lastSeqDoc ? lastSeqDoc.seq : 0;
    if (cachedViews) {
      // drop the cache entry when the dependent database is destroyed
      view.db.once('destroyed', function () {
        delete cachedViews[viewSignature];
      });
    }
    return view;
  });

  if (cachedViews) {
    // cache the in-flight promise so concurrent callers share one build
    cachedViews[viewSignature] = promiseForView;
  }
  return promiseForView;
}
8084
// Module-level state: one task queue per persistent view (keyed by view
// name), a single shared queue for temporary views, and the default batch
// size used when incrementally indexing from the changes feed.
const persistentQueues = {};
const tempViewQueue = new TaskQueue$1();
const CHANGES_BATCH_SIZE$1 = 50;
8088
// Split 'ddocname/viewname' into its two parts; a bare 'viewname' is
// shorthand for 'viewname/viewname'.
function parseViewName(name) {
  if (name.indexOf('/') === -1) {
    return [name, name];
  }
  return name.split('/');
}
8094
// True when a doc has exactly one leaf rev and it is first-generation
// ('1-...'), i.e. there is no prior state to look up.
function isGenOne(changes) {
  if (changes.length !== 1) {
    return false;
  }
  return /^1-/.test(changes[0].rev);
}
8100
// Surface a map/reduce exception as an 'error' event on the db; if nobody
// listens (so emit itself throws), fall back to console output with advice.
function emitError(db, e, data) {
  try {
    db.emit('error', e);
  } catch (emitFailure) {
    const advice =
      'The user\'s map/reduce function threw an uncaught error.\n' +
      'You can debug this error by doing:\n' +
      'myDatabase.on(\'error\', function (err) { debugger; });\n' +
      'Please double-check your map/reduce function.';
    guardedConsole('error', advice);
    guardedConsole('error', e, data);
  }
}
8113
8114/**
8115 * Returns an "abstract" mapreduce object of the form:
8116 *
8117 * {
8118 * query: queryFun,
8119 * viewCleanup: viewCleanupFun
8120 * }
8121 *
8122 * Arguments are:
8123 *
8124 * localDoc: string
8125 * This is for the local doc that gets saved in order to track the
8126 * "dependent" DBs and clean them up for viewCleanup. It should be
8127 * unique, so that indexer plugins don't collide with each other.
8128 * mapper: function (mapFunDef, emit)
8129 * Returns a map function based on the mapFunDef, which in the case of
8130 * normal map/reduce is just the de-stringified function, but may be
8131 * something else, such as an object in the case of pouchdb-find.
8132 * reducer: function (reduceFunDef)
8133 * Ditto, but for reducing. Modules don't have to support reducing
8134 * (e.g. pouchdb-find).
8135 * ddocValidator: function (ddoc, viewName)
8136 * Throws an error if the ddoc or viewName is not valid.
8137 * This could be a way to communicate to the user that the configuration for the
8138 * indexer is invalid.
8139 */
8140function createAbstractMapReduce(localDocName, mapper, reducer, ddocValidator) {
8141
8142 function tryMap(db, fun, doc) {
8143 // emit an event if there was an error thrown by a map function.
8144 // putting try/catches in a single function also avoids deoptimizations.
8145 try {
8146 fun(doc);
8147 } catch (e) {
8148 emitError(db, e, {fun, doc});
8149 }
8150 }
8151
8152 function tryReduce(db, fun, keys, values, rereduce) {
8153 // same as above, but returning the result or an error. there are two separate
8154 // functions to avoid extra memory allocations since the tryCode() case is used
8155 // for custom map functions (common) vs this function, which is only used for
8156 // custom reduce functions (rare)
8157 try {
8158 return {output : fun(keys, values, rereduce)};
8159 } catch (e) {
8160 emitError(db, e, {fun, keys, values, rereduce});
8161 return {error: e};
8162 }
8163 }
8164
8165 function sortByKeyThenValue(x, y) {
8166 const keyCompare = collate(x.key, y.key);
8167 return keyCompare !== 0 ? keyCompare : collate(x.value, y.value);
8168 }
8169
8170 function sliceResults(results, limit, skip) {
8171 skip = skip || 0;
8172 if (typeof limit === 'number') {
8173 return results.slice(skip, limit + skip);
8174 } else if (skip > 0) {
8175 return results.slice(skip);
8176 }
8177 return results;
8178 }
8179
8180 function rowToDocId(row) {
8181 const val = row.value;
8182 // Users can explicitly specify a joined doc _id, or it
8183 // defaults to the doc _id that emitted the key/value.
8184 const docId = (val && typeof val === 'object' && val._id) || row.id;
8185 return docId;
8186 }
8187
8188 function readAttachmentsAsBlobOrBuffer(res) {
8189 for (const row of res.rows) {
8190 const atts = row.doc && row.doc._attachments;
8191 if (!atts) {
8192 continue;
8193 }
8194 for (const filename of Object.keys(atts)) {
8195 const att = atts[filename];
8196 atts[filename].data = b64ToBluffer(att.data, att.content_type);
8197 }
8198 }
8199 }
8200
8201 function postprocessAttachments(opts) {
8202 return function (res) {
8203 if (opts.include_docs && opts.attachments && opts.binary) {
8204 readAttachmentsAsBlobOrBuffer(res);
8205 }
8206 return res;
8207 };
8208 }
8209
8210 function addHttpParam(paramName, opts, params, asJson) {
8211 // add an http param from opts to params, optionally json-encoded
8212 let val = opts[paramName];
8213 if (typeof val !== 'undefined') {
8214 if (asJson) {
8215 val = encodeURIComponent(JSON.stringify(val));
8216 }
8217 params.push(paramName + '=' + val);
8218 }
8219 }
8220
8221 function coerceInteger(integerCandidate) {
8222 if (typeof integerCandidate !== 'undefined') {
8223 const asNumber = Number(integerCandidate);
8224 // prevents e.g. '1foo' or '1.1' being coerced to 1
8225 if (!isNaN(asNumber) && asNumber === parseInt(integerCandidate, 10)) {
8226 return asNumber;
8227 } else {
8228 return integerCandidate;
8229 }
8230 }
8231 }
8232
8233 function coerceOptions(opts) {
8234 opts.group_level = coerceInteger(opts.group_level);
8235 opts.limit = coerceInteger(opts.limit);
8236 opts.skip = coerceInteger(opts.skip);
8237 return opts;
8238 }
8239
8240 function checkPositiveInteger(number) {
8241 if (number) {
8242 if (typeof number !== 'number') {
8243 return new QueryParseError(`Invalid value for integer: "${number}"`);
8244 }
8245 if (number < 0) {
8246 return new QueryParseError(`Invalid value for positive integer: "${number}"`);
8247 }
8248 }
8249 }
8250
  // Reject option combinations that can never produce results or that the
  // CouchDB view API disallows; throws QueryParseError on the first problem.
  function checkQueryParseError(options, fun) {
    // with descending:true the logical roles of start/end keys swap
    const startkeyName = options.descending ? 'endkey' : 'startkey';
    const endkeyName = options.descending ? 'startkey' : 'endkey';

    if (typeof options[startkeyName] !== 'undefined' &&
        typeof options[endkeyName] !== 'undefined' &&
        collate(options[startkeyName], options[endkeyName]) > 0) {
      throw new QueryParseError('No rows can match your key range, ' +
        'reverse your start_key and end_key or set {descending : true}');
    } else if (fun.reduce && options.reduce !== false) {
      if (options.include_docs) {
        throw new QueryParseError('{include_docs:true} is invalid for reduce');
      } else if (options.keys && options.keys.length > 1 &&
          !options.group && !options.group_level) {
        throw new QueryParseError('Multi-key fetches for reduce views must use ' +
          '{group: true}');
      }
    }
    // these must be positive integers when present (see checkPositiveInteger)
    for (const optionName of ['group_level', 'limit', 'skip']) {
      const error = checkPositiveInteger(options[optionName]);
      if (error) {
        throw error;
      }
    }
  }
8276
  // Execute a map/reduce query against a remote CouchDB over HTTP.
  // `fun` is either 'ddoc/view' (a persisted view, hit via GET/POST on
  // _design/.../_view/...) or an object (a temporary view, POSTed to
  // _temp_view). Resolves to the parsed JSON result, with attachments
  // optionally inflated to binary via postprocessAttachments.
  async function httpQuery(db, fun, opts) {
    // List of parameters to add to the PUT request
    let params = [];
    let body;
    let method = 'GET';
    let ok;

    // If opts.reduce exists and is defined, then add it to the list
    // of parameters.
    // If reduce=false then the results are that of only the map function
    // not the final result of map and reduce.
    addHttpParam('reduce', opts, params);
    addHttpParam('include_docs', opts, params);
    addHttpParam('attachments', opts, params);
    addHttpParam('limit', opts, params);
    addHttpParam('descending', opts, params);
    addHttpParam('group', opts, params);
    addHttpParam('group_level', opts, params);
    addHttpParam('skip', opts, params);
    addHttpParam('stale', opts, params);
    addHttpParam('conflicts', opts, params);
    addHttpParam('startkey', opts, params, true);
    addHttpParam('start_key', opts, params, true);
    addHttpParam('endkey', opts, params, true);
    addHttpParam('end_key', opts, params, true);
    addHttpParam('inclusive_end', opts, params);
    addHttpParam('key', opts, params, true);
    addHttpParam('update_seq', opts, params);

    // Format the list of parameters into a valid URI query string
    params = params.join('&');
    params = params === '' ? '' : '?' + params;

    // If keys are supplied, issue a POST to circumvent GET query string limits
    // see http://wiki.apache.org/couchdb/HTTP_view_API#Querying_Options
    if (typeof opts.keys !== 'undefined') {
      const MAX_URL_LENGTH = 2000;
      // according to http://stackoverflow.com/a/417184/680742,
      // the de facto URL length limit is 2000 characters

      const keysAsString = `keys=${encodeURIComponent(JSON.stringify(opts.keys))}`;
      if (keysAsString.length + params.length + 1 <= MAX_URL_LENGTH) {
        // If the keys are short enough, do a GET. we do this to work around
        // Safari not understanding 304s on POSTs (see pouchdb/pouchdb#1239)
        params += (params[0] === '?' ? '&' : '?') + keysAsString;
      } else {
        method = 'POST';
        if (typeof fun === 'string') {
          body = {keys: opts.keys};
        } else { // fun is {map : mapfun}, so append to this
          fun.keys = opts.keys;
        }
      }
    }

    // We are referencing a query defined in the design doc
    if (typeof fun === 'string') {
      const parts = parseViewName(fun);

      const response = await db.fetch('_design/' + parts[0] + '/_view/' + parts[1] + params, {
        headers: new h({'Content-Type': 'application/json'}),
        method,
        body: JSON.stringify(body)
      });
      ok = response.ok;
      // status = response.status;
      const result = await response.json();

      if (!ok) {
        result.status = response.status;
        throw generateErrorFromResponse(result);
      }

      // fail the entire request if the result contains an error
      for (const row of result.rows) {
        /* istanbul ignore if */
        if (row.value && row.value.error && row.value.error === "builtin_reduce_error") {
          throw new Error(row.reason);
        }
      }

      return new Promise(function (resolve) {
        resolve(result);
      }).then(postprocessAttachments(opts));
    }

    // We are using a temporary view, terrible for performance, good for testing
    body = body || {};
    // functions must be serialized to source text for the wire
    for (const key of Object.keys(fun)) {
      if (Array.isArray(fun[key])) {
        body[key] = fun[key];
      } else {
        body[key] = fun[key].toString();
      }
    }

    const response = await db.fetch('_temp_view' + params, {
      headers: new h({'Content-Type': 'application/json'}),
      method: 'POST',
      body: JSON.stringify(body)
    });

    ok = response.ok;
    // status = response.status;
    const result = await response.json();
    if (!ok) {
      result.status = response.status;
      throw generateErrorFromResponse(result);
    }

    return new Promise(function (resolve) {
      resolve(result);
    }).then(postprocessAttachments(opts));
  }
8391
8392 // custom adapters can define their own api._query
8393 // and override the default behavior
8394 /* istanbul ignore next */
8395 function customQuery(db, fun, opts) {
8396 return new Promise(function (resolve, reject) {
8397 db._query(fun, opts, function (err, res) {
8398 if (err) {
8399 return reject(err);
8400 }
8401 resolve(res);
8402 });
8403 });
8404 }
8405
8406 // custom adapters can define their own api._viewCleanup
8407 // and override the default behavior
8408 /* istanbul ignore next */
8409 function customViewCleanup(db) {
8410 return new Promise(function (resolve, reject) {
8411 db._viewCleanup(function (err, res) {
8412 if (err) {
8413 return reject(err);
8414 }
8415 resolve(res);
8416 });
8417 });
8418 }
8419
8420 function defaultsTo(value) {
8421 return function (reason) {
8422 /* istanbul ignore else */
8423 if (reason.status === 404) {
8424 return value;
8425 } else {
8426 throw reason;
8427 }
8428 };
8429 }
8430
  // returns a promise for a list of docs to update, based on the input docId.
  // the order doesn't matter, because post-3.2.0, bulkDocs
  // is an atomic operation in all three adapters.
  async function getDocsToPersist(docId, view, docIdsToChangesAndEmits) {
    const metaDocId = '_local/doc_' + docId;
    // meta doc tracks which view keys this source doc has emitted
    const defaultMetaDoc = {_id: metaDocId, keys: []};
    const docData = docIdsToChangesAndEmits.get(docId);
    const indexableKeysToKeyValues = docData[0];
    const changes = docData[1];

    function getMetaDoc() {
      if (isGenOne(changes)) {
        // generation 1, so we can safely assume initial state
        // for performance reasons (avoids unnecessary GETs)
        return Promise.resolve(defaultMetaDoc);
      }
      return view.db.get(metaDocId).catch(defaultsTo(defaultMetaDoc));
    }

    function getKeyValueDocs(metaDoc) {
      if (!metaDoc.keys.length) {
        // no keys, no need for a lookup
        return Promise.resolve({rows: []});
      }
      return view.db.allDocs({
        keys: metaDoc.keys,
        include_docs: true
      });
    }

    // Diff previously stored key/value docs against the new emits: existing
    // docs are updated or tombstoned, brand-new keys get fresh docs, and the
    // meta doc's key list is refreshed.
    function processKeyValueDocs(metaDoc, kvDocsRes) {
      const kvDocs = [];
      const oldKeys = new Set();

      for (const row of kvDocsRes.rows) {
        const doc = row.doc;
        if (!doc) { // deleted
          continue;
        }
        kvDocs.push(doc);
        oldKeys.add(doc._id);
        // tombstone keys that are no longer emitted for this source doc
        doc._deleted = !indexableKeysToKeyValues.has(doc._id);
        if (!doc._deleted) {
          const keyValue = indexableKeysToKeyValues.get(doc._id);
          if ('value' in keyValue) {
            doc.value = keyValue.value;
          }
        }
      }
      const newKeys = mapToKeysArray(indexableKeysToKeyValues);
      for (const key of newKeys) {
        if (!oldKeys.has(key)) {
          // new doc
          const kvDoc = {
            _id: key
          };
          const keyValue = indexableKeysToKeyValues.get(key);
          if ('value' in keyValue) {
            kvDoc.value = keyValue.value;
          }
          kvDocs.push(kvDoc);
        }
      }
      metaDoc.keys = uniq(newKeys.concat(metaDoc.keys));
      kvDocs.push(metaDoc);

      return kvDocs;
    }

    const metaDoc = await getMetaDoc();
    const keyValueDocs = await getKeyValueDocs(metaDoc);
    return processKeyValueDocs(metaDoc, keyValueDocs);
  }
8504
  // Copy the source db's current purge sequence (_local/purges) into the
  // view db's _local/purgeSeq doc, so getRecentPurges can skip purges that
  // have already been folded into the index.
  function updatePurgeSeq(view) {
    // with this approach, we just assume to have processed all missing purges and write the latest
    // purgeSeq into the _local/purgeSeq doc.
    return view.sourceDB.get('_local/purges').then(function (res) {
      const purgeSeq = res.purgeSeq;
      return view.db.get('_local/purgeSeq').then(function (res) {
        return res._rev;
      })
      .catch(defaultsTo(undefined))
      .then(function (rev$$1) {
        return view.db.put({
          _id: '_local/purgeSeq',
          _rev: rev$$1,
          purgeSeq,
        });
      });
    }).catch(function (err) {
      // a missing _local/purges doc means the source db has no purges: no-op
      if (err.status !== 404) {
        throw err;
      }
    });
  }
8527
  // updates all emitted key/value docs and metaDocs in the mrview database
  // for the given batch of documents from the source database
  function saveKeyValues(view, docIdsToChangesAndEmits, seq) {
    var seqDocId = '_local/lastSeq';
    return view.db.get(seqDocId)
      .catch(defaultsTo({_id: seqDocId, seq: 0}))
      .then(function (lastSeqDoc) {
        var docIds = mapToKeysArray(docIdsToChangesAndEmits);
        return Promise.all(docIds.map(function (docId) {
          return getDocsToPersist(docId, view, docIdsToChangesAndEmits);
        })).then(function (listOfDocsToPersist) {
          var docsToPersist = listOfDocsToPersist.flat();
          lastSeqDoc.seq = seq;
          docsToPersist.push(lastSeqDoc);
          // write all docs in a single operation, update the seq once
          return view.db.bulkDocs({docs : docsToPersist});
        })
        // TODO: this should be placed somewhere else, probably? we're querying both docs twice
        // (first time when getting the actual purges).
        .then(() => updatePurgeSeq(view));
      });
  }
8550
8551 function getQueue(view) {
8552 const viewName = typeof view === 'string' ? view : view.name;
8553 let queue = persistentQueues[viewName];
8554 if (!queue) {
8555 queue = persistentQueues[viewName] = new TaskQueue$1();
8556 }
8557 return queue;
8558 }
8559
8560 async function updateView(view, opts) {
8561 return sequentialize(getQueue(view), function () {
8562 return updateViewInQueue(view, opts);
8563 })();
8564 }
8565
  // Incrementally index the source db's changes (and purges) into the view
  // db, batch by batch, reporting progress via 'indexing' events and the
  // activeTasks API. Must run inside the view's task queue; see updateView.
  async function updateViewInQueue(view, opts) {
    // bind the emit function once
    let mapResults;
    let doc;
    let taskId;

    // emit() appends to the closure-shared mapResults for the doc currently
    // held in the closure-shared `doc` variable.
    function emit(key, value) {
      const output = {id: doc._id, key: normalizeKey(key)};
      // Don't explicitly store the value unless it's defined and non-null.
      // This saves on storage space, because often people don't use it.
      if (typeof value !== 'undefined' && value !== null) {
        output.value = normalizeKey(value);
      }
      mapResults.push(output);
    }

    const mapFun = mapper(view.mapFun, emit);

    let currentSeq = view.seq || 0;

    function createTask() {
      return view.sourceDB.info().then(function (info) {
        taskId = view.sourceDB.activeTasks.add({
          name: 'view_indexing',
          total_items: info.update_seq - currentSeq,
        });
      });
    }

    function processChange(docIdsToChangesAndEmits, seq) {
      return function () {
        return saveKeyValues(view, docIdsToChangesAndEmits, seq);
      };
    }

    let indexed_docs = 0;
    const progress = {
      view: view.name,
      indexed_docs
    };
    view.sourceDB.emit('indexing', progress);

    // writes are chained on this queue while we keep reading the next batch
    const queue = new TaskQueue$1();

    async function processNextBatch() {
      const response = await view.sourceDB.changes({
        return_docs: true,
        conflicts: true,
        include_docs: true,
        style: 'all_docs',
        since: currentSeq,
        limit: opts.changes_batch_size
      });
      const purges = await getRecentPurges();
      return processBatch(response, purges);
    }

    // Fetch purges newer than the view's recorded purgeSeq, resolving each
    // purged doc id to its new winning doc (if the doc still exists).
    function getRecentPurges() {
      return view.db.get('_local/purgeSeq').then(function (res) {
        return res.purgeSeq;
      })
      .catch(defaultsTo(-1))
      .then(function (purgeSeq) {
        return view.sourceDB.get('_local/purges').then(function (res) {
          const recentPurges = res.purges.filter(function (purge, index) {
            return index > purgeSeq;
          }).map((purge) => purge.docId);

          const uniquePurges = recentPurges.filter(function (docId, index) {
            return recentPurges.indexOf(docId) === index;
          });

          return Promise.all(uniquePurges.map(function (docId) {
            return view.sourceDB.get(docId).then(function (doc) {
              return { docId, doc };
            })
            .catch(defaultsTo({ docId }));
          }));
        })
        .catch(defaultsTo([]));
      });
    }

    function processBatch(response, purges) {
      const results = response.results;
      if (!results.length && !purges.length) {
        return;
      }

      // splice purged docs into the batch as synthetic deletions/updates
      for (const purge of purges) {
        const index = results.findIndex(function (change) {
          return change.id === purge.docId;
        });
        if (index < 0) {
          // mimic a db.remove() on the changes feed
          const entry = {
            _id: purge.docId,
            doc: {
              _id: purge.docId,
              _deleted: 1,
            },
            changes: [],
          };

          if (purge.doc) {
            // update with new winning rev after purge
            entry.doc = purge.doc;
            entry.changes.push({ rev: purge.doc._rev });
          }

          results.push(entry);
        }
      }

      const docIdsToChangesAndEmits = createDocIdsToChangesAndEmits(results);

      queue.add(processChange(docIdsToChangesAndEmits, currentSeq));

      indexed_docs = indexed_docs + results.length;
      const progress = {
        view: view.name,
        last_seq: response.last_seq,
        results_count: results.length,
        indexed_docs
      };
      view.sourceDB.emit('indexing', progress);
      view.sourceDB.activeTasks.update(taskId, {completed_items: indexed_docs});

      // a short batch means the changes feed is drained
      if (results.length < opts.changes_batch_size) {
        return;
      }
      return processNextBatch();
    }

    // Run the map function over a batch of changes, collecting the emitted
    // rows per doc id ('_'-prefixed ids such as design docs are skipped,
    // though their seq still advances currentSeq).
    function createDocIdsToChangesAndEmits(results) {
      const docIdsToChangesAndEmits = new Map();
      for (const change of results) {
        if (change.doc._id[0] !== '_') {
          mapResults = [];
          doc = change.doc;

          if (!doc._deleted) {
            tryMap(view.sourceDB, mapFun, doc);
          }
          mapResults.sort(sortByKeyThenValue);

          const indexableKeysToKeyValues = createIndexableKeysToKeyValues(mapResults);
          docIdsToChangesAndEmits.set(change.doc._id, [
            indexableKeysToKeyValues,
            change.changes
          ]);
        }
        currentSeq = change.seq;
      }
      return docIdsToChangesAndEmits;
    }

    // Serialize each emitted [key, id] (plus a disambiguating index for
    // duplicate keys) into an indexable string -> emitted row map.
    function createIndexableKeysToKeyValues(mapResults) {
      const indexableKeysToKeyValues = new Map();
      let lastKey;
      for (let i = 0, len = mapResults.length; i < len; i++) {
        const emittedKeyValue = mapResults[i];
        const complexKey = [emittedKeyValue.key, emittedKeyValue.id];
        if (i > 0 && collate(emittedKeyValue.key, lastKey) === 0) {
          complexKey.push(i); // dup key+id, so make it unique
        }
        indexableKeysToKeyValues.set(toIndexableString(complexKey), emittedKeyValue);
        lastKey = emittedKeyValue.key;
      }
      return indexableKeysToKeyValues;
    }

    try {
      await createTask();
      await processNextBatch();
      await queue.finish();
      view.seq = currentSeq;
      view.sourceDB.activeTasks.remove(taskId);
    } catch (error) {
      // NOTE(review): errors are reported via activeTasks.remove but not
      // rethrown, so callers see a resolved promise — presumably intentional.
      view.sourceDB.activeTasks.remove(taskId, error);
    }
  }
8748
  // Group map rows by (possibly truncated) key and run the reduce function
  // over each group, honoring group/group_level/limit/skip. Relies on the
  // input rows already being sorted so equal group keys are adjacent.
  function reduceView(view, results, options) {
    // group_level=0 behaves like plain grouping with the whole key
    if (options.group_level === 0) {
      delete options.group_level;
    }

    const shouldGroup = options.group || options.group_level;
    const reduceFun = reducer(view.reduceFun);
    const groups = [];
    const lvl = isNaN(options.group_level)
      ? Number.POSITIVE_INFINITY
      : options.group_level;

    for (const result of results) {
      const last = groups[groups.length - 1];
      let groupKey = shouldGroup ? result.key : null;

      // only set group_level for array keys
      if (shouldGroup && Array.isArray(groupKey)) {
        groupKey = groupKey.slice(0, lvl);
      }

      if (last && collate(last.groupKey, groupKey) === 0) {
        last.keys.push([result.key, result.id]);
        last.values.push(result.value);
        continue;
      }
      groups.push({
        keys: [[result.key, result.id]],
        values: [result.value],
        groupKey
      });
    }

    results = [];
    for (const group of groups) {
      const reduceTry = tryReduce(view.sourceDB, reduceFun, group.keys, group.values, false);
      if (reduceTry.error && reduceTry.error instanceof BuiltInError) {
        // CouchDB returns an error if a built-in errors out
        throw reduceTry.error;
      }
      results.push({
        // CouchDB just sets the value to null if a non-built-in errors out
        value: reduceTry.error ? null : reduceTry.output,
        key: group.groupKey
      });
    }
    // no total_rows/offset when reducing
    return { rows: sliceResults(results, options.limit, options.skip) };
  }
8798
8799 function queryView(view, opts) {
8800 return sequentialize(getQueue(view), function () {
8801 return queryViewInQueue(view, opts);
8802 })();
8803 }
8804
  // Answer a query from the already-built view db. Handles the 'keys' form
  // (one range fetch per key), plain range queries, reduce/group, and an
  // optional join of source docs when include_docs is set.
  async function queryViewInQueue(view, opts) {
    let totalRows;
    const shouldReduce = view.reduceFun && opts.reduce !== false;
    const skip = opts.skip || 0;
    if (typeof opts.keys !== 'undefined' && !opts.keys.length) {
      // equivalent query
      opts.limit = 0;
      delete opts.keys;
    }

    // Read raw rows from the view db over a key range and decode each stored
    // doc back into a {key, id, value} map row.
    async function fetchFromView(viewOpts) {
      viewOpts.include_docs = true;
      const res = await view.db.allDocs(viewOpts);
      totalRows = res.total_rows;

      return res.rows.map(function (result) {
        // implicit migration - in older versions of PouchDB,
        // we explicitly stored the doc as {id: ..., key: ..., value: ...}
        // this is tested in a migration test
        /* istanbul ignore next */
        if ('value' in result.doc && typeof result.doc.value === 'object' &&
            result.doc.value !== null) {
          const keys = Object.keys(result.doc.value).sort();
          // this detection method is not perfect, but it's unlikely the user
          // emitted a value which was an object with these 3 exact keys
          const expectedKeys = ['id', 'key', 'value'];
          if (!(keys < expectedKeys || keys > expectedKeys)) {
            return result.doc.value;
          }
        }

        // the key and emitting doc id are serialized into the view doc's _id
        const parsedKeyAndDocId = parseIndexableString(result.doc._id);
        return {
          key: parsedKeyAndDocId[0],
          id: parsedKeyAndDocId[1],
          value: ('value' in result.doc ? result.doc.value : null)
        };
      });
    }

    // Shape the final response: reduce if requested, else total_rows/offset,
    // then optionally join full docs from the source database into the rows.
    async function onMapResultsReady(rows) {
      let finalResults;
      if (shouldReduce) {
        finalResults = reduceView(view, rows, opts);
      } else if (typeof opts.keys === 'undefined') {
        finalResults = {
          total_rows: totalRows,
          offset: skip,
          rows
        };
      } else {
        // support limit, skip for keys query
        finalResults = {
          total_rows: totalRows,
          offset: skip,
          rows: sliceResults(rows,opts.limit,opts.skip)
        };
      }
      /* istanbul ignore if */
      if (opts.update_seq) {
        finalResults.update_seq = view.seq;
      }
      if (opts.include_docs) {
        const docIds = uniq(rows.map(rowToDocId));

        const allDocsRes = await view.sourceDB.allDocs({
          keys: docIds,
          include_docs: true,
          conflicts: opts.conflicts,
          attachments: opts.attachments,
          binary: opts.binary
        });
        const docIdsToDocs = new Map();
        for (const row of allDocsRes.rows) {
          docIdsToDocs.set(row.id, row.doc);
        }
        for (const row of rows) {
          const docId = rowToDocId(row);
          const doc = docIdsToDocs.get(docId);
          if (doc) {
            row.doc = doc;
          }
        }
      }
      return finalResults;
    }

    if (typeof opts.keys !== 'undefined') {
      const keys = opts.keys;
      // one bounded range fetch per requested key, then flatten
      const fetchPromises = keys.map(function (key) {
        const viewOpts = {
          startkey : toIndexableString([key]),
          endkey : toIndexableString([key, {}])
        };
        /* istanbul ignore if */
        if (opts.update_seq) {
          viewOpts.update_seq = true;
        }
        return fetchFromView(viewOpts);
      });
      const result = await Promise.all(fetchPromises);
      const flattenedResult = result.flat();
      return onMapResultsReady(flattenedResult);
    } else { // normal query, no 'keys'
      const viewOpts = {
        descending : opts.descending
      };
      /* istanbul ignore if */
      if (opts.update_seq) {
        viewOpts.update_seq = true;
      }
      let startkey;
      let endkey;
      // the underscored forms take precedence over start_key/end_key
      if ('start_key' in opts) {
        startkey = opts.start_key;
      }
      if ('startkey' in opts) {
        startkey = opts.startkey;
      }
      if ('end_key' in opts) {
        endkey = opts.end_key;
      }
      if ('endkey' in opts) {
        endkey = opts.endkey;
      }
      // stored keys are [key, docId] tuples; appending {} closes a range
      // past every docId for that key
      if (typeof startkey !== 'undefined') {
        viewOpts.startkey = opts.descending ?
          toIndexableString([startkey, {}]) :
          toIndexableString([startkey]);
      }
      if (typeof endkey !== 'undefined') {
        let inclusiveEnd = opts.inclusive_end !== false;
        if (opts.descending) {
          inclusiveEnd = !inclusiveEnd;
        }

        viewOpts.endkey = toIndexableString(
          inclusiveEnd ? [endkey, {}] : [endkey]);
      }
      if (typeof opts.key !== 'undefined') {
        const keyStart = toIndexableString([opts.key]);
        const keyEnd = toIndexableString([opts.key, {}]);
        if (viewOpts.descending) {
          viewOpts.endkey = keyStart;
          viewOpts.startkey = keyEnd;
        } else {
          viewOpts.startkey = keyStart;
          viewOpts.endkey = keyEnd;
        }
      }
      // when reducing, limit/skip apply after the reduce (see reduceView)
      if (!shouldReduce) {
        if (typeof opts.limit === 'number') {
          viewOpts.limit = opts.limit;
        }
        viewOpts.skip = skip;
      }

      const result = await fetchFromView(viewOpts);
      return onMapResultsReady(result);
    }
  }
8966
  // Remote databases: delegate view cleanup to the server's
  // POST /_view_cleanup endpoint and return its parsed JSON response.
  // NOTE(review): `h` is presumably a Headers constructor alias defined
  // earlier in the bundle — confirm.
  async function httpViewCleanup(db) {
    const response = await db.fetch('_view_cleanup', {
      headers: new h({'Content-Type': 'application/json'}),
      method: 'POST'
    });
    return response.json();
  }
8974
  // Local databases: inspect the '_local/<localDocName>' metadata doc to find
  // every view-backing database, decide which are orphaned (design doc gone,
  // or the view function no longer present), and destroy those.
  async function localViewCleanup(db) {
    try {
      const metaDoc = await db.get('_local/' + localDocName);
      // design doc id -> Set of view names registered under it
      const docsToViews = new Map();

      for (const fullViewName of Object.keys(metaDoc.views)) {
        const parts = parseViewName(fullViewName);
        const designDocName = '_design/' + parts[0];
        const viewName = parts[1];
        let views = docsToViews.get(designDocName);
        if (!views) {
          views = new Set();
          docsToViews.set(designDocName, views);
        }
        views.add(viewName);
      }
      const opts = {
        keys : mapToKeysArray(docsToViews),
        include_docs : true
      };

      const res = await db.allDocs(opts);
      // backing-db name -> true if at least one live view still uses it
      const viewsToStatus = {};
      for (const row of res.rows) {
        const ddocName = row.key.substring(8); // cuts off '_design/'
        for (const viewName of docsToViews.get(row.key)) {
          let fullViewName = ddocName + '/' + viewName;
          /* istanbul ignore if */
          if (!metaDoc.views[fullViewName]) {
            // new format, without slashes, to support PouchDB 2.2.0
            // migration test in pouchdb's browser.migration.js verifies this
            fullViewName = viewName;
          }
          const viewDBNames = Object.keys(metaDoc.views[fullViewName]);
          // design doc deleted, or view function nonexistent
          const statusIsGood = row.doc && row.doc.views &&
            row.doc.views[viewName];
          for (const viewDBName of viewDBNames) {
            viewsToStatus[viewDBName] = viewsToStatus[viewDBName] || statusIsGood;
          }
        }
      }

      const dbsToDelete = Object.keys(viewsToStatus)
        .filter(function (viewDBName) { return !viewsToStatus[viewDBName]; });

      // Serialize each destroy through that view's task queue so it cannot
      // race a concurrent index update on the same backing db.
      const destroyPromises = dbsToDelete.map(function (viewDBName) {
        return sequentialize(getQueue(viewDBName), function () {
          return new db.constructor(viewDBName, db.__opts).destroy();
        })();
      });

      return Promise.all(destroyPromises).then(function () {
        return {ok: true};
      });
    } catch (err) {
      if (err.status === 404) {
        // no metadata doc means no views were ever created: nothing to clean
        return {ok: true};
      } else {
        throw err;
      }
    }
  }
9038
  // Core query dispatcher: routes a query to a custom adapter implementation,
  // an HTTP backend, a temporary view (fun is an object), or a persistent
  // design-doc view (fun is a 'ddoc/view' string).
  async function queryPromised(db, fun, opts) {
    /* istanbul ignore next */
    if (typeof db._query === 'function') {
      return customQuery(db, fun, opts);
    }
    if (isRemote(db)) {
      return httpQuery(db, fun, opts);
    }

    const updateViewOpts = {
      changes_batch_size: db.__opts.view_update_changes_batch_size || CHANGES_BATCH_SIZE$1
    };

    if (typeof fun !== 'string') {
      // temp_view
      checkQueryParseError(opts, fun);

      tempViewQueue.add(async function () {
        const view = await createView(
          /* sourceDB */ db,
          /* viewName */ 'temp_view/temp_view',
          /* mapFun */ fun.map,
          /* reduceFun */ fun.reduce,
          /* temporary */ true,
          /* localDocName */ localDocName);

        // always destroy the temp view's backing db once the query finishes
        return fin(updateView(view, updateViewOpts).then(
          function () { return queryView(view, opts); }),
          function () { return view.db.destroy(); }
        );
      });
      return tempViewQueue.finish();
    } else {
      // persistent view
      const fullViewName = fun;
      const parts = parseViewName(fullViewName);
      const designDocName = parts[0];
      const viewName = parts[1];

      const doc = await db.get('_design/' + designDocName);
      fun = doc.views && doc.views[viewName];

      if (!fun) {
        // basic validator; it's assumed that every subclass would want this
        throw new NotFoundError(`ddoc ${doc._id} has no view named ${viewName}`);
      }

      ddocValidator(doc, viewName);
      checkQueryParseError(opts, fun);

      const view = await createView(
        /* sourceDB */ db,
        /* viewName */ fullViewName,
        /* mapFun */ fun.map,
        /* reduceFun */ fun.reduce,
        /* temporary */ false,
        /* localDocName */ localDocName);

      if (opts.stale === 'ok' || opts.stale === 'update_after') {
        // 'update_after' kicks off the index build asynchronously but
        // answers immediately from whatever is already indexed
        if (opts.stale === 'update_after') {
          nextTick(function () {
            updateView(view, updateViewOpts);
          });
        }
        return queryView(view, opts);
      } else { // stale not ok
        await updateView(view, updateViewOpts);
        return queryView(view, opts);
      }
    }
  }
9110
  // Public query() entry point: normalizes the (fun, opts, callback)
  // arguments, coerces options, wraps a bare map function into {map}, and
  // supports both promise and node-callback styles.
  function abstractQuery(fun, opts, callback) {
    const db = this;
    if (typeof opts === 'function') {
      callback = opts;
      opts = {};
    }
    opts = opts ? coerceOptions(opts) : {};

    if (typeof fun === 'function') {
      fun = {map : fun};
    }

    const promise = Promise.resolve().then(function () {
      return queryPromised(db, fun, opts);
    });
    promisedCallback(promise, callback);
    return promise;
  }
9129
  // Public viewCleanup() entry point (callback-ified): routes to the
  // adapter's custom implementation, the HTTP endpoint, or the local cleanup.
  const abstractViewCleanup = callbackify(function () {
    const db = this;
    /* istanbul ignore next */
    if (typeof db._viewCleanup === 'function') {
      return customViewCleanup(db);
    }
    if (isRemote(db)) {
      return httpViewCleanup(db);
    }
    return localViewCleanup(db);
  });
9141
9142 return {
9143 query: abstractQuery,
9144 viewCleanup: abstractViewCleanup
9145 };
9146}
9147
// CouchDB's built-in reduce functions.  The `keys` parameter is accepted for
// API parity with user-defined reducers but is unused by all three.
var builtInReduce = {
  _sum: function (keys, values) {
    return sum(values);
  },

  _count: function (keys, values) {
    return values.length;
  },

  _stats: function (keys, values) {
    // no need to implement rereduce=true, because Pouch
    // will never call it
    let sumsqr = 0;
    for (const num of values) {
      sumsqr += num * num;
    }
    return {
      sum : sum(values),
      min : Math.min.apply(null, values),
      max : Math.max.apply(null, values),
      count : values.length,
      sumsqr
    };
  }
};
9177
// Map the leading token of a reduce-function string onto one of the CouchDB
// built-in reducers.  Any other name starting with '_' is an unsupported
// built-in and therefore an error; anything else returns undefined so the
// caller falls back to evaluating the string as JavaScript.
function getBuiltIn(reduceFunString) {
  if (reduceFunString.startsWith('_sum')) {
    return builtInReduce._sum;
  }
  if (reduceFunString.startsWith('_count')) {
    return builtInReduce._count;
  }
  if (reduceFunString.startsWith('_stats')) {
    return builtInReduce._stats;
  }
  if (reduceFunString.startsWith('_')) {
    throw new Error(reduceFunString + ' is not a supported reduce function.');
  }
}
9189
// Build the executable map function for a view.  Temp views may pass a real
// function taking (doc, emit) (see #38); otherwise the stored function
// source is evaluated with `emit` bound in scope.
function mapper(mapFun, emit) {
  const takesEmitDirectly = typeof mapFun === "function" && mapFun.length === 2;
  if (takesEmitDirectly) {
    return (doc) => mapFun(doc, emit);
  }
  return evalFunctionWithEval(mapFun.toString(), emit);
}
9201
// Resolve a view's reduce function: prefer a CouchDB built-in (_sum, _count,
// _stats); otherwise evaluate the stored function source.
function reducer(reduceFun) {
  const source = reduceFun.toString();
  return getBuiltIn(source) || evalFunctionWithEval(source);
}
9211
// Validate that `viewName` exists on the design doc and that its `map` is
// stored as a string (i.e. it round-tripped through JSON).  Previously a
// missing view crashed with a TypeError on `fun.map`; now it throws the
// NotFoundError callers expect in both cases.
function ddocValidator(ddoc, viewName) {
  var fun = ddoc.views && ddoc.views[viewName];
  if (!fun || typeof fun.map !== 'string') {
    throw new NotFoundError('ddoc ' + ddoc._id + ' has no string view named ' +
      viewName + ', instead found object of type: ' + typeof (fun && fun.map));
  }
}
9219
// Name of the _local doc used to track map/reduce view metadata.
var localDocName = 'mrviews';
var abstract = createAbstractMapReduce(localDocName, mapper, reducer, ddocValidator);

// Thin wrappers so the plugin methods run with the database as `this`.
function query(fun, opts, callback) {
  return abstract.query.call(this, fun, opts, callback);
}

function viewCleanup(callback) {
  return abstract.viewCleanup.call(this, callback);
}

// The map/reduce plugin surface mixed into PouchDB.
var mapreduce = {
  query,
  viewCleanup
};
9235
// True when the attachment must be (re)fetched from the source: the local
// doc has no attachments at all, lacks this particular file, or holds a
// version with a different digest than the remote one.
function fileHasChanged(localDoc, remoteDoc, filename) {
  const localAtt = localDoc._attachments && localDoc._attachments[filename];
  if (!localAtt) {
    return true;
  }
  return localAtt.digest !== remoteDoc._attachments[filename].digest;
}
9241
// Fetch the binary body of every attachment listed on `doc`, pinned to the
// doc's current revision.  Resolves to an array of attachment bodies in
// Object.keys() order of doc._attachments.
function getDocAttachments(db, doc) {
  const fetches = Object.keys(doc._attachments).map(
    (filename) => db.getAttachment(doc._id, filename, {rev: doc._rev})
  );
  return Promise.all(fetches);
}
9248
// Fetch a doc's attachments, preferring already-present local copies when
// replicating from a remote source into a local target (saves bandwidth).
// Falls back to fetching everything from the source when the target does not
// have the doc yet (404) or when the local-check optimization doesn't apply.
function getDocAttachmentsFromTargetOrSource(target, src, doc) {
  var doCheckForLocalAttachments = isRemote(src) && !isRemote(target);
  var filenames = Object.keys(doc._attachments);

  if (!doCheckForLocalAttachments) {
    return getDocAttachments(src, doc);
  }

  return target.get(doc._id).then(function (localDoc) {
    return Promise.all(filenames.map(function (filename) {
      if (fileHasChanged(localDoc, doc, filename)) {
        return src.getAttachment(doc._id, filename);
      }

      // digest matches: reuse the attachment we already have locally
      return target.getAttachment(localDoc._id, filename);
    }));
  }).catch(function (error) {
    /* istanbul ignore if */
    if (error.status !== 404) {
      throw error;
    }

    return getDocAttachments(src, doc);
  });
}
9274
// Translate a revsDiff response ({id: {missing: [revs]}}) into the options
// object expected by bulkGet: one {id, rev} request per missing revision.
function createBulkGetOpts(diffs) {
  const docs = [];
  for (const id of Object.keys(diffs)) {
    for (const rev of diffs[id].missing) {
      docs.push({id, rev});
    }
  }

  return {
    docs,
    revs: true,   // include revision histories
    latest: true  // resolve each requested rev to its latest leaf
  };
}
9293
//
// Fetch all the documents from the src as described in the "diffs",
// which is a mapping of docs IDs to revisions. If the state ever
// changes to "cancelled", then the returned promise will be rejected.
// Else it will be resolved with a list of fetched documents.
//
function getDocs(src, target, diffs, state) {
  diffs = clone(diffs); // we do not need to modify this

  var resultDocs = [],
      ok = true; // flips to false if any requested doc came back as an error

  function getAllDocs() {

    var bulkGetOpts = createBulkGetOpts(diffs);

    if (!bulkGetOpts.docs.length) { // optimization: skip empty requests
      return;
    }

    return src.bulkGet(bulkGetOpts).then(function (bulkGetResponse) {
      /* istanbul ignore if */
      if (state.cancelled) {
        throw new Error('cancelled');
      }
      return Promise.all(bulkGetResponse.results.map(function (bulkGetInfo) {
        return Promise.all(bulkGetInfo.docs.map(function (doc) {
          var remoteDoc = doc.ok;

          if (doc.error) {
            // when AUTO_COMPACTION is set, docs can be returned which look
            // like this: {"missing":"1-7c3ac256b693c462af8442f992b83696"}
            ok = false;
          }

          if (!remoteDoc || !remoteDoc._attachments) {
            return remoteDoc;
          }

          // inline the attachment bodies into the doc (replacing the stubs)
          // before it is written to the target
          return getDocAttachmentsFromTargetOrSource(target, src, remoteDoc).then((attachments) => {
            var filenames = Object.keys(remoteDoc._attachments);
            attachments.forEach(function (attachment, i) {
              var att = remoteDoc._attachments[filenames[i]];
              delete att.stub;
              delete att.length;
              att.data = attachment;
            });

            return remoteDoc;
          });
        }));
      }))

      .then(function (results) {
        // flatten per-request doc arrays and drop error entries (undefined)
        resultDocs = resultDocs.concat(results.flat().filter(Boolean));
      });
    });
  }

  function returnResult() {
    return { ok, docs:resultDocs };
  }

  return Promise.resolve()
    .then(getAllDocs)
    .then(returnResult);
}
9361
// Version stamped on checkpoint docs; bump when the doc format changes.
var CHECKPOINT_VERSION = 1;
// Identifies PouchDB as the writer of a checkpoint doc.
var REPLICATOR = "pouchdb";
// This is an arbitrary number to limit the
// amount of replication history we save in the checkpoint.
// If we save too much, the checkpoint docs will become very big,
// if we save fewer, we'll run a greater risk of having to
// read all the changes from 0 when checkpoint PUTs fail
// CouchDB 2.0 has a more involved history pruning,
// but let's go for the simple version for now.
var CHECKPOINT_HISTORY_SIZE = 5;
// Sequence number replication falls back to when no checkpoint is usable.
var LOWEST_SEQ = 0;
9373
// Read-modify-write one side's checkpoint (_local) doc: prepend this
// session's last_seq to a bounded history list and save.  Retries on 409
// conflicts; no-ops once the replication is cancelled or the checkpoint is
// already current.
function updateCheckpoint(db, id, checkpoint, session, returnValue) {
  return db.get(id).catch(function (err) {
    if (err.status === 404) {
      if (db.adapter === 'http' || db.adapter === 'https') {
        explainError(
          404, 'PouchDB is just checking if a remote checkpoint exists.'
        );
      }
      // first checkpoint for this replication: start a fresh doc
      return {
        session_id: session,
        _id: id,
        history: [],
        replicator: REPLICATOR,
        version: CHECKPOINT_VERSION
      };
    }
    throw err;
  }).then(function (doc) {
    if (returnValue.cancelled) {
      return;
    }

    // if the checkpoint has not changed, do not update
    if (doc.last_seq === checkpoint) {
      return;
    }

    // Filter out current entry for this replication
    doc.history = (doc.history || []).filter(function (item) {
      return item.session_id !== session;
    });

    // Add the latest checkpoint to history
    doc.history.unshift({
      last_seq: checkpoint,
      session_id: session
    });

    // Just take the last pieces in history, to
    // avoid really big checkpoint docs.
    // see comment on history size above
    doc.history = doc.history.slice(0, CHECKPOINT_HISTORY_SIZE);

    doc.version = CHECKPOINT_VERSION;
    doc.replicator = REPLICATOR;

    doc.session_id = session;
    doc.last_seq = checkpoint;

    return db.put(doc).catch(function (err) {
      if (err.status === 409) {
        // retry; someone is trying to write a checkpoint simultaneously
        return updateCheckpoint(db, id, checkpoint, session, returnValue);
      }
      throw err;
    });
  });
}
9432
// Tracks replication progress by writing matching checkpoint docs (same
// _local id) to the source and/or target database, and reading them back to
// decide where a resumed replication should pick up.
class CheckpointerInternal {
  constructor(src, target, id, returnValue, opts = {
    writeSourceCheckpoint: true,
    writeTargetCheckpoint: true,
  }) {
    this.src = src;
    this.target = target;
    this.id = id;
    this.returnValue = returnValue;
    this.opts = opts;

    // The parameter default only applies when opts is omitted entirely;
    // fill in the flags individually for partially-specified opts objects.
    if (typeof opts.writeSourceCheckpoint === "undefined") {
      opts.writeSourceCheckpoint = true;
    }

    if (typeof opts.writeTargetCheckpoint === "undefined") {
      opts.writeTargetCheckpoint = true;
    }
  }

  // Persist `checkpoint` under `session`: target first, then source.
  writeCheckpoint(checkpoint, session) {
    var self = this;
    return this.updateTarget(checkpoint, session).then(function () {
      return self.updateSource(checkpoint, session);
    });
  }

  // Write the checkpoint to the target db (no-op when disabled).
  updateTarget(checkpoint, session) {
    if (this.opts.writeTargetCheckpoint) {
      return updateCheckpoint(this.target, this.id, checkpoint,
        session, this.returnValue);
    } else {
      return Promise.resolve(true);
    }
  }

  // Write the checkpoint to the source db; a 4xx rejection permanently
  // disables source-side checkpointing (e.g. read-only source) instead of
  // failing the replication.
  updateSource(checkpoint, session) {
    if (this.opts.writeSourceCheckpoint) {
      var self = this;
      return updateCheckpoint(this.src, this.id, checkpoint,
        session, this.returnValue)
        .catch(function (err) {
          if (isForbiddenError(err)) {
            self.opts.writeSourceCheckpoint = false;
            return true;
          }
          throw err;
        });
    } else {
      return Promise.resolve(true);
    }
  }

  // Determine the sequence number to resume from by reconciling the source
  // and target checkpoint docs.  Falls back to LOWEST_SEQ (start over)
  // whenever the two sides cannot be reconciled.
  getCheckpoint() {
    var self = this;

    // neither side persists checkpoints: always start from the beginning
    if (!self.opts.writeSourceCheckpoint && !self.opts.writeTargetCheckpoint) {
      return Promise.resolve(LOWEST_SEQ);
    }

    // source-only checkpointing: trust the source doc
    if (self.opts && self.opts.writeSourceCheckpoint && !self.opts.writeTargetCheckpoint) {
      return self.src.get(self.id).then(function (sourceDoc) {
        return sourceDoc.last_seq || LOWEST_SEQ;
      }).catch(function (err) {
        /* istanbul ignore if */
        if (err.status !== 404) {
          throw err;
        }
        return LOWEST_SEQ;
      });
    }

    return self.target.get(self.id).then(function (targetDoc) {
      // target-only checkpointing: trust the target doc
      if (self.opts && self.opts.writeTargetCheckpoint && !self.opts.writeSourceCheckpoint) {
        return targetDoc.last_seq || LOWEST_SEQ;
      }

      return self.src.get(self.id).then(function (sourceDoc) {
        // Since we can't migrate an old version doc to a new one
        // (no session id), we just go with the lowest seq in this case
        /* istanbul ignore if */
        if (targetDoc.version !== sourceDoc.version) {
          return LOWEST_SEQ;
        }

        var version;
        if (targetDoc.version) {
          version = targetDoc.version.toString();
        } else {
          version = "undefined";
        }

        // dispatch on the checkpoint-doc version to its comparison strategy
        if (version in comparisons) {
          return comparisons[version](targetDoc, sourceDoc);
        }
        /* istanbul ignore next */
        return LOWEST_SEQ;
      }, function (err) {
        // source doc missing but target shows progress: seed the source
        // with a fresh checkpoint doc and restart (unless source is
        // read-only, in which case keep the target's position)
        if (err.status === 404 && targetDoc.last_seq) {
          return self.src.put({
            _id: self.id,
            last_seq: LOWEST_SEQ
          }).then(function () {
            return LOWEST_SEQ;
          }, function (err) {
            if (isForbiddenError(err)) {
              self.opts.writeSourceCheckpoint = false;
              return targetDoc.last_seq;
            }
            /* istanbul ignore next */
            return LOWEST_SEQ;
          });
        }
        throw err;
      });
    }).catch(function (err) {
      if (err.status !== 404) {
        throw err;
      }
      return LOWEST_SEQ;
    });
  }
}
9556
// Checkpoint comparison strategies keyed by checkpoint-doc version
// (stringified); see CheckpointerInternal.getCheckpoint for the dispatch.
var comparisons = {
  "undefined": function (targetDoc, sourceDoc) {
    // This is the previous comparison function
    if (collate(targetDoc.last_seq, sourceDoc.last_seq) === 0) {
      return sourceDoc.last_seq;
    }
    /* istanbul ignore next */
    return 0;
  },
  "1": function (targetDoc, sourceDoc) {
    // This is the comparison function ported from CouchDB
    return compareReplicationLogs(sourceDoc, targetDoc).last_seq;
  }
};
9571
9572// This checkpoint comparison is ported from CouchDBs source
9573// they come from here:
9574// https://github.com/apache/couchdb-couch-replicator/blob/master/src/couch_replicator.erl#L863-L906
9575
// Compare two replication checkpoint logs.  Identical session ids mean both
// sides agree on the latest replication session, so the source's position is
// used as-is; otherwise fall back to searching both histories for the most
// recent common session.
function compareReplicationLogs(srcDoc, tgtDoc) {
  const { session_id, last_seq, history } = srcDoc;

  if (session_id === tgtDoc.session_id) {
    return { last_seq, history };
  }

  return compareReplicationHistory(history, tgtDoc.history);
}
9586
// Walk both checkpoint histories in lock step (iteratively, where the
// original Erlang used recursion) looking for a session both sides share;
// when none exists, resume from LOWEST_SEQ with an empty history.
function compareReplicationHistory(sourceHistory, targetHistory) {
  let source = sourceHistory;
  let target = targetHistory;

  for (;;) {
    const S = source[0];
    const T = target[0];

    // either side exhausted: no common session, start over
    if (!S || target.length === 0) {
      return {
        last_seq: LOWEST_SEQ,
        history: []
      };
    }

    /* istanbul ignore if */
    if (hasSessionId(S.session_id, target)) {
      return {
        last_seq: S.last_seq,
        history: source
      };
    }

    const sourceRest = source.slice(1);
    if (hasSessionId(T.session_id, sourceRest)) {
      return {
        last_seq: T.last_seq,
        history: target.slice(1)
      };
    }

    source = sourceRest;
    target = target.slice(1);
  }
}
9621
// Scan a checkpoint history for an entry carrying the given session id.
// A falsy session id never matches anything (iterative form of the original
// recursion; behavior is identical, including the empty-history case).
function hasSessionId(sessionId, history) {
  if (!sessionId) {
    return false;
  }
  for (const entry of history) {
    if (entry.session_id === sessionId) {
      return true;
    }
  }
  return false;
}
9636
// Treat any numeric HTTP 4xx status as "forbidden" for checkpointing
// purposes: the server rejected the write, so retrying is pointless.
// (400 <= s < 500 is exactly equivalent to Math.floor(s / 100) === 4.)
function isForbiddenError(err) {
  const { status } = err;
  return typeof status === 'number' && status >= 400 && status < 500;
}
9640
// Factory for CheckpointerInternal, callable with or without `new` (a
// constructor returning an object makes `new` yield that object).  The
// original guard tested `this instanceof CheckpointerInternal`, which is
// never true on any call path, so the `return Checkpointer` fallback was
// unreachable dead code and has been removed.
function Checkpointer(src, target, id, returnValue, opts) {
  return new CheckpointerInternal(src, target, id, returnValue, opts);
}
9647
// Initial retry delay (ms); grown by opts.back_off_function on each failure.
var STARTING_BACK_OFF = 0;
9649
// Handle a replication error by emitting events and scheduling a retry after
// an increasing delay.  When opts.retry is false the error is fatal: emit
// 'error' and tear down all listeners instead of retrying.
function backOff(opts, returnValue, error, callback) {
  if (opts.retry === false) {
    returnValue.emit('error', error);
    returnValue.removeAllListeners();
    return;
  }
  /* istanbul ignore if */
  if (typeof opts.back_off_function !== 'function') {
    opts.back_off_function = defaultBackOff;
  }
  returnValue.emit('requestError', error);
  if (returnValue.state === 'active' || returnValue.state === 'pending') {
    returnValue.emit('paused', error);
    returnValue.state = 'stopped';
    // Reset the back-off delay once the replication becomes active again,
    // but drop that reset hook if another pause happens first.
    var backOffSet = function backoffTimeSet() {
      opts.current_back_off = STARTING_BACK_OFF;
    };
    var removeBackOffSetter = function removeBackOffTimeSet() {
      returnValue.removeListener('active', backOffSet);
    };
    returnValue.once('paused', removeBackOffSetter);
    returnValue.once('active', backOffSet);
  }

  // grow the delay and schedule the retry
  opts.current_back_off = opts.current_back_off || STARTING_BACK_OFF;
  opts.current_back_off = opts.back_off_function(opts.current_back_off);
  setTimeout(callback, opts.current_back_off);
}
9678
// Return a copy of `queryParams` whose keys are inserted in CouchDB
// collation order, so JSON.stringify yields a stable string when computing
// replication ids.
function sortObjectPropertiesByKey(queryParams) {
  return Object.keys(queryParams).sort(collate).reduce(function (result, key) {
    result[key] = queryParams[key];
    return result;
  }, {});
}
9685
// Generate a unique id particular to this replication.
// Not guaranteed to align perfectly with CouchDB's rep ids.
// Hashes both database ids plus every replication-relevant option
// (filter, view, query params, doc_ids, selector) into a _local doc id.
function generateReplicationId(src, target, opts) {
  var docIds = opts.doc_ids ? opts.doc_ids.sort(collate) : '';
  var filterFun = opts.filter ? opts.filter.toString() : '';
  var queryParams = '';
  var filterViewName = '';
  var selector = '';

  // possibility for checkpoints to be lost here as behaviour of
  // JSON.stringify is not stable (see #6226)
  /* istanbul ignore if */
  if (opts.selector) {
    selector = JSON.stringify(opts.selector);
  }

  if (opts.filter && opts.query_params) {
    queryParams = JSON.stringify(sortObjectPropertiesByKey(opts.query_params));
  }

  if (opts.filter && opts.filter === '_view') {
    filterViewName = opts.view.toString();
  }

  return Promise.all([src.id(), target.id()]).then(function (res) {
    var queryData = res[0] + res[1] + filterFun + filterViewName +
      queryParams + docIds + selector;
    return new Promise(function (resolve) {
      binaryMd5(queryData, resolve);
    });
  }).then(function (md5sum) {
    // can't use straight-up md5 alphabet, because
    // the char '/' is interpreted as being for attachments,
    // and + is also not url-safe
    md5sum = md5sum.replace(/\//g, '.').replace(/\+/g, '_');
    return '_local/' + md5sum;
  });
}
9724
9725function replicate(src, target, opts, returnValue, result) {
9726 var batches = []; // list of batches to be processed
9727 var currentBatch; // the batch currently being processed
9728 var pendingBatch = {
9729 seq: 0,
9730 changes: [],
9731 docs: []
9732 }; // next batch, not yet ready to be processed
9733 var writingCheckpoint = false; // true while checkpoint is being written
9734 var changesCompleted = false; // true when all changes received
9735 var replicationCompleted = false; // true when replication has completed
9736 // initial_last_seq is the state of the source db before
9737 // replication started, and it is _not_ updated during
9738 // replication or used anywhere else, as opposed to last_seq
9739 var initial_last_seq = 0;
9740 var last_seq = 0;
9741 var continuous = opts.continuous || opts.live || false;
9742 var batch_size = opts.batch_size || 100;
9743 var batches_limit = opts.batches_limit || 10;
9744 var style = opts.style || 'all_docs';
9745 var changesPending = false; // true while src.changes is running
9746 var doc_ids = opts.doc_ids;
9747 var selector = opts.selector;
9748 var repId;
9749 var checkpointer;
9750 var changedDocs = [];
9751 // Like couchdb, every replication gets a unique session id
9752 var session = uuid();
9753 var taskId;
9754
9755 result = result || {
9756 ok: true,
9757 start_time: new Date().toISOString(),
9758 docs_read: 0,
9759 docs_written: 0,
9760 doc_write_failures: 0,
9761 errors: []
9762 };
9763
9764 var changesOpts = {};
9765 returnValue.ready(src, target);
9766
  // Lazily create the Checkpointer once a deterministic replication id has
  // been computed; opts.checkpoint controls which side(s) persist it.
  function initCheckpointer() {
    if (checkpointer) {
      return Promise.resolve();
    }
    return generateReplicationId(src, target, opts).then(function (res) {
      repId = res;

      var checkpointOpts = {};
      if (opts.checkpoint === false) {
        checkpointOpts = { writeSourceCheckpoint: false, writeTargetCheckpoint: false };
      } else if (opts.checkpoint === 'source') {
        checkpointOpts = { writeSourceCheckpoint: true, writeTargetCheckpoint: false };
      } else if (opts.checkpoint === 'target') {
        checkpointOpts = { writeSourceCheckpoint: false, writeTargetCheckpoint: true };
      } else {
        checkpointOpts = { writeSourceCheckpoint: true, writeTargetCheckpoint: true };
      }

      checkpointer = new Checkpointer(src, target, repId, returnValue, checkpointOpts);
    });
  }
9788
  // Write the current batch's docs to the target with new_edits:false
  // (replication semantics), record write/failure counts, emit 'denied' for
  // auth rejections, and collect successfully-written docs in changedDocs.
  function writeDocs() {
    changedDocs = [];

    if (currentBatch.docs.length === 0) {
      return;
    }
    var docs = currentBatch.docs;
    var bulkOpts = {timeout: opts.timeout};
    return target.bulkDocs({docs, new_edits: false}, bulkOpts).then(function (res) {
      /* istanbul ignore if */
      if (returnValue.cancelled) {
        completeReplication();
        throw new Error('cancelled');
      }

      // `res` doesn't include full documents (which live in `docs`), so we create a map of
      // (id -> error), and check for errors while iterating over `docs`
      var errorsById = Object.create(null);
      res.forEach(function (res) {
        if (res.error) {
          errorsById[res.id] = res;
        }
      });

      var errorsNo = Object.keys(errorsById).length;
      result.doc_write_failures += errorsNo;
      result.docs_written += docs.length - errorsNo;

      docs.forEach(function (doc) {
        var error = errorsById[doc._id];
        if (error) {
          result.errors.push(error);
          // Normalize error name. i.e. 'Unauthorized' -> 'unauthorized' (eg Sync Gateway)
          var errorName = (error.name || '').toLowerCase();
          if (errorName === 'unauthorized' || errorName === 'forbidden') {
            returnValue.emit('denied', clone(error));
          } else {
            // any other write error aborts the whole batch
            throw error;
          }
        } else {
          changedDocs.push(doc);
        }
      });

    }, function (err) {
      result.doc_write_failures += docs.length;
      throw err;
    });
  }
9838
  // Finalize the current batch: surface a 'change' event, update the active
  // task's progress counters, write the checkpoint, and fetch more changes.
  function finishBatch() {
    if (currentBatch.error) {
      throw new Error('There was a problem getting docs.');
    }
    result.last_seq = last_seq = currentBatch.seq;
    var outResult = clone(result);
    if (changedDocs.length) {
      outResult.docs = changedDocs;
      // Attach 'pending' property if server supports it (CouchDB 2.0+)
      /* istanbul ignore if */
      if (typeof currentBatch.pending === 'number') {
        outResult.pending = currentBatch.pending;
        delete currentBatch.pending;
      }
      returnValue.emit('change', outResult);
    }
    writingCheckpoint = true;

    // Best-effort task progress update, run in parallel with the checkpoint
    // write.  NOTE(review): this promise has no .catch(); a rejected info()
    // would surface as an unhandled rejection — confirm that is intended.
    src.info().then(function (info) {
      var task = src.activeTasks.get(taskId);
      if (!currentBatch || !task) {
        return;
      }

      var completed = task.completed_items || 0;
      var total_items = parseInt(info.update_seq, 10) - parseInt(initial_last_seq, 10);
      src.activeTasks.update(taskId, {
        completed_items: completed + currentBatch.changes.length,
        total_items
      });
    });

    return checkpointer.writeCheckpoint(currentBatch.seq,
        session).then(function () {
      returnValue.emit('checkpoint', { 'checkpoint': currentBatch.seq });
      writingCheckpoint = false;
      /* istanbul ignore if */
      if (returnValue.cancelled) {
        completeReplication();
        throw new Error('cancelled');
      }
      currentBatch = undefined;
      getChanges();
    }).catch(function (err) {
      onCheckpointError(err);
      throw err;
    });
  }
9887
  // Ask the target which of the current batch's revisions it is missing and
  // store the answer on currentBatch.diffs.
  function getDiffs() {
    var diff = {};
    currentBatch.changes.forEach(function (change) {
      returnValue.emit('checkpoint', { 'revs_diff': change });
      // Couchbase Sync Gateway emits these, but we can ignore them
      /* istanbul ignore if */
      if (change.id === "_user/") {
        return;
      }
      diff[change.id] = change.changes.map(function (x) {
        return x.rev;
      });
    });
    return target.revsDiff(diff).then(function (diffs) {
      /* istanbul ignore if */
      if (returnValue.cancelled) {
        completeReplication();
        throw new Error('cancelled');
      }
      // currentBatch.diffs elements are deleted as the documents are written
      currentBatch.diffs = diffs;
    });
  }
9911
  // Fetch the missing docs (with attachments inlined) from the source and
  // queue them on the current batch; a partial fetch flags the batch as
  // errored so finishBatch aborts instead of checkpointing.
  function getBatchDocs() {
    return getDocs(src, target, currentBatch.diffs, returnValue).then(function (got) {
      currentBatch.error = !got.ok;
      got.docs.forEach(function (doc) {
        delete currentBatch.diffs[doc._id];
        result.docs_read++;
        currentBatch.docs.push(doc);
      });
    });
  }
9922
  // Pop the next queued batch and run it through the
  // diff -> fetch -> write -> checkpoint pipeline, then recurse.
  function startNextBatch() {
    if (returnValue.cancelled || currentBatch) {
      return;
    }
    if (batches.length === 0) {
      // nothing queued: try flushing the accumulating pending batch
      processPendingBatch(true);
      return;
    }
    currentBatch = batches.shift();
    returnValue.emit('checkpoint', { 'start_next_batch': currentBatch.seq });
    getDiffs()
      .then(getBatchDocs)
      .then(writeDocs)
      .then(finishBatch)
      .then(startNextBatch)
      .catch(function (err) {
        abortReplication('batch processing terminated with error', err);
      });
  }
9942
9943
  // Promote the accumulating pendingBatch onto the work queue once it is
  // full (batch_size), the changes feed has ended, or `immediate` forces a
  // flush.  When there is nothing left to do, emits 'paused' and/or
  // completes the replication.
  function processPendingBatch(immediate) {
    if (pendingBatch.changes.length === 0) {
      if (batches.length === 0 && !currentBatch) {
        if ((continuous && changesOpts.live) || changesCompleted) {
          returnValue.state = 'pending';
          returnValue.emit('paused');
        }
        if (changesCompleted) {
          completeReplication();
        }
      }
      return;
    }
    if (
      immediate ||
      changesCompleted ||
      pendingBatch.changes.length >= batch_size
    ) {
      batches.push(pendingBatch);
      pendingBatch = {
        seq: 0,
        changes: [],
        docs: []
      };
      if (returnValue.state === 'pending' || returnValue.state === 'stopped') {
        returnValue.state = 'active';
        returnValue.emit('active');
      }
      startNextBatch();
    }
  }
9975
9976
  // Drop all queued work and finish the replication with `err` as the fatal
  // error (no-op when the replication has already completed).
  function abortReplication(reason, err) {
    if (replicationCompleted) {
      return;
    }
    if (!err.message) {
      err.message = reason;
    }
    result.ok = false;
    result.status = 'aborting';
    batches = [];
    pendingBatch = {
      seq: 0,
      changes: [],
      docs: []
    };
    completeReplication(err);
  }
9994
9995
  // Finish the replication exactly once: finalize `result`, remove the
  // active task, then either emit 'complete', emit a fatal 'error' (auth
  // failures), or schedule a retry via backOff.
  function completeReplication(fatalError) {
    if (replicationCompleted) {
      return;
    }
    /* istanbul ignore if */
    if (returnValue.cancelled) {
      result.status = 'cancelled';
      // defer completion until the in-flight checkpoint write resolves
      if (writingCheckpoint) {
        return;
      }
    }
    result.status = result.status || 'complete';
    result.end_time = new Date().toISOString();
    result.last_seq = last_seq;
    replicationCompleted = true;

    src.activeTasks.remove(taskId, fatalError);

    if (fatalError) {
      // need to extend the error because Firefox considers ".result" read-only
      fatalError = createError(fatalError);
      fatalError.result = result;

      // Normalize error name. i.e. 'Unauthorized' -> 'unauthorized' (eg Sync Gateway)
      var errorName = (fatalError.name || '').toLowerCase();
      if (errorName === 'unauthorized' || errorName === 'forbidden') {
        returnValue.emit('error', fatalError);
        returnValue.removeAllListeners();
      } else {
        // transient failure: retry the whole replication after a back-off
        backOff(opts, returnValue, fatalError, function () {
          replicate(src, target, opts, returnValue);
        });
      }
    } else {
      returnValue.emit('complete', result);
      returnValue.removeAllListeners();
    }
  }
10034
10035 function onChange(change, pending, lastSeq) {
10036 /* istanbul ignore if */
10037 if (returnValue.cancelled) {
10038 return completeReplication();
10039 }
10040 // Attach 'pending' property if server supports it (CouchDB 2.0+)
10041 /* istanbul ignore if */
10042 if (typeof pending === 'number') {
10043 pendingBatch.pending = pending;
10044 }
10045
10046 var filter = filterChange(opts)(change);
10047 if (!filter) {
10048 // update processed items count by 1
10049 var task = src.activeTasks.get(taskId);
10050 if (task) {
10051 // we can assume that task exists here? shouldn't be deleted by here.
10052 var completed = task.completed_items || 0;
10053 src.activeTasks.update(taskId, {completed_items: ++completed});
10054 }
10055 return;
10056 }
10057 pendingBatch.seq = change.seq || lastSeq;
10058 pendingBatch.changes.push(change);
10059 returnValue.emit('checkpoint', { 'pending_batch': pendingBatch.seq });
10060 nextTick(function () {
10061 processPendingBatch(batches.length === 0 && changesOpts.live);
10062 });
10063 }
10064
10065
  // Called when one changes() request finishes. Either pages forward
  // (more results), or — at the end of the feed — writes a checkpoint,
  // switches continuous replications to live mode, and flushes.
  function onChangesComplete(changes) {
    changesPending = false;
    /* istanbul ignore if */
    if (returnValue.cancelled) {
      return completeReplication();
    }

    // if no results were returned then we're done,
    // else fetch more
    if (changes.results.length > 0) {
      changesOpts.since = changes.results[changes.results.length - 1].seq;
      getChanges();
      processPendingBatch(true);
    } else {

      var complete = function () {
        if (continuous) {
          // Continuous mode: re-subscribe as a live feed and keep going.
          changesOpts.live = true;
          getChanges();
        } else {
          changesCompleted = true;
        }
        processPendingBatch(true);
      };

      // update the checkpoint so we start from the right seq next time
      if (!currentBatch && changes.results.length === 0) {
        writingCheckpoint = true;
        checkpointer.writeCheckpoint(changes.last_seq,
            session).then(function () {
          writingCheckpoint = false;
          result.last_seq = last_seq = changes.last_seq;
          if (returnValue.cancelled) {
            completeReplication();
            // Break out of the promise chain; replication is finished.
            throw new Error('cancelled');
          } else {
            complete();
          }
        })
        .catch(onCheckpointError);
      } else {
        complete();
      }
    }
  }
10111
10112
  // The changes feed rejected: abort the replication with the error,
  // unless the user already cancelled (then just finalize quietly).
  function onChangesError(err) {
    changesPending = false;
    /* istanbul ignore if */
    if (returnValue.cancelled) {
      return completeReplication();
    }
    abortReplication('changes rejected', err);
  }
10121
10122
  // Issues the next changes() request, wiring 'cancel' so an in-flight
  // request is aborted. Applies backpressure: skips the fetch when one is
  // already pending, the feed is complete, or the batch queue is full.
  function getChanges() {
    if (!(
      !changesPending &&
      !changesCompleted &&
      batches.length < batches_limit
    )) {
      return;
    }
    changesPending = true;
    // NOTE: `changes` below is a var, so these closures see it via hoisting.
    function abortChanges() {
      changes.cancel();
    }
    function removeListener() {
      returnValue.removeListener('cancel', abortChanges);
    }

    if (returnValue._changes) { // remove old changes() and listeners
      returnValue.removeListener('cancel', returnValue._abortChanges);
      returnValue._changes.cancel();
    }
    returnValue.once('cancel', abortChanges);

    var changes = src.changes(changesOpts)
      .on('change', onChange);
    // Detach the cancel hook however the request settles.
    changes.then(removeListener, removeListener);
    changes.then(onChangesComplete)
      .catch(onChangesError);

    if (opts.retry) {
      // save for later so we can cancel if necessary
      returnValue._changes = changes;
      returnValue._abortChanges = abortChanges;
    }
  }
10157
10158 function createTask(checkpoint) {
10159 return src.info().then(function (info) {
10160 var total_items = typeof opts.since === 'undefined' ?
10161 parseInt(info.update_seq, 10) - parseInt(checkpoint, 10) :
10162 parseInt(info.update_seq, 10);
10163
10164 taskId = src.activeTasks.add({
10165 name: `${continuous ? 'continuous ' : ''}replication from ${info.db_name}` ,
10166 total_items,
10167 });
10168
10169 return checkpoint;
10170 });
10171 }
10172
10173 function startChanges() {
10174 initCheckpointer().then(function () {
10175 /* istanbul ignore if */
10176 if (returnValue.cancelled) {
10177 completeReplication();
10178 return;
10179 }
10180 return checkpointer.getCheckpoint().then(createTask).then(function (checkpoint) {
10181 last_seq = checkpoint;
10182 initial_last_seq = checkpoint;
10183 changesOpts = {
10184 since: last_seq,
10185 limit: batch_size,
10186 batch_size,
10187 style,
10188 doc_ids,
10189 selector,
10190 return_docs: true // required so we know when we're done
10191 };
10192 if (opts.filter) {
10193 if (typeof opts.filter !== 'string') {
10194 // required for the client-side filter in onChange
10195 changesOpts.include_docs = true;
10196 } else { // ddoc filter
10197 changesOpts.filter = opts.filter;
10198 }
10199 }
10200 if ('heartbeat' in opts) {
10201 changesOpts.heartbeat = opts.heartbeat;
10202 }
10203 if ('timeout' in opts) {
10204 changesOpts.timeout = opts.timeout;
10205 }
10206 if (opts.query_params) {
10207 changesOpts.query_params = opts.query_params;
10208 }
10209 if (opts.view) {
10210 changesOpts.view = opts.view;
10211 }
10212 getChanges();
10213 });
10214 }).catch(function (err) {
10215 abortReplication('getCheckpoint rejected with ', err);
10216 });
10217 }
10218
  /* istanbul ignore next */
  // A checkpoint write failed: clear the in-flight flag and abort.
  function onCheckpointError(err) {
    writingCheckpoint = false;
    abortReplication('writeCheckpoint completed with error', err);
  }
10224
  /* istanbul ignore if */
  if (returnValue.cancelled) { // cancelled immediately
    completeReplication();
    return;
  }

  if (!returnValue._addedListeners) {
    // Wire up cancellation and the optional node-style completion
    // callback only once, even if replicate() is re-entered on retry.
    returnValue.once('cancel', completeReplication);

    if (typeof opts.complete === 'function') {
      returnValue.once('error', opts.complete);
      returnValue.once('complete', function (result) {
        opts.complete(null, result);
      });
    }
    returnValue._addedListeners = true;
  }

  if (typeof opts.since === 'undefined') {
    startChanges();
  } else {
    // An explicit start sequence was supplied: persist it as the
    // checkpoint before the changes feed starts.
    initCheckpointer().then(function () {
      writingCheckpoint = true;
      return checkpointer.writeCheckpoint(opts.since, session);
    }).then(function () {
      writingCheckpoint = false;
      /* istanbul ignore if */
      if (returnValue.cancelled) {
        completeReplication();
        return;
      }
      last_seq = opts.since;
      startChanges();
    }).catch(onCheckpointError);
  }
}
10261
10262// We create a basic promise so the caller can cancel the replication possibly
10263// before we have actually started listening to changes etc
// We create a basic promise so the caller can cancel the replication possibly
// before we have actually started listening to changes etc
class Replication extends EE {
  constructor() {
    super();
    this.cancelled = false;
    this.state = 'pending';
    // Thenable facade: settles on the first 'complete' or 'error' event.
    const settled = new Promise((resolve, reject) => {
      this.once('complete', resolve);
      this.once('error', reject);
    });
    this.then = (onFulfilled, onRejected) => settled.then(onFulfilled, onRejected);
    this.catch = (onRejected) => settled.catch(onRejected);
    // As we allow error handling via "error" event as well,
    // put a stub in here so that rejecting never throws UnhandledError.
    this.catch(() => {});
  }

  // Marks the replication cancelled and notifies listeners.
  cancel() {
    this.cancelled = true;
    this.state = 'cancelled';
    this.emit('cancel');
  }

  // Auto-cancels when either database is destroyed; listeners are removed
  // again once the replication settles. Idempotent.
  ready(src, target) {
    if (this._readyCalled) {
      return;
    }
    this._readyCalled = true;

    const onDestroy = () => this.cancel();
    src.once('destroyed', onDestroy);
    target.once('destroyed', onDestroy);
    const cleanup = () => {
      src.removeListener('destroyed', onDestroy);
      target.removeListener('destroyed', onDestroy);
    };
    this.once('complete', cleanup);
    this.once('error', cleanup);
  }
}
10309
// Coerces `db` to a PouchDB instance: strings are opened with the
// configured constructor, anything else is assumed to already be a db.
function toPouch(db, opts) {
  if (typeof db !== 'string') {
    return db;
  }
  const PouchConstructor = opts.PouchConstructor;
  return new PouchConstructor(db, opts);
}
10318
// Public replicate() entry point: normalizes the (opts, callback)
// argument shuffle, validates options, and kicks off replicate() with a
// fresh Replication handle that is returned to the caller.
function replicateWrapper(src, target, opts, callback) {
  // Accept (src, target, callback) and (src, target) call shapes.
  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }
  if (opts === undefined) {
    opts = {};
  }

  if (opts.doc_ids && !Array.isArray(opts.doc_ids)) {
    throw createError(BAD_REQUEST,
      "`doc_ids` filter parameter is not a list.");
  }

  opts.complete = callback;
  opts = clone(opts);
  opts.continuous = opts.continuous || opts.live;
  opts.retry = ('retry' in opts) ? opts.retry : false;
  opts.PouchConstructor = opts.PouchConstructor || this;

  const returnValue = new Replication(opts);
  replicate(toPouch(src, opts), toPouch(target, opts), opts, returnValue);
  return returnValue;
}
10345
// Public sync() entry point: normalizes arguments and returns a Sync
// object driving push and pull replications between the two databases.
function sync(src, target, opts, callback) {
  // Accept (src, target, callback) and (src, target) call shapes.
  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }
  opts = clone(opts === undefined ? {} : opts);
  opts.PouchConstructor = opts.PouchConstructor || this;
  return new Sync(toPouch(src, opts), toPouch(target, opts), opts, callback);
}
10360
// Drives a bidirectional replication: one push and one pull Replication,
// re-emitting their 'change'/'denied'/'paused'/'active' events with a
// direction tag, and settling (then/catch) when both sides finish.
class Sync extends EE {
  constructor(src, target, opts, callback) {
    super();
    this.canceled = false;

    // Per-direction options override the shared ones.
    const optsPush = opts.push ? Object.assign({}, opts, opts.push) : opts;
    const optsPull = opts.pull ? Object.assign({}, opts, opts.pull) : opts;

    this.push = replicateWrapper(src, target, optsPush);
    this.pull = replicateWrapper(target, src, optsPull);

    this.pushPaused = true;
    this.pullPaused = true;

    // Forwarders that tag each underlying event with its direction.
    const pullChange = (change) => {
      this.emit('change', {
        direction: 'pull',
        change
      });
    };
    const pushChange = (change) => {
      this.emit('change', {
        direction: 'push',
        change
      });
    };
    const pushDenied = (doc) => {
      this.emit('denied', {
        direction: 'push',
        doc
      });
    };
    const pullDenied = (doc) => {
      this.emit('denied', {
        direction: 'pull',
        doc
      });
    };
    // 'paused' is only re-emitted once BOTH directions are paused;
    // 'active' only when the other direction is still paused (i.e. on the
    // transition from fully-paused to active).
    const pushPaused = () => {
      this.pushPaused = true;
      /* istanbul ignore if */
      if (this.pullPaused) {
        this.emit('paused');
      }
    };
    const pullPaused = () => {
      this.pullPaused = true;
      /* istanbul ignore if */
      if (this.pushPaused) {
        this.emit('paused');
      }
    };
    const pushActive = () => {
      this.pushPaused = false;
      /* istanbul ignore if */
      if (this.pullPaused) {
        this.emit('active', {
          direction: 'push'
        });
      }
    };
    const pullActive = () => {
      this.pullPaused = false;
      /* istanbul ignore if */
      if (this.pushPaused) {
        this.emit('active', {
          direction: 'pull'
        });
      }
    };

    // Tracks which directions have dropped a given forwarder, so the
    // Sync-level listeners are removed only when both sides have.
    let removed = {};

    const removeAll = (type) => { // type is 'push' or 'pull'
      return (event, func) => {
        const isChange = event === 'change' &&
          (func === pullChange || func === pushChange);
        const isDenied = event === 'denied' &&
          (func === pullDenied || func === pushDenied);
        const isPaused = event === 'paused' &&
          (func === pullPaused || func === pushPaused);
        const isActive = event === 'active' &&
          (func === pullActive || func === pushActive);

        if (isChange || isDenied || isPaused || isActive) {
          if (!(event in removed)) {
            removed[event] = {};
          }
          removed[event][type] = true;
          if (Object.keys(removed[event]).length === 2) {
            // both push and pull have asked to be removed
            this.removeAllListeners(event);
          }
        }
      };
    };

    if (opts.live) {
      // In live mode, if one direction completes (e.g. its db was
      // destroyed), cancel the other so sync as a whole stops.
      this.push.on('complete', this.pull.cancel.bind(this.pull));
      this.pull.on('complete', this.push.cancel.bind(this.push));
    }

    // Attach a forwarder at most once per (emitter, event).
    function addOneListener(ee, event, listener) {
      if (ee.listeners(event).indexOf(listener) == -1) {
        ee.on(event, listener);
      }
    }

    // Lazily subscribe to the underlying replications only when a caller
    // subscribes to the corresponding Sync event.
    this.on('newListener', function (event) {
      if (event === 'change') {
        addOneListener(this.pull, 'change', pullChange);
        addOneListener(this.push, 'change', pushChange);
      } else if (event === 'denied') {
        addOneListener(this.pull, 'denied', pullDenied);
        addOneListener(this.push, 'denied', pushDenied);
      } else if (event === 'active') {
        addOneListener(this.pull, 'active', pullActive);
        addOneListener(this.push, 'active', pushActive);
      } else if (event === 'paused') {
        addOneListener(this.pull, 'paused', pullPaused);
        addOneListener(this.push, 'paused', pushPaused);
      }
    });

    // ...and unsubscribe when the last caller goes away.
    this.on('removeListener', function (event) {
      if (event === 'change') {
        this.pull.removeListener('change', pullChange);
        this.push.removeListener('change', pushChange);
      } else if (event === 'denied') {
        this.pull.removeListener('denied', pullDenied);
        this.push.removeListener('denied', pushDenied);
      } else if (event === 'active') {
        this.pull.removeListener('active', pullActive);
        this.push.removeListener('active', pushActive);
      } else if (event === 'paused') {
        this.pull.removeListener('paused', pullPaused);
        this.push.removeListener('paused', pushPaused);
      }
    });

    this.pull.on('removeListener', removeAll('pull'));
    this.push.on('removeListener', removeAll('push'));

    // Settle once both directions have settled.
    const promise = Promise.all([
      this.push,
      this.pull
    ]).then((resp) => {
      const out = {
        push: resp[0],
        pull: resp[1]
      };
      this.emit('complete', out);
      if (callback) {
        callback(null, out);
      }
      this.removeAllListeners();
      return out;
    }, (err) => {
      this.cancel();
      if (callback) {
        // if there's a callback, then the callback can receive
        // the error event
        callback(err);
      } else {
        // if there's no callback, then we're safe to emit an error
        // event, which would otherwise throw an unhandled error
        // due to 'error' being a special event in EventEmitters
        this.emit('error', err);
      }
      this.removeAllListeners();
      if (callback) {
        // no sense throwing if we're already emitting an 'error' event
        throw err;
      }
    });

    this.then = function (success, err) {
      return promise.then(success, err);
    };

    this.catch = function (err) {
      return promise.catch(err);
    };
  }

  // Cancels both directions; idempotent.
  cancel() {
    if (!this.canceled) {
      this.canceled = true;
      this.push.cancel();
      this.pull.cancel();
    }
  }
}
10554
// Plugin installer: adds static PouchDB.replicate/PouchDB.sync plus the
// per-instance `db.replicate.to/from` pair and `db.sync`.
function replication(PouchDB) {
  PouchDB.replicate = replicateWrapper;
  PouchDB.sync = sync;

  // `db.replicate` lazily builds its {to, from} pair on first access and
  // caches it on the instance.
  Object.defineProperty(PouchDB.prototype, 'replicate', {
    get: function () {
      const self = this;
      if (this.replicateMethods === undefined) {
        this.replicateMethods = {
          from: (other, opts, callback) =>
            self.constructor.replicate(other, self, opts, callback),
          to: (other, opts, callback) =>
            self.constructor.replicate(self, other, opts, callback)
        };
      }
      return this.replicateMethods;
    }
  });

  PouchDB.prototype.sync = function (dbName, opts, callback) {
    return this.constructor.sync(this, dbName, opts, callback);
  };
}
10580
// Register the default plugin stack for this bundle: IndexedDB adapter,
// HTTP adapter, map/reduce queries, and replication/sync.
PouchDB.plugin(IDBPouch)
  .plugin(HttpPouch$1)
  .plugin(mapreduce)
  .plugin(replication);

// Pull from src because pouchdb-node/pouchdb-browser themselves

export default PouchDB;