UNPKG

300 kBJavaScriptView Raw
1'use strict';
2
// Unwrap a transpiled ES-module namespace: prefer its `default` export,
// otherwise hand back the value unchanged.
function _interopDefault (ex) {
  if (ex && typeof ex === 'object' && 'default' in ex) {
    return ex['default'];
  }
  return ex;
}
4
5var crypto = _interopDefault(require('crypto'));
6var nodeFetch = require('node-fetch');
7var nodeFetch__default = _interopDefault(nodeFetch);
8var fetchCookie = _interopDefault(require('fetch-cookie'));
9var uuid = require('uuid');
10var levelup = _interopDefault(require('levelup'));
11var ltgt = _interopDefault(require('ltgt'));
12var Codec = _interopDefault(require('level-codec'));
13var ReadableStreamCore = _interopDefault(require('readable-stream'));
14var Deque = _interopDefault(require('double-ended-queue'));
15var vuvuzela = _interopDefault(require('vuvuzela'));
16var fs = _interopDefault(require('fs'));
17var path = _interopDefault(require('path'));
18var level = _interopDefault(require('level'));
19var through2 = require('through2');
20var LevelWriteStream = _interopDefault(require('level-write-stream'));
21var vm = _interopDefault(require('vm'));
22var EE = _interopDefault(require('events'));
23
// True when `object` is a binary payload — on Node that means a Buffer.
function isBinaryObject(object) {
  return Buffer.prototype.isPrototypeOf(object);
}
27
// Copy a Buffer so the caller can mutate the clone without touching
// the original's backing memory.
var cloneBinaryObject = function (buffer) {
  return Buffer.from(buffer);
};
29
// most of this is borrowed from lodash.isPlainObject:
// https://github.com/fis-components/lodash.isplainobject/
// blob/29c358140a74f252aeb08c9eb28bef86f2217d4a/index.js

var funcToString = Function.prototype.toString;
var objectCtorString = funcToString.call(Object);

// True for objects created via `{}`/`new Object()` or with a null
// prototype; false for class instances, arrays, and other exotica.
function isPlainObject(value) {
  var proto = Object.getPrototypeOf(value);
  /* istanbul ignore if */
  if (proto === null) { // not sure when this happens, but I guess it can
    return true;
  }
  var Ctor = proto.constructor;
  if (typeof Ctor !== 'function') {
    return false;
  }
  return Ctor instanceof Ctor && funcToString.call(Ctor) === objectCtorString;
}
47
/**
 * Deep-clone a JSON-ish value for safe internal use.
 *
 * Semantics (deliberate, keep in sync with all adapters):
 * - primitives and falsy values are returned as-is;
 * - arrays are cloned element-by-element (recursively);
 * - finite Dates are converted to ISO strings, to avoid inconsistencies
 *   between IndexedDB and other backends;
 * - Buffers are copied via cloneBinaryObject;
 * - non-plain objects (class instances, Workers, …) are returned by
 *   REFERENCE, not cloned;
 * - own properties whose cloned value is `undefined` are dropped,
 *   mirroring JSON.stringify behavior.
 */
function clone(object) {
  var newObject;
  var i;
  var len;

  if (!object || typeof object !== 'object') {
    return object;
  }

  if (Array.isArray(object)) {
    newObject = [];
    for (i = 0, len = object.length; i < len; i++) {
      newObject[i] = clone(object[i]);
    }
    return newObject;
  }

  // special case: to avoid inconsistencies between IndexedDB
  // and other backends, we automatically stringify Dates
  if (object instanceof Date && isFinite(object)) {
    return object.toISOString();
  }

  if (isBinaryObject(object)) {
    return cloneBinaryObject(object);
  }

  if (!isPlainObject(object)) {
    return object; // don't clone objects like Workers
  }

  newObject = {};
  for (i in object) {
    /* istanbul ignore else */
    if (Object.prototype.hasOwnProperty.call(object, i)) {
      var value = clone(object[i]);
      if (typeof value !== 'undefined') {
        newObject[i] = value;
      }
    }
  }
  return newObject;
}
91
// Guard a callback so that invoking it a second time throws loudly
// instead of silently re-running — a smoke test for adapter bugs.
function once(fun) {
  var alreadyCalled = false;
  return function (...args) {
    /* istanbul ignore if */
    if (alreadyCalled) {
      // this is a smoke test and should never actually happen
      throw new Error('once called more than once');
    }
    alreadyCalled = true;
    fun.apply(this, args);
  };
}
105
/**
 * Convert a callback-style function into one that supports BOTH styles:
 * the returned wrapper always returns a Promise, and if the caller's last
 * argument is a function it is additionally treated as a node-style
 * callback and invoked with (err, result).
 *
 * Arguments are deep-cloned (see clone) before the call so callers can't
 * observe internal mutation. If `func` itself returns a thenable, that
 * thenable wins and the injected callback is ignored.
 */
function toPromise(func) {
  //create the function we will be returning
  return function (...args) {
    // Clone arguments
    args = clone(args);
    var self = this;
    // if the last argument is a function, assume its a callback
    var usedCB = (typeof args[args.length - 1] === 'function') ? args.pop() : false;
    var promise = new Promise(function (fulfill, reject) {
      var resp;
      try {
        // `once` makes double invocation a hard error rather than a
        // silent no-op, so broken adapters fail fast
        var callback = once(function (err, mesg) {
          if (err) {
            reject(err);
          } else {
            fulfill(mesg);
          }
        });
        // create a callback for this invocation
        // apply the function in the orig context
        args.push(callback);
        resp = func.apply(self, args);
        if (resp && typeof resp.then === 'function') {
          fulfill(resp);
        }
      } catch (e) {
        reject(e);
      }
    });
    // if there is a callback, call it back
    if (usedCB) {
      promise.then(function (result) {
        usedCB(null, result);
      }, usedCB);
    }
    return promise;
  };
}
144
/**
 * Emit 'debug' events on the PouchDB constructor for an API call and,
 * by wrapping the trailing callback in `args`, for its eventual response.
 * No-op unless someone is listening for 'debug'.
 *
 * NOTE: mutates `args` in place — the last element is replaced with a
 * logging wrapper around the original callback.
 */
function logApiCall(self, name, args) {
  /* istanbul ignore if */
  if (self.constructor.listeners('debug').length) {
    var logArgs = ['api', self.name, name];
    // everything except the trailing callback is loggable payload
    for (var i = 0; i < args.length - 1; i++) {
      logArgs.push(args[i]);
    }
    self.constructor.emit('debug', logArgs);

    // override the callback itself to log the response
    var origCallback = args[args.length - 1];
    args[args.length - 1] = function (err, res) {
      var responseArgs = ['api', self.name, name];
      responseArgs = responseArgs.concat(
        err ? ['error', err] : ['success', res]
      );
      self.constructor.emit('debug', responseArgs);
      origCallback(err, res);
    };
  }
}
166
/**
 * Wrap an adapter method so it (a) supports promise + callback styles via
 * toPromise, (b) rejects immediately if the database is closed/destroyed,
 * (c) logs debug events, and (d) is queued on the taskqueue until the
 * adapter is ready. When queued, the method is re-dispatched through
 * `self[name]` so the real (possibly replaced) implementation runs.
 */
function adapterFun(name, callback) {
  return toPromise(function (...args) {
    if (this._closed) {
      return Promise.reject(new Error('database is closed'));
    }
    if (this._destroyed) {
      return Promise.reject(new Error('database is destroyed'));
    }
    var self = this;
    logApiCall(self, name, args);
    if (!this.taskqueue.isReady) {
      // defer until the adapter finishes setup; failure during setup
      // propagates to this call's promise
      return new Promise(function (fulfill, reject) {
        self.taskqueue.addTask(function (failed) {
          if (failed) {
            reject(failed);
          } else {
            fulfill(self[name].apply(self, args));
          }
        });
      });
    }
    return callback.apply(this, args);
  });
}
191
// like underscore/lodash _.pick(): copy only the listed properties that
// exist on (or are inherited by) `obj` into a fresh object.
function pick(obj, arr) {
  var res = {};
  for (var prop of arr) {
    if (prop in obj) {
      res[prop] = obj[prop];
    }
  }
  return res;
}
203
// Most browsers throttle concurrent requests at 6, so it's silly
// to shim _bulk_get by trying to launch potentially hundreds of requests
// and then letting the majority time out. We can handle this ourselves.
// (Consumed by bulkGet's nextBatch below.)
var MAX_NUM_CONCURRENT_REQUESTS = 6;
208
// Identity — used both as the default result transformer and as a
// truthiness predicate for Array#filter.
function identityFunction(x) {
  return x;
}
212
// Wrap a plain get() result so it looks like the response to an
// `open_revs` request: a one-element array of {ok: doc}.
function formatResultForOpenRevsGet(result) {
  return [{ok: result}];
}
218
// shim for P/CouchDB adapters that don't directly implement _bulk_get
/**
 * Emulate the _bulk_get API on top of individual db.get() calls.
 *
 * Requests for the same doc id are consolidated into a single get() with
 * an `open_revs` list; at most MAX_NUM_CONCURRENT_REQUESTS gets are in
 * flight at once (each completion launches the next batch). Results are
 * collected per-doc, then flattened into {results: [{id, docs: [...]}]}
 * with one entry per returned revision, and passed to `callback`.
 * Individual get() failures become {error: err} entries rather than
 * failing the whole call.
 */
function bulkGet(db, opts, callback) {
  var requests = opts.docs;

  // consolidate into one request per doc if possible
  var requestsById = new Map();
  requests.forEach(function (request) {
    if (requestsById.has(request.id)) {
      requestsById.get(request.id).push(request);
    } else {
      requestsById.set(request.id, [request]);
    }
  });

  var numDocs = requestsById.size;
  var numDone = 0;
  var perDocResults = new Array(numDocs);

  // flatten per-doc results into the _bulk_get response shape
  function collapseResultsAndFinish() {
    var results = [];
    perDocResults.forEach(function (res) {
      res.docs.forEach(function (info) {
        results.push({
          id: res.id,
          docs: [info]
        });
      });
    });
    callback(null, {results});
  }

  function checkDone() {
    if (++numDone === numDocs) {
      collapseResultsAndFinish();
    }
  }

  // store by original index so output order matches input order
  function gotResult(docIndex, id, docs) {
    perDocResults[docIndex] = {id, docs};
    checkDone();
  }

  var allRequests = [];
  requestsById.forEach(function (value, key) {
    allRequests.push(key);
  });

  var i = 0;

  // launch up to MAX_NUM_CONCURRENT_REQUESTS further gets
  function nextBatch() {

    if (i >= allRequests.length) {
      return;
    }

    var upTo = Math.min(i + MAX_NUM_CONCURRENT_REQUESTS, allRequests.length);
    var batch = allRequests.slice(i, upTo);
    processBatch(batch, i);
    i += batch.length;
  }

  function processBatch(batch, offset) {
    batch.forEach(function (docId, j) {
      var docIdx = offset + j;
      var docRequests = requestsById.get(docId);

      // just use the first request as the "template"
      // TODO: The _bulk_get API allows for more subtle use cases than this,
      // but for now it is unlikely that there will be a mix of different
      // "atts_since" or "attachments" in the same request, since it's just
      // replicate.js that is using this for the moment.
      // Also, atts_since is aspirational, since we don't support it yet.
      var docOpts = pick(docRequests[0], ['atts_since', 'attachments']);
      docOpts.open_revs = docRequests.map(function (request) {
        // rev is optional, open_revs disallowed
        return request.rev;
      });

      // remove falsey / undefined revisions
      docOpts.open_revs = docOpts.open_revs.filter(identityFunction);

      var formatResult = identityFunction;

      if (docOpts.open_revs.length === 0) {
        delete docOpts.open_revs;

        // when fetching only the "winning" leaf,
        // transform the result so it looks like an open_revs
        // request
        formatResult = formatResultForOpenRevsGet;
      }

      // globally-supplied options
      ['revs', 'attachments', 'binary', 'ajax', 'latest'].forEach(function (param) {
        if (param in opts) {
          docOpts[param] = opts[param];
        }
      });
      db.get(docId, docOpts, function (err, res) {
        var result;
        /* istanbul ignore if */
        if (err) {
          result = [{error: err}];
        } else {
          result = formatResult(res);
        }
        gotResult(docIdx, docId, result);
        // keep the pipeline full: each completion triggers more work
        nextBatch();
      });
    });
  }

  nextBatch();

}
334
// in Node of course this is false
// (the browser build has a real feature-detect; keeping the stub lets
// the Changes class below share one code path)
function hasLocalStorage() {
  return false;
}
339
// Schedule `fn` as a microtask: prefer the standard queueMicrotask,
// otherwise fall back to a resolved-promise continuation (equivalent
// ordering, slightly more allocation).
const nextTick = typeof queueMicrotask === "function"
  ? queueMicrotask
  : function nextTick(fn) {
    Promise.resolve().then(fn);
  };
345
/**
 * Cross-instance change notifier. Databases `notify(dbName)` after a
 * write; interested parties register per-id listeners that re-run a
 * changes() read. In browsers localStorage is additionally poked so
 * other windows wake up; on Node hasLocalStorage() is always false and
 * only the in-process EventEmitter path is active.
 */
class Changes extends EE {
  constructor() {
    super();

    // id -> eventFunction, so removeListener can detach the exact handler
    this._listeners = {};

    if (hasLocalStorage()) {
      addEventListener("storage", (e) => {
        this.emit(e.key);
      });
    }
  }

  addListener(dbName, id, db, opts) {
    if (this._listeners[id]) {
      return;
    }
    // `inprogress` is a tiny state machine: false -> idle,
    // true -> a changes() read is running,
    // 'waiting' -> a notification arrived mid-read; re-run on complete.
    var inprogress = false;
    var self = this;
    function eventFunction() {
      if (!self._listeners[id]) {
        return;
      }
      if (inprogress) {
        inprogress = 'waiting';
        return;
      }
      inprogress = true;
      var changesOpts = pick(opts, [
        'style', 'include_docs', 'attachments', 'conflicts', 'filter',
        'doc_ids', 'view', 'since', 'query_params', 'binary', 'return_docs'
      ]);

      function onError() {
        inprogress = false;
      }

      db.changes(changesOpts).on('change', function (c) {
        // advance the caller's `since` cursor and forward new changes
        if (c.seq > opts.since && !opts.cancelled) {
          opts.since = c.seq;
          opts.onChange(c);
        }
      }).on('complete', function () {
        if (inprogress === 'waiting') {
          nextTick(eventFunction);
        }
        inprogress = false;
      }).on('error', onError);
    }
    this._listeners[id] = eventFunction;
    this.on(dbName, eventFunction);
  }

  removeListener(dbName, id) {
    if (!(id in this._listeners)) {
      return;
    }
    super.removeListener(dbName, this._listeners[id]);
    delete this._listeners[id];
  }

  notifyLocalWindows(dbName) {
    //do a useless change on a storage thing
    //in order to get other windows's listeners to activate
    if (hasLocalStorage()) {
      localStorage[dbName] = (localStorage[dbName] === "a") ? "b" : "a";
    }
  }

  notify(dbName) {
    this.emit(dbName);
    this.notifyLocalWindows(dbName);
  }
}
420
/**
 * Invoke console[method](...args) only if a console with that method
 * exists, so logging can never throw in console-less environments.
 * Uses rest parameters instead of the legacy `arguments` slicing.
 * @param {string} method - console method name ('log', 'warn', 'error', …)
 * @param {...*} args - values forwarded to the console method
 */
function guardedConsole(method, ...args) {
  /* istanbul ignore else */
  if (typeof console !== 'undefined' && typeof console[method] === 'function') {
    console[method].apply(console, args);
  }
}
428
/**
 * Random integer used for replication back-off, roughly in [min, max],
 * capped so it never exceeds 10 minutes. If `max` is missing or not
 * greater than `min`, the range doubles from `min` instead.
 */
function randomNumber(min, max) {
  var maxTimeout = 600000; // Hard-coded default of 10 minutes
  min = parseInt(min, 10) || 0;
  max = parseInt(max, 10);
  if (Number.isNaN(max) || max <= min) {
    max = (min || 1) << 1; //doubling
  } else {
    max = max + 1;
  }
  // In order to not exceed maxTimeout, pick a random value between half of maxTimeout and maxTimeout
  if (max > maxTimeout) {
    min = maxTimeout >> 1; // divide by two
    max = maxTimeout;
  }
  var span = max - min;
  var picked = Math.random() * span + min;
  return ~~picked; // ~~ coerces to an int, but fast.
}
448
/**
 * Default back-off strategy for replication retries: given the previous
 * delay `min`, return a new randomized delay. A falsy `min` (first
 * retry) draws from [0, 2000]; afterwards randomNumber's doubling
 * branch kicks in because max (0) <= min.
 */
function defaultBackOff(min) {
  var max = 0;
  if (!min) {
    max = 2000;
  }
  return randomNumber(min, max);
}
456
// We assume Node users don't need to see this warning
// (no-op stand-in for the browser build's explain404 helper)
var res = function () {};
459
/**
 * Base class for PouchDB errors, mirroring CouchDB's error JSON:
 * {status, name, message, error}. Instances below are shared templates;
 * createError() copies their properties onto fresh objects.
 */
class PouchError extends Error {
  constructor(status, error, reason) {
    super();
    this.status = status;   // HTTP-style status code
    this.name = error;      // CouchDB error name, e.g. 'conflict'
    this.message = reason;  // human-readable reason
    this.error = true;
  }

  toString() {
    // NOTE: `reason` is not set in this constructor; createError() adds
    // it on derived instances, so it may serialize as undefined here.
    return JSON.stringify({
      status: this.status,
      name: this.name,
      message: this.message,
      reason: this.reason
    });
  }
}
478
// Shared error templates, one per CouchDB-compatible error condition.
// These exact status codes and strings are part of the wire-compatible
// API surface — do not edit them (including the long-standing
// 'levelDB_went_went_bad' spelling, which is kept as-is upstream).
var UNAUTHORIZED = new PouchError(401, 'unauthorized', "Name or password is incorrect.");
var MISSING_BULK_DOCS = new PouchError(400, 'bad_request', "Missing JSON list of 'docs'");
var MISSING_DOC = new PouchError(404, 'not_found', 'missing');
var REV_CONFLICT = new PouchError(409, 'conflict', 'Document update conflict');
var INVALID_ID = new PouchError(400, 'bad_request', '_id field must contain a string');
var MISSING_ID = new PouchError(412, 'missing_id', '_id is required for puts');
var RESERVED_ID = new PouchError(400, 'bad_request', 'Only reserved document ids may start with underscore.');
var NOT_OPEN = new PouchError(412, 'precondition_failed', 'Database not open');
var UNKNOWN_ERROR = new PouchError(500, 'unknown_error', 'Database encountered an unknown error');
var BAD_ARG = new PouchError(500, 'badarg', 'Some query argument is invalid');
var INVALID_REQUEST = new PouchError(400, 'invalid_request', 'Request was invalid');
var QUERY_PARSE_ERROR = new PouchError(400, 'query_parse_error', 'Some query parameter is invalid');
var DOC_VALIDATION = new PouchError(500, 'doc_validation', 'Bad special document member');
var BAD_REQUEST = new PouchError(400, 'bad_request', 'Something wrong with the request');
var NOT_AN_OBJECT = new PouchError(400, 'bad_request', 'Document must be a JSON object');
var DB_MISSING = new PouchError(404, 'not_found', 'Database not found');
var IDB_ERROR = new PouchError(500, 'indexed_db_went_bad', 'unknown');
var WSQ_ERROR = new PouchError(500, 'web_sql_went_bad', 'unknown');
var LDB_ERROR = new PouchError(500, 'levelDB_went_went_bad', 'unknown');
var FORBIDDEN = new PouchError(403, 'forbidden', 'Forbidden by design doc validate_doc_update function');
var INVALID_REV = new PouchError(400, 'bad_request', 'Invalid rev format');
var FILE_EXISTS = new PouchError(412, 'file_exists', 'The database could not be created, the file already exists.');
var MISSING_STUB = new PouchError(412, 'missing_stub', 'A pre-existing attachment stub wasn\'t found');
var INVALID_URL = new PouchError(413, 'invalid_url', 'Provided URL is invalid');
503
/**
 * Build a fresh error object from one of the shared PouchError templates,
 * optionally overriding its `reason`. Data (not function) properties are
 * copied onto the new object so it survives JSON round-trips, while the
 * prototype stays PouchError.prototype so `instanceof` and toString()
 * keep working.
 */
function createError(error, reason) {
  function CustomPouchError(reason) {
    // inherit error properties from our parent error manually
    // so as to allow proper JSON parsing.
    var names = Object.getOwnPropertyNames(error);
    for (var i = 0, len = names.length; i < len; i++) {
      if (typeof error[names[i]] !== 'function') {
        this[names[i]] = error[names[i]];
      }
    }

    // templates built without a captured stack get one from here
    if (this.stack === undefined) {
      this.stack = (new Error()).stack;
    }

    if (reason !== undefined) {
      this.reason = reason;
    }
  }
  CustomPouchError.prototype = PouchError.prototype;
  return new CustomPouchError(reason);
}
526
/**
 * Normalize an arbitrary HTTP-layer failure into a PouchDB-shaped error:
 * guarantees `name`, `status`, `message` and `stack` exist, and maps
 * CouchDB 'conflict' responses to status 409.
 *
 * NOTE(review): when `err` is not an object this assigns and mutates the
 * shared UNKNOWN_ERROR template (err.data) rather than a copy — existing
 * upstream behavior, preserved here.
 */
function generateErrorFromResponse(err) {

  if (typeof err !== 'object') {
    var data = err;
    err = UNKNOWN_ERROR;
    err.data = data;
  }

  if ('error' in err && err.error === 'conflict') {
    err.name = 'conflict';
    err.status = 409;
  }

  if (!('name' in err)) {
    err.name = err.error || 'unknown';
  }

  if (!('status' in err)) {
    err.status = 500;
  }

  if (!('message' in err)) {
    err.message = err.message || err.reason;
  }

  if (!('stack' in err)) {
    err.stack = (new Error()).stack;
  }

  return err;
}
558
/**
 * Run a changes filter defensively. Returns the NEGATED filter result
 * (truthy means "skip this change" — see filterChange below), or a
 * BAD_REQUEST error OBJECT (not thrown) if the filter itself throws.
 */
function tryFilter(filter, doc, req) {
  try {
    return !filter(doc, req);
  } catch (err) {
    var msg = 'Filter function threw: ' + err.toString();
    return createError(BAD_REQUEST, msg);
  }
}
567
/**
 * Build a predicate for the changes feed from user options.
 *
 * The returned function yields:
 * - an error OBJECT if the user filter threw (callers treat that as
 *   a fatal filter error);
 * - false when the change is filtered out;
 * - true when the change should be emitted — after stripping the doc
 *   (unless include_docs) or stubbing attachments (unless attachments).
 *
 * NOTE: mutates `change` in place (doc removal / attachment stubbing).
 */
function filterChange(opts) {
  var req = {};
  var hasFilter = opts.filter && typeof opts.filter === 'function';
  req.query = opts.query_params;

  return function filter(change) {
    if (!change.doc) {
      // CSG sends events on the changes feed that don't have documents,
      // this hack makes a whole lot of existing code robust.
      change.doc = {};
    }

    // tryFilter negates the user filter, so truthy == "drop it"
    var filterReturn = hasFilter && tryFilter(opts.filter, change.doc, req);

    if (typeof filterReturn === 'object') {
      return filterReturn;
    }

    if (filterReturn) {
      return false;
    }

    if (!opts.include_docs) {
      delete change.doc;
    } else if (!opts.attachments) {
      for (var att in change.doc._attachments) {
        /* istanbul ignore else */
        if (Object.prototype.hasOwnProperty.call(change.doc._attachments, att)) {
          change.doc._attachments[att].stub = true;
        }
      }
    }
    return true;
  };
}
603
// shim for Function.prototype.name,
// for browsers that don't support it like IE

/* istanbul ignore next */
function f() {}

// detect support once at load time using the sample function above
var hasName = f.name;
var res$1;

// We don't run coverage in IE
/* istanbul ignore else */
if (hasName) {
  // native support: just read fn.name
  res$1 = function (fun) {
    return fun.name;
  };
} else {
  // fallback: scrape the name out of Function#toString source text
  res$1 = function (fun) {
    var match = fun.toString().match(/^\s*function\s*(?:(\S+)\s*)?\(/);
    if (match && match[1]) {
      return match[1];
    }
    else {
      return '';
    }
  };
}

// functionName(fn) -> the function's name, or '' if anonymous
var functionName = res$1;
632
// Determine if an ID is valid
// - invalid IDs begin with an underscore that does not begin '_design' or
// '_local'
// - any other string value is a valid id
// Throws the specific error object for each case; returns undefined
// (silently) when the id is acceptable.
function invalidIdError(id) {
  var err;
  if (!id) {
    err = createError(MISSING_ID);
  } else if (typeof id !== 'string') {
    err = createError(INVALID_ID);
  } else if (/^_/.test(id) && !(/^_(design|local)/).test(id)) {
    err = createError(RESERVED_ID);
  }
  if (err) {
    throw err;
  }
}
651
// Checks if a PouchDB object is "remote" or not.
// Prefers the modern `_remote` boolean flag; falls back to the
// deprecated db.type() === 'http' check (with a warning), and finally
// assumes local.
function isRemote(db) {
  if (typeof db._remote === 'boolean') {
    return db._remote;
  }
  /* istanbul ignore next */
  if (typeof db.type === 'function') {
    guardedConsole('warn',
      'db.type() is deprecated and will be removed in ' +
      'a future version of PouchDB');
    return db.type() === 'http';
  }
  /* istanbul ignore next */
  return false;
}
668
// Count listeners for `type`, tolerating ancient EventEmitters that only
// exposed the static EE.listenerCount(emitter, type) helper.
function listenerCount(ee, type) {
  if ('listenerCount' in ee) {
    return ee.listenerCount(type);
  }
  return EE.listenerCount(ee, type);
}
673
// Parse a design-doc function reference:
//   'ddoc/fn' -> ['ddoc', 'fn']
//   'fn'      -> ['fn', 'fn']   (shorthand: ddoc shares the name)
//   anything else (falsy, or 2+ slashes) -> null
function parseDesignDocFunctionName(s) {
  if (!s) {
    return null;
  }
  var parts = s.split('/');
  switch (parts.length) {
    case 2:
      return parts;
    case 1:
      return [s, s];
    default:
      return null;
  }
}
687
// Canonicalize a design-doc function reference to 'ddoc/fn' form
// (expanding the single-name shorthand), or null if unparseable.
function normalizeDesignDocFunctionName(s) {
  var normalized = parseDesignDocFunctionName(s);
  return normalized ? normalized.join('/') : null;
}
692
// originally parseUri 1.2.2, now patched by us
// (c) Steven Levithan <stevenlevithan.com>
// MIT License
var keys = ["source", "protocol", "authority", "userInfo", "user", "password",
  "host", "port", "relative", "path", "directory", "file", "query", "anchor"];
var qName ="queryKey";
var qParser = /(?:^|&)([^&=]*)=?([^&]*)/g;

// use the "loose" parser
/* eslint no-useless-escape: 0 */
var parser = /^(?:(?![^:@]+:[^:@\/]*@)([^:\/?#.]+):)?(?:\/\/)?((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?)(((\/(?:[^?#](?![^?#\/]*\.[^?#\/.]+(?:[?#]|$)))*\/?)?([^?#\/]*))(?:\?([^#]*))?(?:#(.*))?)/;

// Split a URL string into named components (protocol, host, port, path,
// query map under `queryKey`, …) via the loose regex above. Missing
// pieces become "".
function parseUri(str) {
  var matches = parser.exec(str);
  var uri = {};

  for (var i = keys.length - 1; i >= 0; i--) {
    var key = keys[i];
    var value = matches[i] || "";
    // only user and password arrive percent-encoded; decode just those
    uri[key] = (key === 'user' || key === 'password')
      ? decodeURIComponent(value)
      : value;
  }

  // expand the raw query string into a key -> value map
  uri[qName] = {};
  uri[keys[12]].replace(qParser, function ($0, $1, $2) {
    if ($1) {
      uri[qName][$1] = $2;
    }
  });

  return uri;
}
726
// Based on https://github.com/alexdavid/scope-eval v0.0.3

// this is essentially the "update sugar" function from daleharvey/pouchdb#1388
// the diffFun tells us what delta to apply to the doc. it either returns
// the doc, or false if it doesn't need to do an update after all
//
// Resolves to {updated: boolean, rev: string}. A 404 on fetch means
// "create from {}"; any other fetch error propagates. 409s on the put
// are retried via tryAndPut (mutual recursion).
function upsert(db, docId, diffFun) {
  return db.get(docId)
    .catch(function (err) {
      /* istanbul ignore next */
      if (err.status !== 404) {
        throw err;
      }
      // doc doesn't exist yet: start the diff from an empty doc
      return {};
    })
    .then(function (doc) {
      // the user might change the _rev, so save it for posterity
      var docRev = doc._rev;
      var newDoc = diffFun(doc);

      if (!newDoc) {
        // if the diffFun returns falsy, we short-circuit as
        // an optimization
        return {updated: false, rev: docRev};
      }

      // users aren't allowed to modify these values,
      // so reset them here
      newDoc._id = docId;
      newDoc._rev = docRev;
      return tryAndPut(db, newDoc, diffFun);
    });
}
759
// Attempt the put for upsert(); on a 409 conflict, loop back through
// upsert() to re-fetch and re-diff against the latest revision. Any
// other error propagates.
function tryAndPut(db, doc, diffFun) {
  return db.put(doc).then(function (res) {
    return {
      updated: true,
      rev: res.rev
    };
  }, function (err) {
    /* istanbul ignore next */
    if (err.status !== 409) {
      throw err;
    }
    return upsert(db, doc._id, diffFun);
  });
}
774
// MD5 a binary payload and pass the base64 digest to `callback`.
// Callback-style to stay interface-compatible with the (async) browser
// implementation of the same helper.
function binaryMd5(data, callback) {
  var hash = crypto.createHash('md5');
  hash.update(data, 'binary');
  callback(hash.digest('base64'));
}
779
// Hex MD5 of a string — used for deterministic rev generation.
function stringMd5(string) {
  var hash = crypto.createHash('md5');
  hash.update(string, 'binary');
  return hash.digest('hex');
}
783
/**
 * Creates a new revision string that does NOT include the revision height
 * For example '56649f1b0506c6ca9fda0746eb0cacdf'
 *
 * Random (default): a dash-stripped UUIDv4. Deterministic: MD5 of the
 * doc's JSON with `_rev_tree` excluded, so identical edits on different
 * nodes produce identical revs.
 */
function rev(doc, deterministic_revs) {
  if (!deterministic_revs) {
    return uuid.v4().replace(/-/g, '').toLowerCase();
  }

  // shallow copy is enough: we only delete a top-level key
  var mutateableDoc = Object.assign({}, doc);
  delete mutateableDoc._rev_tree;
  return stringMd5(JSON.stringify(mutateableDoc));
}
797
var uuid$1 = uuid.v4; // mimic old import, only v4 is ever used elsewhere
799
// We fetch all leafs of the revision tree, and sort them based on tree length
// and whether they were deleted, undeleted documents with the longest revision
// tree (most edits) win
// The final sort algorithm is slightly documented in a sidebar here:
// http://guide.couchdb.org/draft/conflicts.html
function winningRev(metadata) {
  var winningId;
  var winningPos;
  var winningDeleted;

  var stack = metadata.rev_tree.slice();
  while (stack.length) {
    var node = stack.pop();
    var tree = node.ids;
    var branches = tree[2];
    var pos = node.pos;

    if (branches.length) { // non-leaf: queue children and keep walking
      for (var i = 0, len = branches.length; i < len; i++) {
        stack.push({pos: pos + 1, ids: branches[i]});
      }
      continue;
    }

    var deleted = !!tree[1].deleted;
    var id = tree[0];
    // a leaf beats the current winner if it is "less deleted", or
    // deeper, or — everything else equal — has the greater id
    if (!winningId || (winningDeleted !== deleted ? winningDeleted :
        winningPos !== pos ? winningPos < pos : winningId < id)) {
      winningId = id;
      winningPos = pos;
      winningDeleted = deleted;
    }
  }

  return winningPos + '-' + winningId;
}
834
// Pretty much all below can be combined into a higher order function to
// traverse revisions
// Depth-first walk over a rev tree: callback receives
// (isLeaf, pos, id, ctx, opts) and its return value is threaded as the
// ctx for every child of that node.
function traverseRevTree(revs, callback) {
  var stack = revs.slice();

  while (stack.length) {
    var node = stack.pop();
    var pos = node.pos;
    var tree = node.ids;
    var branches = tree[2];
    var newCtx =
      callback(branches.length === 0, pos, tree[0], node.ctx, tree[1]);
    branches.forEach(function (branch) {
      stack.push({pos: pos + 1, ids: branch, ctx: newCtx});
    });
  }
}
854
// Comparator: ascending by `.pos`.
function sortByPos(a, b) {
  return a.pos - b.pos;
}
858
// Collect every leaf of a rev tree as {rev: 'pos-id', opts}, sorted
// deepest-first (pos descending). The temporary `pos` field used for
// sorting is stripped before returning.
function collectLeaves(revs) {
  var leaves = [];
  traverseRevTree(revs, function (isLeaf, pos, id, acc, opts) {
    if (isLeaf) {
      leaves.push({rev: pos + "-" + id, pos, opts});
    }
  });
  leaves.sort(sortByPos).reverse();
  for (var i = 0, len = leaves.length; i < len; i++) {
    delete leaves[i].pos;
  }
  return leaves;
}
872
// returns revs of all conflicts that is leaves such that
// 1. are not deleted and
// 2. are different than winning revision
function collectConflicts(metadata) {
  var win = winningRev(metadata);
  var leaves = collectLeaves(metadata.rev_tree);
  var conflicts = [];
  for (var i = 0, len = leaves.length; i < len; i++) {
    var leaf = leaves[i];
    // live leaves other than the winner are, by definition, conflicts
    if (leaf.rev !== win && !leaf.opts.deleted) {
      conflicts.push(leaf.rev);
    }
  }
  return conflicts;
}
888
// compact a tree by marking its non-leafs as missing,
// and return a list of revs to delete
// NOTE: mutates the tree in place (sets opts.status = 'missing' on every
// previously 'available' internal node).
function compactTree(metadata) {
  var revs = [];
  traverseRevTree(metadata.rev_tree, function (isLeaf, pos,
                                               revHash, ctx, opts) {
    if (opts.status === 'available' && !isLeaf) {
      revs.push(pos + '-' + revHash);
      opts.status = 'missing';
    }
  });
  return revs;
}
902
// `findPathToLeaf()` returns an array of revs that goes from the specified
// leaf rev to the root of that leaf’s branch.
//
// eg. for this rev tree:
// 1-9692 ▶ 2-37aa ▶ 3-df22 ▶ 4-6e94 ▶ 5-df4a ▶ 6-6a3a ▶ 7-57e5
// ┃ ┗━━━━━━▶ 5-8d8c ▶ 6-65e0
// ┗━━━━━━▶ 3-43f6 ▶ 4-a3b4
//
// For a `targetRev` of '7-57e5', `findPathToLeaf()` would return ['7-57e5', '6-6a3a', '5-df4a']
// The `revs` argument has the same structure as what `revs_tree` has on e.g.
// the IndexedDB representation of the rev tree datastructure. Please refer to
// tests/unit/test.purge.js for examples of what these look like.
//
// This function will throw an error if:
// - The requested revision does not exist
// - The requested revision is not a leaf
function findPathToLeaf(revs, targetRev) {
  let path$$1 = [];
  const toVisit = revs.slice();

  let node;
  while ((node = toVisit.pop())) {
    const { pos, ids: tree } = node;
    const rev = `${pos}-${tree[0]}`;
    const branches = tree[2];

    // just assuming we're already working on the path up towards our desired leaf.
    path$$1.push(rev);

    // we've reached the leaf of our dreams, so return the computed path.
    if (rev === targetRev) {
      //…unleeeeess
      if (branches.length !== 0) {
        throw new Error('The requested revision is not a leaf');
      }
      return path$$1.reverse();
    }

    // this is based on the assumption that after we have a leaf (`branches.length == 0`), we handle the next
    // branch. this is true for all branches other than the path leading to the winning rev (which is 7-57e5 in
    // the example above. i've added a reset condition for branching nodes (`branches.length > 1`) as well.
    // (i.e. the accumulated path is only valid along unbranched chains.)
    if (branches.length === 0 || branches.length > 1) {
      path$$1 = [];
    }

    // as a next step, we push the branches of this node to `toVisit` for visiting it during the next iteration
    for (let i = 0, len = branches.length; i < len; i++) {
      toVisit.push({ pos: pos + 1, ids: branches[i] });
    }
  }
  // exhausted the tree without matching targetRev
  if (path$$1.length === 0) {
    throw new Error('The requested revision does not exist');
  }
  return path$$1.reverse();
}
958
// build up a list of all the paths to the leafs in this revision tree
function rootToLeaf(revs) {
  var paths = [];
  var toVisit = revs.slice();

  while (toVisit.length) {
    var node = toVisit.pop();
    var pos = node.pos;
    var tree = node.ids;
    var id = tree[0];
    var opts = tree[1];
    var branches = tree[2];

    // extend the history inherited from the parent with this node
    var history = node.history ? node.history.slice() : [];
    history.push({id, opts});

    if (branches.length === 0) {
      // reached a leaf: record the full root-to-leaf path
      paths.push({pos: (pos + 1 - history.length), ids: history});
    }
    branches.forEach(function (branch) {
      toVisit.push({pos: pos + 1, ids: branch, history});
    });
  }

  return paths.reverse();
}
983
// for a better overview of what this is doing, read:

// Comparator: ascending by `.pos` (local duplicate kept by the bundler).
function sortByPos$1(a, b) {
  return a.pos - b.pos;
}
989
// classic binary search: index of the first element of (sorted) `arr`
// that does not compare less than `item` — i.e. the insertion point.
function binarySearch(arr, item, comparator) {
  var lo = 0;
  var hi = arr.length;
  while (lo < hi) {
    var mid = (lo + hi) >>> 1;
    var cmp = comparator(arr[mid], item);
    if (cmp < 0) {
      lo = mid + 1;
    } else {
      hi = mid;
    }
  }
  return lo;
}
1005
// assuming the arr is sorted, insert the item in the proper place
// (in place, via splice; position found with binarySearch above)
function insertSorted(arr, item, comparator) {
  var idx = binarySearch(arr, item, comparator);
  arr.splice(idx, 0, item);
}
1011
// Turn a path as a flat array into a tree with a single branch.
// If any should be stemmed from the beginning of the array, that's passed
// in as the second argument
function pathToTree(path$$1, numStemmed) {
  var root;
  var leaf;
  for (var i = numStemmed, len = path$$1.length; i < len; i++) {
    var node = path$$1[i];
    var currentLeaf = [node.id, node.opts, []];
    if (!leaf) {
      // first (unstemmed) node becomes the root
      root = currentLeaf;
    } else {
      leaf[2].push(currentLeaf);
    }
    leaf = currentLeaf;
  }
  return root;
}
1030
// compare the IDs of two trees
// (note: never returns 0 — equal ids sort as "greater", which is what
// insertSorted expects)
function compareTree(a, b) {
  if (a[0] < b[0]) {
    return -1;
  }
  return 1;
}
1035
// Merge two trees together
// The roots of tree1 and tree2 must be the same revision
//
// MUTATES in_tree1 (statuses and branch lists) and returns
// {conflicts, tree} where conflicts is false, 'new_leaf' or 'new_branch'.
function mergeTree(in_tree1, in_tree2) {
  var queue = [{tree1: in_tree1, tree2: in_tree2}];
  var conflicts = false;
  while (queue.length > 0) {
    var item = queue.pop();
    var tree1 = item.tree1;
    var tree2 = item.tree2;

    // 'available' wins over 'missing' when either side knows the rev
    if (tree1[1].status || tree2[1].status) {
      tree1[1].status =
        (tree1[1].status === 'available' ||
          tree2[1].status === 'available') ? 'available' : 'missing';
    }

    for (var i = 0; i < tree2[2].length; i++) {
      if (!tree1[2][0]) {
        // tree1 has no children at all: adopt tree2's child directly
        conflicts = 'new_leaf';
        tree1[2][0] = tree2[2][i];
        continue;
      }

      var merged = false;
      for (var j = 0; j < tree1[2].length; j++) {
        if (tree1[2][j][0] === tree2[2][i][0]) {
          // same child rev on both sides: merge their subtrees
          queue.push({tree1: tree1[2][j], tree2: tree2[2][i]});
          merged = true;
        }
      }
      if (!merged) {
        conflicts = 'new_branch';
        insertSorted(tree1[2], tree2[2][i], compareTree);
      }
    }
  }
  return {conflicts, tree: in_tree1};
}
1074
/**
 * Merge a single root-to-leaf path (a {pos, ids} flat tree) into a rev
 * tree (an array of such branches). Returns {tree, conflicts} where
 * conflicts is 'new_leaf', 'new_branch' or 'internal_node'.
 *
 * When the path and an existing branch start at the same pos/root they
 * are merged directly; otherwise (unless `dontExpand`, used after
 * stemming) the deeper tree is walked down until depths line up so the
 * shorter one can be grafted in.
 */
function doMerge(tree, path$$1, dontExpand) {
  var restree = [];
  var conflicts = false;
  var merged = false;
  var res;

  if (!tree.length) {
    return {tree: [path$$1], conflicts: 'new_leaf'};
  }

  for (var i = 0, len = tree.length; i < len; i++) {
    var branch = tree[i];
    if (branch.pos === path$$1.pos && branch.ids[0] === path$$1.ids[0]) {
      // Paths start at the same position and have the same root, so they need
      // merged
      res = mergeTree(branch.ids, path$$1.ids);
      restree.push({pos: branch.pos, ids: res.tree});
      conflicts = conflicts || res.conflicts;
      merged = true;
    } else if (dontExpand !== true) {
      // The paths start at a different position, take the earliest path and
      // traverse up until it as at the same point from root as the path we
      // want to merge. If the keys match we return the longer path with the
      // other merged After stemming we don't want to expand the trees

      var t1 = branch.pos < path$$1.pos ? branch : path$$1;
      var t2 = branch.pos < path$$1.pos ? path$$1 : branch;
      var diff = t2.pos - t1.pos;

      // nodes in t1 at exactly `diff` levels down whose id matches t2's root
      var candidateParents = [];

      var trees = [];
      trees.push({ids: t1.ids, diff, parent: null, parentIdx: null});
      while (trees.length > 0) {
        var item = trees.pop();
        if (item.diff === 0) {
          if (item.ids[0] === t2.ids[0]) {
            candidateParents.push(item);
          }
          continue;
        }
        var elements = item.ids[2];
        for (var j = 0, elementsLen = elements.length; j < elementsLen; j++) {
          trees.push({
            ids: elements[j],
            diff: item.diff - 1,
            parent: item.ids,
            parentIdx: j
          });
        }
      }

      var el = candidateParents[0];

      if (!el) {
        // no graft point found: keep the branch untouched
        restree.push(branch);
      } else {
        res = mergeTree(el.ids, t2.ids);
        el.parent[2][el.parentIdx] = res.tree;
        restree.push({pos: t1.pos, ids: t1.ids});
        conflicts = conflicts || res.conflicts;
        merged = true;
      }
    } else {
      restree.push(branch);
    }
  }

  // We didnt find
  if (!merged) {
    restree.push(path$$1);
  }

  restree.sort(sortByPos$1);

  return {
    tree: restree,
    conflicts: conflicts || 'internal_node'
  };
}
1155
// To ensure we don't grow the revision tree infinitely, we stem old revisions
//
// Cuts every root-to-leaf path longer than `depth` down to its last
// `depth` revisions, rebuilds the forest from the shortened paths, and
// reports which revisions were removed.
//
// Returns {tree, revs}; `revs` lists the stemmed "<pos>-<hash>" ids
// (empty when nothing needed stemming).
function stem(tree, depth) {
  // First we break out the tree into a complete list of root to leaf paths
  var paths = rootToLeaf(tree);
  var stemmedRevs;

  var result;
  for (var i = 0, len = paths.length; i < len; i++) {
    // Then for each path, we cut off the start of the path based on the
    // `depth` to stem to, and generate a new set of flat trees
    var path$$1 = paths[i];
    var stemmed = path$$1.ids;
    var node;
    if (stemmed.length > depth) {
      // only do the stemming work if we actually need to stem
      if (!stemmedRevs) {
        stemmedRevs = {}; // avoid allocating this object unnecessarily
      }
      var numStemmed = stemmed.length - depth;
      node = {
        pos: path$$1.pos + numStemmed,
        ids: pathToTree(stemmed, numStemmed)
      };

      // Remember every revision cut off the front of this path.
      for (var s = 0; s < numStemmed; s++) {
        var rev = (path$$1.pos + s) + '-' + stemmed[s].id;
        stemmedRevs[rev] = true;
      }
    } else { // no need to actually stem
      node = {
        pos: path$$1.pos,
        ids: pathToTree(stemmed, 0)
      };
    }

    // Then we remerge all those flat trees together, ensuring that we don't
    // connect trees that would go beyond the depth limit
    if (result) {
      result = doMerge(result, node, true).tree;
    } else {
      result = [node];
    }
  }

  // this is memory-heavy per Chrome profiler, avoid unless we actually stemmed
  if (stemmedRevs) {
    traverseRevTree(result, function (isLeaf, pos, revHash) {
      // some revisions may have been removed in a branch but not in another
      delete stemmedRevs[pos + '-' + revHash];
    });
  }

  return {
    tree: result,
    revs: stemmedRevs ? Object.keys(stemmedRevs) : []
  };
}
1213
// Merge `path$$1` into `tree`, then stem the combined tree down to
// `depth` revisions per path, reporting what was stemmed away.
function merge(tree, path$$1, depth) {
  const mergedResult = doMerge(tree, path$$1);
  const stemResult = stem(mergedResult.tree, depth);
  return {
    tree: stemResult.tree,
    stemmedRevs: stemResult.revs,
    conflicts: mergedResult.conflicts
  };
}
1223
// return true if a rev exists in the rev tree, false otherwise
function revExists(revs, rev) {
  const dashAt = rev.indexOf('-');
  const targetPos = parseInt(rev.slice(0, dashAt), 10);
  const targetId = rev.slice(dashAt + 1).split('-')[0];
  const stack = revs.slice();

  // Depth-first walk over every {pos, ids} node of the forest.
  for (var node = stack.pop(); node; node = stack.pop()) {
    if (node.pos === targetPos && node.ids[0] === targetId) {
      return true;
    }
    node.ids[2].forEach(function (branch) {
      stack.push({pos: node.pos + 1, ids: branch});
    });
  }
  return false;
}
1243
// Pull the `ids` tree out of a {pos, ids} rev-tree entry.
function getTrees(node) {
  const {ids} = node;
  return ids;
}
1247
// check if a specific revision of a doc has been deleted
// - metadata: the metadata object from the doc store
// - rev: (optional) the revision to check. defaults to winning revision
// Returns undefined (falsy) when the revision is not found in the tree.
function isDeleted(metadata, rev) {
  if (!rev) {
    rev = winningRev(metadata);
  }
  // Everything after the first '-' is the revision hash.
  const id = rev.substring(rev.indexOf('-') + 1);

  // Depth-first search over every root's [hash, opts, children] triple.
  let toVisit = metadata.rev_tree.map(function (node) { return node.ids; });
  let tree;
  while ((tree = toVisit.pop())) {
    if (tree[0] === id) {
      return !!tree[1].deleted;
    }
    toVisit = toVisit.concat(tree[2]);
  }
}
1266
// A "local" (non-replicated) document id lives under the '_local/' prefix.
function isLocalId(id) {
  if (typeof id !== 'string') {
    return false;
  }
  return id.startsWith('_local/');
}
1270
// returns the current leaf node for a given revision
// Walks every branch of the rev tree, carrying along the chain of
// ancestors ("history") leading to each node; when a leaf's ancestry
// contains `rev`, that leaf is the latest descendant of `rev`.
function latest(rev, metadata) {
  const stack = metadata.rev_tree.slice();
  let node;
  while ((node = stack.pop())) {
    const pos = node.pos;
    const ids = node.ids;
    const id = ids[0];
    const opts = ids[1];
    const branches = ids[2];

    const history = node.history ? node.history.slice() : [];
    history.push({id, pos, opts});

    if (branches.length === 0) {
      // Leaf: does the requested rev occur anywhere on the path here?
      for (const ancestor of history) {
        if (ancestor.pos + '-' + ancestor.id === rev) {
          // return the rev of this leaf
          return pos + '-' + id;
        }
      }
    }

    for (const branch of branches) {
      stack.push({pos: pos + 1, ids: branch, history});
    }
  }

  /* istanbul ignore next */
  throw new Error('Unable to resolve latest revision for id ' + metadata.id + ', rev ' + rev);
}
1306
// Emit a 'change' event; the try/catch lives in its own small function
// so V8 does not deoptimize the (hot) calling code.
function tryCatchInChangeListener(self, change, pending, lastSeq) {
  try {
    self.emit('change', change, pending, lastSeq);
  } catch (err) {
    // A throwing user listener must not kill the changes feed.
    guardedConsole('error', 'Error in .on("change", function):', err);
  }
}
1315
// Build the change-feed row for one document revision.
function processChange(doc, metadata, opts) {
  // style === 'all_docs' reports every leaf revision, not just the winner.
  let changeList;
  if (opts.style === 'all_docs') {
    changeList = collectLeaves(metadata.rev_tree)
      .map(function (leaf) { return {rev: leaf.rev}; });
  } else {
    changeList = [{rev: doc._rev}];
  }
  const change = {
    id: metadata.id,
    changes: changeList,
    doc
  };

  if (isDeleted(metadata, doc._rev)) {
    change.deleted = true;
  }
  if (opts.conflicts) {
    // Attach conflicting revisions, dropping the key again when empty.
    change.doc._conflicts = collectConflicts(metadata);
    if (!change.doc._conflicts.length) {
      delete change.doc._conflicts;
    }
  }
  return change;
}
1339
// A single changes() request: an EventEmitter ('change', 'complete',
// 'error', 'cancel') that is also thenable, resolving with the final
// changes response (or {status: 'cancelled'} after cancel()).
class Changes$1 extends EE {
  constructor(db, opts, callback) {
    super();
    this.db = db;
    opts = opts ? clone(opts) : {};
    // Final fan-out: emit 'error' or 'complete', then detach everything.
    // `once` guarantees this only ever fires a single time.
    var complete = opts.complete = once((err, resp) => {
      if (err) {
        // only emit 'error' when somebody is listening, so an unhandled
        // 'error' event does not crash the process
        if (listenerCount(this, 'error') > 0) {
          this.emit('error', err);
        }
      } else {
        this.emit('complete', resp);
      }
      this.removeAllListeners();
      db.removeListener('destroyed', onDestroy);
    });
    // Node-style callback support layered on the event interface.
    if (callback) {
      this.on('complete', function (resp) {
        callback(null, resp);
      });
      this.on('error', callback);
    }
    const onDestroy = () => {
      this.cancel();
    };
    db.once('destroyed', onDestroy);

    opts.onChange = (change, pending, lastSeq) => {
      /* istanbul ignore if */
      if (this.isCancelled) {
        return;
      }
      tryCatchInChangeListener(this, change, pending, lastSeq);
    };

    // Bridge the callback-style completion into a promise. Note that
    // opts.complete is REASSIGNED here: the adapter reports into the
    // promise, which in turn feeds `complete` (above) via this.then().
    var promise = new Promise(function (fulfill, reject) {
      opts.complete = function (err, res$$1) {
        if (err) {
          reject(err);
        } else {
          fulfill(res$$1);
        }
      };
    });
    this.once('cancel', function () {
      db.removeListener('destroyed', onDestroy);
      opts.complete(null, {status: 'cancelled'});
    });
    this.then = promise.then.bind(promise);
    this['catch'] = promise['catch'].bind(promise);
    this.then(function (result) {
      complete(null, result);
    }, complete);



    // Defer starting the feed until the adapter's task queue is ready.
    if (!db.taskqueue.isReady) {
      db.taskqueue.addTask((failed) => {
        if (failed) {
          opts.complete(failed);
        } else if (this.isCancelled) {
          this.emit('cancel');
        } else {
          this.validateChanges(opts);
        }
      });
    } else {
      this.validateChanges(opts);
    }
  }

  cancel() {
    this.isCancelled = true;
    // If the task queue isn't ready yet, the queued task (see the
    // constructor) observes isCancelled and emits 'cancel' later.
    if (this.db.taskqueue.isReady) {
      this.emit('cancel');
    }
  }

  // Let the changes-filter plugin validate filter/selector options
  // before the feed starts; without the plugin, start right away.
  validateChanges(opts) {
    var callback = opts.complete;

    /* istanbul ignore else */
    if (PouchDB._changesFilterPlugin) {
      PouchDB._changesFilterPlugin.validate(opts, (err) => {
        if (err) {
          return callback(err);
        }
        this.doChanges(opts);
      });
    } else {
      this.doChanges(opts);
    }
  }

  // Normalize options and hand the feed off to the adapter's _changes().
  doChanges(opts) {
    var callback = opts.complete;

    opts = clone(opts);
    // 'live' is an alias for 'continuous'
    if ('live' in opts && !('continuous' in opts)) {
      opts.continuous = opts.live;
    }
    opts.processChange = processChange;

    if (opts.since === 'latest') {
      opts.since = 'now';
    }
    if (!opts.since) {
      opts.since = 0;
    }
    // since === 'now': resolve the current update_seq first, then
    // re-enter doChanges with a concrete sequence number.
    if (opts.since === 'now') {
      this.db.info().then((info) => {
        /* istanbul ignore if */
        if (this.isCancelled) {
          callback(null, {status: 'cancelled'});
          return;
        }
        opts.since = info.update_seq;
        this.doChanges(opts);
      }, callback);
      return;
    }

    /* istanbul ignore else */
    if (PouchDB._changesFilterPlugin) {
      PouchDB._changesFilterPlugin.normalize(opts);
      if (PouchDB._changesFilterPlugin.shouldFilter(this, opts)) {
        return PouchDB._changesFilterPlugin.filter(this, opts);
      }
    } else {
      // No filter plugin installed: warn about silently-ignored options.
      ['doc_ids', 'filter', 'selector', 'view'].forEach(function (key) {
        if (key in opts) {
          guardedConsole('warn',
            'The "' + key + '" option was passed in to changes/replicate, ' +
            'but pouchdb-changes-filter plugin is not installed, so it ' +
            'was ignored. Please install the plugin to enable filtering.'
          );
        }
      });
    }

    if (!('descending' in opts)) {
      opts.descending = false;
    }

    // 0 and 1 should return 1 document
    opts.limit = opts.limit === 0 ? 1 : opts.limit;
    opts.complete = callback;
    var newPromise = this.db._changes(opts);
    /* istanbul ignore else */
    if (newPromise && typeof newPromise.cancel === 'function') {
      // Chain the adapter's cancel into ours so cancel() stops the feed.
      const cancel = this.cancel;
      this.cancel = (...args) => {
        newPromise.cancel();
        cancel.apply(this, args);
      };
    }
  }
}
1498
1499/*
1500 * A generic pouch adapter
1501 */
1502
// Wrapper for functions that call the bulkdocs api with a single doc:
// if the first result is an error, surface it (tagged with docId)
// through the callback instead of the results array.
function yankError(callback, docId) {
  return function (err, results) {
    const failure = err || (results[0] && results[0].error ? results[0] : null);
    if (failure) {
      failure.docId = docId;
      callback(failure);
    } else {
      callback(null, results.length ? results[0] : results);
    }
  };
}
1516
// clean docs given to us by the user (mutates the docs in place)
function cleanDocs(docs) {
  for (const doc of docs) {
    if (doc._deleted) {
      // attachments are meaningless on a deleted doc
      delete doc._attachments;
    } else if (doc._attachments) {
      // filter out extraneous keys from _attachments
      for (const name of Object.keys(doc._attachments)) {
        doc._attachments[name] = pick(doc._attachments[name],
          ['data', 'digest', 'content_type', 'length', 'revpos', 'stub']);
      }
    }
  }
}
1534
// compare two docs, first by _id then by _rev
function compareByIdThenRev(a, b) {
  if (a._id !== b._id) {
    return a._id < b._id ? -1 : 1;
  }
  // Same id: order by the start of the revision history; a doc with no
  // _revisions sorts as generation 0.
  const aStart = a._revisions ? a._revisions.start : 0;
  const bStart = b._revisions ? b._revisions.start : 0;
  return aStart - bStart;
}
1544
// for every node in a revision tree computes its distance from the closest
// leaf
function computeHeight(revs) {
  const height = {};
  const edges = [];

  // First pass: seed leaves at height 0 and record every parent->child
  // edge. The value returned from the visitor is handed back as the
  // `prnt` argument when its children are visited.
  traverseRevTree(revs, function (isLeaf, pos, id, prnt) {
    const rev$$1 = pos + "-" + id;
    if (isLeaf) {
      height[rev$$1] = 0;
    }
    if (prnt !== undefined) {
      edges.push({from: prnt, to: rev$$1});
    }
    return rev$$1;
  });

  // Second pass: walk the edges child-first so every parent ends up one
  // more than its closest (minimum-height) child.
  edges.reverse();
  for (const edge of edges) {
    if (height[edge.from] === undefined) {
      height[edge.from] = 1 + height[edge.to];
    } else {
      height[edge.from] = Math.min(height[edge.from], 1 + height[edge.to]);
    }
  }
  return height;
}
1571
// Normalize a `keys` query in place: fold `skip`, `limit` and
// `descending` into the keys array itself so the adapter can treat the
// request as a plain multi-key fetch.
function allDocsKeysParse(opts) {
  let keys;
  if ('limit' in opts) {
    keys = opts.keys.slice(opts.skip, opts.limit + opts.skip);
  } else if (opts.skip > 0) {
    keys = opts.keys.slice(opts.skip);
  } else {
    keys = opts.keys;
  }
  opts.keys = keys;
  opts.skip = 0;
  delete opts.limit;
  if (opts.descending) {
    keys.reverse();
    opts.descending = false;
  }
}
1584
// all compaction is done in a queue, to avoid attaching
// too many listeners at once
function doNextCompaction(self) {
  // Peek at the head task (don't shift yet) so concurrent compact()
  // calls queue up behind the running one.
  var task = self._compactionQueue[0];
  var opts = task.opts;
  var callback = task.callback;
  // Resume from the last compacted sequence if a checkpoint doc exists.
  self.get('_local/compaction').catch(function () {
    return false; // no checkpoint yet - start from the beginning
  }).then(function (doc) {
    if (doc && doc.last_seq) {
      opts.last_seq = doc.last_seq;
    }
    self._compact(opts, function (err, res$$1) {
      /* istanbul ignore if */
      if (err) {
        callback(err);
      } else {
        callback(null, res$$1);
      }
      // Dequeue on the next tick and kick off the following task, if any.
      nextTick(function () {
        self._compactionQueue.shift();
        if (self._compactionQueue.length) {
          doNextCompaction(self);
        }
      });
    });
  });
}
1613
// Record the purge of `rev$$1` on `docId` in the '_local/purges' doc:
// bump the purge sequence, append a history entry, and trim the history
// to at most `db.purged_infos_limit` entries. A missing purges doc
// (404) is created fresh at purgeSeq 0; other errors propagate.
//
// Returns a promise resolving with the result of writing the updated
// '_local/purges' doc back to `db`.
function appendPurgeSeq(db, docId, rev$$1) {
  return db.get('_local/purges').then(function (doc) {
    const purgeSeq = doc.purgeSeq + 1;
    doc.purges.push({
      docId,
      rev: rev$$1,
      purgeSeq,
    });
    // Bound the purge history. This previously read `self.purged_infos_limit`,
    // but `self` is not in scope here (it resolved to the global object in
    // browsers, so the limit was never applied, and threw in Node); the
    // database handle `db` carries the limit.
    if (doc.purges.length > db.purged_infos_limit) {
      doc.purges.splice(0, doc.purges.length - db.purged_infos_limit);
    }
    doc.purgeSeq = purgeSeq;
    return doc;
  }).catch(function (err) {
    // Only a missing purges doc is expected; rethrow anything else.
    if (err.status !== 404) {
      throw err;
    }
    return {
      _id: '_local/purges',
      purges: [{
        docId,
        rev: rev$$1,
        purgeSeq: 0,
      }],
      purgeSeq: 0,
    };
  }).then(function (doc) {
    return db.put(doc);
  });
}
1644
// Validate a user-supplied attachment name. Returns an error message
// string when the name is invalid, or false when it is acceptable.
function attachmentNameError(name) {
  if (name.charAt(0) !== '_') {
    return false;
  }
  return name + ' is not a valid attachment name, attachment ' +
    'names cannot start with \'_\'';
}
1652
// A single-doc API call must receive one plain document object; reject
// null, arrays and any non-object value.
function isNotSingleDoc(doc) {
  if (doc === null || Array.isArray(doc)) {
    return true;
  }
  return typeof doc !== 'object';
}
1656
// A well-formed revision string is "<generation>-<hash>", e.g. "1-abc";
// the hash portion may not itself contain a dash.
const validRevRegex = /^\d+-[^-]*$/;
function isValidRev(rev$$1) {
  if (typeof rev$$1 !== 'string') {
    return false;
  }
  return validRevRegex.test(rev$$1);
}
1661
1662class AbstractPouchDB extends EE {
1663 _setup() {
1664 this.post = adapterFun('post', function (doc, opts, callback) {
1665 if (typeof opts === 'function') {
1666 callback = opts;
1667 opts = {};
1668 }
1669 if (isNotSingleDoc(doc)) {
1670 return callback(createError(NOT_AN_OBJECT));
1671 }
1672 this.bulkDocs({docs: [doc]}, opts, yankError(callback, doc._id));
1673 }).bind(this);
1674
1675 this.put = adapterFun('put', function (doc, opts, cb) {
1676 if (typeof opts === 'function') {
1677 cb = opts;
1678 opts = {};
1679 }
1680 if (isNotSingleDoc(doc)) {
1681 return cb(createError(NOT_AN_OBJECT));
1682 }
1683 invalidIdError(doc._id);
1684 if ('_rev' in doc && !isValidRev(doc._rev)) {
1685 return cb(createError(INVALID_REV));
1686 }
1687 if (isLocalId(doc._id) && typeof this._putLocal === 'function') {
1688 if (doc._deleted) {
1689 return this._removeLocal(doc, cb);
1690 } else {
1691 return this._putLocal(doc, cb);
1692 }
1693 }
1694
1695 const putDoc = (next) => {
1696 if (typeof this._put === 'function' && opts.new_edits !== false) {
1697 this._put(doc, opts, next);
1698 } else {
1699 this.bulkDocs({docs: [doc]}, opts, yankError(next, doc._id));
1700 }
1701 };
1702
1703 if (opts.force && doc._rev) {
1704 transformForceOptionToNewEditsOption();
1705 putDoc(function (err) {
1706 var result = err ? null : {ok: true, id: doc._id, rev: doc._rev};
1707 cb(err, result);
1708 });
1709 } else {
1710 putDoc(cb);
1711 }
1712
1713 function transformForceOptionToNewEditsOption() {
1714 var parts = doc._rev.split('-');
1715 var oldRevId = parts[1];
1716 var oldRevNum = parseInt(parts[0], 10);
1717
1718 var newRevNum = oldRevNum + 1;
1719 var newRevId = rev();
1720
1721 doc._revisions = {
1722 start: newRevNum,
1723 ids: [newRevId, oldRevId]
1724 };
1725 doc._rev = newRevNum + '-' + newRevId;
1726 opts.new_edits = false;
1727 }
1728 }).bind(this);
1729
1730 this.putAttachment = adapterFun('putAttachment', function (docId, attachmentId, rev$$1, blob, type) {
1731 var api = this;
1732 if (typeof type === 'function') {
1733 type = blob;
1734 blob = rev$$1;
1735 rev$$1 = null;
1736 }
1737 // Lets fix in https://github.com/pouchdb/pouchdb/issues/3267
1738 /* istanbul ignore if */
1739 if (typeof type === 'undefined') {
1740 type = blob;
1741 blob = rev$$1;
1742 rev$$1 = null;
1743 }
1744 if (!type) {
1745 guardedConsole('warn', 'Attachment', attachmentId, 'on document', docId, 'is missing content_type');
1746 }
1747
1748 function createAttachment(doc) {
1749 var prevrevpos = '_rev' in doc ? parseInt(doc._rev, 10) : 0;
1750 doc._attachments = doc._attachments || {};
1751 doc._attachments[attachmentId] = {
1752 content_type: type,
1753 data: blob,
1754 revpos: ++prevrevpos
1755 };
1756 return api.put(doc);
1757 }
1758
1759 return api.get(docId).then(function (doc) {
1760 if (doc._rev !== rev$$1) {
1761 throw createError(REV_CONFLICT);
1762 }
1763
1764 return createAttachment(doc);
1765 }, function (err) {
1766 // create new doc
1767 /* istanbul ignore else */
1768 if (err.reason === MISSING_DOC.message) {
1769 return createAttachment({_id: docId});
1770 } else {
1771 throw err;
1772 }
1773 });
1774 }).bind(this);
1775
1776 this.removeAttachment = adapterFun('removeAttachment', function (docId, attachmentId, rev$$1, callback) {
1777 this.get(docId, (err, obj) => {
1778 /* istanbul ignore if */
1779 if (err) {
1780 callback(err);
1781 return;
1782 }
1783 if (obj._rev !== rev$$1) {
1784 callback(createError(REV_CONFLICT));
1785 return;
1786 }
1787 /* istanbul ignore if */
1788 if (!obj._attachments) {
1789 return callback();
1790 }
1791 delete obj._attachments[attachmentId];
1792 if (Object.keys(obj._attachments).length === 0) {
1793 delete obj._attachments;
1794 }
1795 this.put(obj, callback);
1796 });
1797 }).bind(this);
1798
1799 this.remove = adapterFun('remove', function (docOrId, optsOrRev, opts, callback) {
1800 var doc;
1801 if (typeof optsOrRev === 'string') {
1802 // id, rev, opts, callback style
1803 doc = {
1804 _id: docOrId,
1805 _rev: optsOrRev
1806 };
1807 if (typeof opts === 'function') {
1808 callback = opts;
1809 opts = {};
1810 }
1811 } else {
1812 // doc, opts, callback style
1813 doc = docOrId;
1814 if (typeof optsOrRev === 'function') {
1815 callback = optsOrRev;
1816 opts = {};
1817 } else {
1818 callback = opts;
1819 opts = optsOrRev;
1820 }
1821 }
1822 opts = opts || {};
1823 opts.was_delete = true;
1824 var newDoc = {_id: doc._id, _rev: (doc._rev || opts.rev)};
1825 newDoc._deleted = true;
1826 if (isLocalId(newDoc._id) && typeof this._removeLocal === 'function') {
1827 return this._removeLocal(doc, callback);
1828 }
1829 this.bulkDocs({docs: [newDoc]}, opts, yankError(callback, newDoc._id));
1830 }).bind(this);
1831
1832 this.revsDiff = adapterFun('revsDiff', function (req, opts, callback) {
1833 if (typeof opts === 'function') {
1834 callback = opts;
1835 opts = {};
1836 }
1837 var ids = Object.keys(req);
1838
1839 if (!ids.length) {
1840 return callback(null, {});
1841 }
1842
1843 var count = 0;
1844 var missing = new Map();
1845
1846 function addToMissing(id, revId) {
1847 if (!missing.has(id)) {
1848 missing.set(id, {missing: []});
1849 }
1850 missing.get(id).missing.push(revId);
1851 }
1852
1853 function processDoc(id, rev_tree) {
1854 // Is this fast enough? Maybe we should switch to a set simulated by a map
1855 var missingForId = req[id].slice(0);
1856 traverseRevTree(rev_tree, function (isLeaf, pos, revHash, ctx,
1857 opts) {
1858 var rev$$1 = pos + '-' + revHash;
1859 var idx = missingForId.indexOf(rev$$1);
1860 if (idx === -1) {
1861 return;
1862 }
1863
1864 missingForId.splice(idx, 1);
1865 /* istanbul ignore if */
1866 if (opts.status !== 'available') {
1867 addToMissing(id, rev$$1);
1868 }
1869 });
1870
1871 // Traversing the tree is synchronous, so now `missingForId` contains
1872 // revisions that were not found in the tree
1873 missingForId.forEach(function (rev$$1) {
1874 addToMissing(id, rev$$1);
1875 });
1876 }
1877
1878 ids.forEach(function (id) {
1879 this._getRevisionTree(id, function (err, rev_tree) {
1880 if (err && err.status === 404 && err.message === 'missing') {
1881 missing.set(id, {missing: req[id]});
1882 } else if (err) {
1883 /* istanbul ignore next */
1884 return callback(err);
1885 } else {
1886 processDoc(id, rev_tree);
1887 }
1888
1889 if (++count === ids.length) {
1890 // convert LazyMap to object
1891 var missingObj = {};
1892 missing.forEach(function (value, key) {
1893 missingObj[key] = value;
1894 });
1895 return callback(null, missingObj);
1896 }
1897 });
1898 }, this);
1899 }).bind(this);
1900
1901 // _bulk_get API for faster replication, as described in
1902 // https://github.com/apache/couchdb-chttpd/pull/33
1903 // At the "abstract" level, it will just run multiple get()s in
1904 // parallel, because this isn't much of a performance cost
1905 // for local databases (except the cost of multiple transactions, which is
1906 // small). The http adapter overrides this in order
1907 // to do a more efficient single HTTP request.
1908 this.bulkGet = adapterFun('bulkGet', function (opts, callback) {
1909 bulkGet(this, opts, callback);
1910 }).bind(this);
1911
1912 // compact one document and fire callback
1913 // by compacting we mean removing all revisions which
1914 // are further from the leaf in revision tree than max_height
1915 this.compactDocument = adapterFun('compactDocument', function (docId, maxHeight, callback) {
1916 this._getRevisionTree(docId, (err, revTree) => {
1917 /* istanbul ignore if */
1918 if (err) {
1919 return callback(err);
1920 }
1921 var height = computeHeight(revTree);
1922 var candidates = [];
1923 var revs = [];
1924 Object.keys(height).forEach(function (rev$$1) {
1925 if (height[rev$$1] > maxHeight) {
1926 candidates.push(rev$$1);
1927 }
1928 });
1929
1930 traverseRevTree(revTree, function (isLeaf, pos, revHash, ctx, opts) {
1931 var rev$$1 = pos + '-' + revHash;
1932 if (opts.status === 'available' && candidates.indexOf(rev$$1) !== -1) {
1933 revs.push(rev$$1);
1934 }
1935 });
1936 this._doCompaction(docId, revs, callback);
1937 });
1938 }).bind(this);
1939
1940 // compact the whole database using single document
1941 // compaction
1942 this.compact = adapterFun('compact', function (opts, callback) {
1943 if (typeof opts === 'function') {
1944 callback = opts;
1945 opts = {};
1946 }
1947
1948 opts = opts || {};
1949
1950 this._compactionQueue = this._compactionQueue || [];
1951 this._compactionQueue.push({opts, callback});
1952 if (this._compactionQueue.length === 1) {
1953 doNextCompaction(this);
1954 }
1955 }).bind(this);
1956
1957 /* Begin api wrappers. Specific functionality to storage belongs in the _[method] */
1958 this.get = adapterFun('get', function (id, opts, cb) {
1959 if (typeof opts === 'function') {
1960 cb = opts;
1961 opts = {};
1962 }
1963 opts = opts || {};
1964 if (typeof id !== 'string') {
1965 return cb(createError(INVALID_ID));
1966 }
1967 if (isLocalId(id) && typeof this._getLocal === 'function') {
1968 return this._getLocal(id, cb);
1969 }
1970 var leaves = [];
1971
1972 const finishOpenRevs = () => {
1973 var result = [];
1974 var count = leaves.length;
1975 /* istanbul ignore if */
1976 if (!count) {
1977 return cb(null, result);
1978 }
1979
1980 // order with open_revs is unspecified
1981 leaves.forEach((leaf) => {
1982 this.get(id, {
1983 rev: leaf,
1984 revs: opts.revs,
1985 latest: opts.latest,
1986 attachments: opts.attachments,
1987 binary: opts.binary
1988 }, function (err, doc) {
1989 if (!err) {
1990 // using latest=true can produce duplicates
1991 var existing;
1992 for (var i = 0, l = result.length; i < l; i++) {
1993 if (result[i].ok && result[i].ok._rev === doc._rev) {
1994 existing = true;
1995 break;
1996 }
1997 }
1998 if (!existing) {
1999 result.push({ok: doc});
2000 }
2001 } else {
2002 result.push({missing: leaf});
2003 }
2004 count--;
2005 if (!count) {
2006 cb(null, result);
2007 }
2008 });
2009 });
2010 };
2011
2012 if (opts.open_revs) {
2013 if (opts.open_revs === "all") {
2014 this._getRevisionTree(id, function (err, rev_tree) {
2015 /* istanbul ignore if */
2016 if (err) {
2017 return cb(err);
2018 }
2019 leaves = collectLeaves(rev_tree).map(function (leaf) {
2020 return leaf.rev;
2021 });
2022 finishOpenRevs();
2023 });
2024 } else {
2025 if (Array.isArray(opts.open_revs)) {
2026 leaves = opts.open_revs;
2027 for (var i = 0; i < leaves.length; i++) {
2028 var l = leaves[i];
2029 // looks like it's the only thing couchdb checks
2030 if (!isValidRev(l)) {
2031 return cb(createError(INVALID_REV));
2032 }
2033 }
2034 finishOpenRevs();
2035 } else {
2036 return cb(createError(UNKNOWN_ERROR, 'function_clause'));
2037 }
2038 }
2039 return; // open_revs does not like other options
2040 }
2041
2042 return this._get(id, opts, (err, result) => {
2043 if (err) {
2044 err.docId = id;
2045 return cb(err);
2046 }
2047
2048 var doc = result.doc;
2049 var metadata = result.metadata;
2050 var ctx = result.ctx;
2051
2052 if (opts.conflicts) {
2053 var conflicts = collectConflicts(metadata);
2054 if (conflicts.length) {
2055 doc._conflicts = conflicts;
2056 }
2057 }
2058
2059 if (isDeleted(metadata, doc._rev)) {
2060 doc._deleted = true;
2061 }
2062
2063 if (opts.revs || opts.revs_info) {
2064 var splittedRev = doc._rev.split('-');
2065 var revNo = parseInt(splittedRev[0], 10);
2066 var revHash = splittedRev[1];
2067
2068 var paths = rootToLeaf(metadata.rev_tree);
2069 var path$$1 = null;
2070
2071 for (var i = 0; i < paths.length; i++) {
2072 var currentPath = paths[i];
2073 const hashIndex = currentPath.ids.findIndex(x => x.id === revHash);
2074 var hashFoundAtRevPos = hashIndex === (revNo - 1);
2075
2076 if (hashFoundAtRevPos || (!path$$1 && hashIndex !== -1)) {
2077 path$$1 = currentPath;
2078 }
2079 }
2080
2081 /* istanbul ignore if */
2082 if (!path$$1) {
2083 err = new Error('invalid rev tree');
2084 err.docId = id;
2085 return cb(err);
2086 }
2087
2088 const pathId = doc._rev.split('-')[1];
2089 const indexOfRev = path$$1.ids.findIndex(x => x.id === pathId) + 1;
2090 var howMany = path$$1.ids.length - indexOfRev;
2091 path$$1.ids.splice(indexOfRev, howMany);
2092 path$$1.ids.reverse();
2093
2094 if (opts.revs) {
2095 doc._revisions = {
2096 start: (path$$1.pos + path$$1.ids.length) - 1,
2097 ids: path$$1.ids.map(function (rev$$1) {
2098 return rev$$1.id;
2099 })
2100 };
2101 }
2102 if (opts.revs_info) {
2103 var pos = path$$1.pos + path$$1.ids.length;
2104 doc._revs_info = path$$1.ids.map(function (rev$$1) {
2105 pos--;
2106 return {
2107 rev: pos + '-' + rev$$1.id,
2108 status: rev$$1.opts.status
2109 };
2110 });
2111 }
2112 }
2113
2114 if (opts.attachments && doc._attachments) {
2115 var attachments = doc._attachments;
2116 var count = Object.keys(attachments).length;
2117 if (count === 0) {
2118 return cb(null, doc);
2119 }
2120 Object.keys(attachments).forEach((key) => {
2121 this._getAttachment(doc._id, key, attachments[key], {
2122 binary: opts.binary,
2123 metadata,
2124 ctx
2125 }, function (err, data) {
2126 var att = doc._attachments[key];
2127 att.data = data;
2128 delete att.stub;
2129 delete att.length;
2130 if (!--count) {
2131 cb(null, doc);
2132 }
2133 });
2134 });
2135 } else {
2136 if (doc._attachments) {
2137 for (var key in doc._attachments) {
2138 /* istanbul ignore else */
2139 if (Object.prototype.hasOwnProperty.call(doc._attachments, key)) {
2140 doc._attachments[key].stub = true;
2141 }
2142 }
2143 }
2144 cb(null, doc);
2145 }
2146 });
2147 }).bind(this);
2148
2149 // TODO: I don't like this, it forces an extra read for every
2150 // attachment read and enforces a confusing api between
2151 // adapter.js and the adapter implementation
2152 this.getAttachment = adapterFun('getAttachment', function (docId, attachmentId, opts, callback) {
2153 if (opts instanceof Function) {
2154 callback = opts;
2155 opts = {};
2156 }
2157 this._get(docId, opts, (err, res$$1) => {
2158 if (err) {
2159 return callback(err);
2160 }
2161 if (res$$1.doc._attachments && res$$1.doc._attachments[attachmentId]) {
2162 opts.ctx = res$$1.ctx;
2163 opts.binary = true;
2164 opts.metadata = res$$1.metadata;
2165 this._getAttachment(docId, attachmentId,
2166 res$$1.doc._attachments[attachmentId], opts, callback);
2167 } else {
2168 return callback(createError(MISSING_DOC));
2169 }
2170 });
2171 }).bind(this);
2172
2173 this.allDocs = adapterFun('allDocs', function (opts, callback) {
2174 if (typeof opts === 'function') {
2175 callback = opts;
2176 opts = {};
2177 }
2178 opts.skip = typeof opts.skip !== 'undefined' ? opts.skip : 0;
2179 if (opts.start_key) {
2180 opts.startkey = opts.start_key;
2181 }
2182 if (opts.end_key) {
2183 opts.endkey = opts.end_key;
2184 }
2185 if ('keys' in opts) {
2186 if (!Array.isArray(opts.keys)) {
2187 return callback(new TypeError('options.keys must be an array'));
2188 }
2189 var incompatibleOpt =
2190 ['startkey', 'endkey', 'key'].filter(function (incompatibleOpt) {
2191 return incompatibleOpt in opts;
2192 })[0];
2193 if (incompatibleOpt) {
2194 callback(createError(QUERY_PARSE_ERROR,
2195 'Query parameter `' + incompatibleOpt +
2196 '` is not compatible with multi-get'
2197 ));
2198 return;
2199 }
2200 if (!isRemote(this)) {
2201 allDocsKeysParse(opts);
2202 if (opts.keys.length === 0) {
2203 return this._allDocs({limit: 0}, callback);
2204 }
2205 }
2206 }
2207
2208 return this._allDocs(opts, callback);
2209 }).bind(this);
2210
2211 this.close = adapterFun('close', function (callback) {
2212 this._closed = true;
2213 this.emit('closed');
2214 return this._close(callback);
2215 }).bind(this);
2216
2217 this.info = adapterFun('info', function (callback) {
2218 this._info((err, info) => {
2219 if (err) {
2220 return callback(err);
2221 }
2222 // assume we know better than the adapter, unless it informs us
2223 info.db_name = info.db_name || this.name;
2224 info.auto_compaction = !!(this.auto_compaction && !isRemote(this));
2225 info.adapter = this.adapter;
2226 callback(null, info);
2227 });
2228 }).bind(this);
2229
2230 this.id = adapterFun('id', function (callback) {
2231 return this._id(callback);
2232 }).bind(this);
2233
// Write many docs at once. Validates the request shape, each doc and its
// attachments, resolves the effective new_edits flag, then hands the
// batch to the adapter's _bulkDocs.
this.bulkDocs = adapterFun('bulkDocs', function (req, opts, callback) {
  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }

  opts = opts || {};

  // accept a bare array of docs as shorthand for {docs: [...]}
  if (Array.isArray(req)) {
    req = {
      docs: req
    };
  }

  if (!req || !req.docs || !Array.isArray(req.docs)) {
    return callback(createError(MISSING_BULK_DOCS));
  }

  // per-doc validation: each entry must be a single doc object, and any
  // _rev it carries must be well-formed
  for (var i = 0; i < req.docs.length; ++i) {
    const doc = req.docs[i];
    if (isNotSingleDoc(doc)) {
      return callback(createError(NOT_AN_OBJECT));
    }
    if ('_rev' in doc && !isValidRev(doc._rev)) {
      return callback(createError(INVALID_REV));
    }
  }

  // validate attachment names; a missing content_type is tolerated but
  // warned about
  var attachmentError;
  req.docs.forEach(function (doc) {
    if (doc._attachments) {
      Object.keys(doc._attachments).forEach(function (name) {
        attachmentError = attachmentError || attachmentNameError(name);
        if (!doc._attachments[name].content_type) {
          guardedConsole('warn', 'Attachment', name, 'on document', doc._id, 'is missing content_type');
        }
      });
    }
  });

  if (attachmentError) {
    return callback(createError(BAD_REQUEST, attachmentError));
  }

  // opts.new_edits wins; fall back to req.new_edits, then default true
  if (!('new_edits' in opts)) {
    if ('new_edits' in req) {
      opts.new_edits = req.new_edits;
    } else {
      opts.new_edits = true;
    }
  }

  var adapter = this;
  if (!opts.new_edits && !isRemote(adapter)) {
    // ensure revisions of the same doc are sorted, so that
    // the local adapter processes them correctly (#2935)
    req.docs.sort(compareByIdThenRev);
  }

  cleanDocs(req.docs);

  // in the case of conflicts, we want to return the _ids to the user
  // however, the underlying adapter may destroy the docs array, so
  // create a copy here
  var ids = req.docs.map(function (doc) {
    return doc._id;
  });

  this._bulkDocs(req, opts, function (err, res$$1) {
    if (err) {
      return callback(err);
    }
    if (!opts.new_edits) {
      // this is what couch does when new_edits is false
      res$$1 = res$$1.filter(function (x) {
        return x.error;
      });
    }
    // add ids for error/conflict responses (not required for CouchDB)
    if (!isRemote(adapter)) {
      for (var i = 0, l = res$$1.length; i < l; i++) {
        res$$1[i].id = res$$1[i].id || ids[i];
      }
    }

    callback(null, res$$1);
  });
}).bind(this);
2322
// Record that `dependentDb` (e.g. a mapreduce view database) depends on
// this database, so destroy() can cascade to it later. The opened
// dependent database is returned via the callback as {db}.
this.registerDependentDatabase = adapterFun('registerDependentDatabase', function (dependentDb, callback) {
  var dbOptions = clone(this.__opts);
  // views may live in a different adapter than the primary db
  if (this.__opts.view_adapter) {
    dbOptions.adapter = this.__opts.view_adapter;
  }

  var depDB = new this.constructor(dependentDb, dbOptions);

  // add the name to the local registry doc, only if not present yet
  function diffFun(doc) {
    doc.dependentDbs = doc.dependentDbs || {};
    if (doc.dependentDbs[dependentDb]) {
      return false; // no update required
    }
    doc.dependentDbs[dependentDb] = true;
    return doc;
  }
  upsert(this, '_local/_pouch_dependentDbs', diffFun).then(function () {
    callback(null, {db: depDB});
  }).catch(callback);
}).bind(this);
2343
// Destroy the database. For local databases, any registered dependent
// databases (see registerDependentDatabase) are destroyed first.
this.destroy = adapterFun('destroy', function (opts, callback) {

  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }

  var usePrefix = 'use_prefix' in this ? this.use_prefix : true;

  const destroyDb = () => {
    // call destroy method of the particular adaptor
    this._destroy(opts, (err, resp) => {
      if (err) {
        return callback(err);
      }
      this._destroyed = true;
      this.emit('destroyed');
      callback(null, resp || { 'ok': true });
    });
  };

  if (isRemote(this)) {
    // no need to check for dependent DBs if it's a remote DB
    return destroyDb();
  }

  this.get('_local/_pouch_dependentDbs', (err, localDoc) => {
    if (err) {
      /* istanbul ignore if */
      if (err.status !== 404) {
        return callback(err);
      } else { // no dependencies
        return destroyDb();
      }
    }
    var dependentDbs = localDoc.dependentDbs;
    var PouchDB = this.constructor;
    // destroy each dependent db, stripping the adapter prefix from the
    // stored name where one was applied
    var deletedMap = Object.keys(dependentDbs).map((name) => {
      // use_prefix is only false in the browser
      /* istanbul ignore next */
      var trueName = usePrefix ?
        name.replace(new RegExp('^' + PouchDB.prefix), '') : name;
      return new PouchDB(trueName, this.__opts).destroy();
    });
    Promise.all(deletedMap).then(destroyDb, callback);
  });
}).bind(this);
2391 }
2392
// Default compaction implementation: walk the changes feed from the last
// compaction checkpoint, compact each changed document, and track
// progress as an active task. Adapters may override this.
_compact(opts, callback) {
  var changesOpts = {
    return_docs: false,
    last_seq: opts.last_seq || 0,
    since: opts.last_seq || 0
  };
  var promises = [];

  var taskId;
  var compactedDocs = 0;

  const onChange = (row) => {
    this.activeTasks.update(taskId, {
      completed_items: ++compactedDocs
    });
    // second arg 0: presumably "keep no extra revs" — see compactDocument
    promises.push(this.compactDocument(row.id, 0));
  };
  const onError = (err) => {
    this.activeTasks.remove(taskId, err);
    callback(err);
  };
  const onComplete = (resp) => {
    var lastSeq = resp.last_seq;
    // wait for all per-doc compactions, then persist the checkpoint so a
    // future compaction can resume from here
    Promise.all(promises).then(() => {
      return upsert(this, '_local/compaction', (doc) => {
        if (!doc.last_seq || doc.last_seq < lastSeq) {
          doc.last_seq = lastSeq;
          return doc;
        }
        return false; // somebody else got here first, don't update
      });
    }).then(() => {
      this.activeTasks.remove(taskId);
      callback(null, {ok: true});
    }).catch(onError);
  };

  this.info().then((info) => {
    taskId = this.activeTasks.add({
      name: 'database_compaction',
      total_items: info.update_seq - changesOpts.last_seq,
    });

    this.changes(changesOpts)
      .on('change', onChange)
      .on('complete', onComplete)
      .on('error', onError);
  });
}
2442
// Open a changes feed. Accepts (opts, callback), (callback) or (opts).
changes(opts, callback) {
  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }

  opts = opts || {};

  // By default set return_docs to false if the caller has opts.live = true,
  // this will prevent us from collecting the set of changes indefinitely
  // resulting in growing memory
  opts.return_docs = ('return_docs' in opts) ? opts.return_docs : !opts.live;

  return new Changes$1(this, opts, callback);
}
2458
2459 type() {
2460 return (typeof this._type === 'function') ? this._type() : this.adapter;
2461 }
2462}
2463
2464// The abstract purge implementation expects a doc id and the rev of a leaf node in that doc.
2465// It will return errors if the rev doesn’t exist or isn’t a leaf.
AbstractPouchDB.prototype.purge = adapterFun('_purge', function (docId, rev$$1, callback) {
  // purge support is optional for adapters
  if (typeof this._purge === 'undefined') {
    return callback(createError(UNKNOWN_ERROR, 'Purge is not implemented in the ' + this.adapter + ' adapter.'));
  }
  var self = this;

  self._getRevisionTree(docId, (error, revs) => {
    if (error) {
      return callback(error);
    }
    if (!revs) {
      return callback(createError(MISSING_DOC));
    }
    let path$$1;
    try {
      // throws when the rev doesn't exist in the tree or isn't a leaf
      path$$1 = findPathToLeaf(revs, rev$$1);
    } catch (error) {
      return callback(error.message || error);
    }
    self._purge(docId, path$$1, (error, result) => {
      if (error) {
        return callback(error);
      } else {
        // record the purge in the purge-seq log before reporting success
        appendPurgeSeq(self, docId, rev$$1).then(function () {
          return callback(null, result);
        });
      }
    });
  });
});
2496
/**
 * Queues functions until the database is ready (or has failed), then
 * flushes them. After a failure every queued (or later-added) function is
 * invoked with the failure error; after ready they are invoked with no
 * arguments.
 */
class TaskQueue {
  constructor() {
    this.isReady = false;
    this.failed = false;
    this.queue = [];
  }

  // Drain the queue, handing the recorded failure (if any) to each task.
  execute() {
    let task;
    while ((task = this.queue.shift())) {
      if (this.failed) {
        task(this.failed);
      } else {
        task();
      }
    }
  }

  // Record a fatal setup error and flush pending tasks with it.
  fail(err) {
    this.failed = err;
    this.execute();
  }

  // Mark the db ready and flush pending tasks.
  ready(db) {
    this.isReady = true;
    this.db = db;
    this.execute();
  }

  // Enqueue a task; if the queue already failed, report that immediately.
  addTask(fun) {
    this.queue.push(fun);
    if (this.failed) {
      this.execute();
    }
  }
}
2535
// Work out which adapter a database name/options pair should use, and the
// (possibly prefixed) backend name to open it under.
function parseAdapter(name, opts) {
  // URL-style names ("http://...", "leveldb://...") encode the adapter in
  // the scheme.
  const match = name.match(/([a-z-]*):\/\/(.*)/);
  if (match) {
    const scheme = match[1];
    const rest = match[2];
    // the http adapter expects the fully qualified name
    const qualified = /https?/.test(scheme) ? scheme + '://' + rest : rest;
    return { name: qualified, adapter: scheme };
  }

  const adapters = PouchDB.adapters;
  const preferredAdapters = PouchDB.preferredAdapters;
  const prefix = PouchDB.prefix;
  let adapterName = opts.adapter;

  if (!adapterName) { // automatically determine adapter
    for (const candidate of preferredAdapters) {
      adapterName = candidate;
      // check for browsers that have been upgraded from websql-only to websql+idb
      /* istanbul ignore if */
      if (adapterName === 'idb' && 'websql' in adapters &&
          hasLocalStorage() && localStorage['_pouch__websqldb_' + prefix + name]) {
        // log it, because this can be confusing during development
        guardedConsole('log', 'PouchDB is downgrading "' + name + '" to WebSQL to' +
          ' avoid data loss, because it was already opened with WebSQL.');
        continue; // keep using websql to avoid user data loss
      }
      break;
    }
  }

  const adapter = adapters[adapterName];

  // if adapter is invalid, then an error will be thrown later
  const usePrefix = (adapter && 'use_prefix' in adapter) ?
    adapter.use_prefix : true;

  return {
    name: usePrefix ? (prefix + name) : name,
    adapter: adapterName
  };
}
2578
// Classic prototypal inheritance: A's instances inherit from B.prototype
// while A remains the (non-enumerable) constructor.
function inherits(A, B) {
  const descriptors = { constructor: { value: A } };
  A.prototype = Object.create(B.prototype, descriptors);
}

// Build a subclass of `parent` whose constructor body is `init`, callable
// with or without `new`.
function createClass(parent, init) {
  function klass(...args) {
    if (this instanceof klass) {
      init.apply(this, args);
    } else {
      return new klass(...args);
    }
  }
  inherits(klass, parent);
  return klass;
}
2595
2596// OK, so here's the deal. Consider this code:
2597// var db1 = new PouchDB('foo');
2598// var db2 = new PouchDB('foo');
2599// db1.destroy();
2600// ^ these two both need to emit 'destroyed' events,
2601// as well as the PouchDB constructor itself.
2602// So we have one db object (whichever one got destroy() called on it)
2603// responsible for emitting the initial event, which then gets emitted
2604// by the constructor, which then broadcasts it to any other dbs
2605// that may have been created with the same name.
function prepareForDestruction(self) {
  // One-shot listeners: whichever of 'destroyed'/'closed' fires first
  // detaches the other, so a db is never reported both ways.
  const onDestroyed = (fromConstructor) => {
    self.removeListener('closed', onClosed);
    // don't echo the constructor's own broadcast back to it
    if (!fromConstructor) {
      self.constructor.emit('destroyed', self.name);
    }
  };

  const onClosed = () => {
    self.removeListener('destroyed', onDestroyed);
    self.constructor.emit('unref', self);
  };

  self.once('destroyed', onDestroyed);
  self.once('closed', onClosed);
  self.constructor.emit('ref', self);
}
2624
// Concrete PouchDB implementation: resolves the adapter for the given
// name/options and initialises it. The public PouchDB constructor below
// delegates to _setup so subclasses built via createClass work too.
class PouchInternal extends AbstractPouchDB {
  constructor(name, opts) {
    super();
    this._setup(name, opts);
  }

  _setup(name, opts) {
    super._setup();
    opts = opts || {};

    // also accept a single options object carrying the name
    if (name && typeof name === 'object') {
      opts = name;
      name = opts.name;
      delete opts.name;
    }

    if (opts.deterministic_revs === undefined) {
      opts.deterministic_revs = true;
    }

    // keep a private clone so later option mutation can't surprise us
    this.__opts = opts = clone(opts);

    this.auto_compaction = opts.auto_compaction;
    this.purged_infos_limit = opts.purged_infos_limit || 1000;
    this.prefix = PouchDB.prefix;

    if (typeof name !== 'string') {
      throw new Error('Missing/invalid DB name');
    }

    var prefixedName = (opts.prefix || '') + name;
    var backend = parseAdapter(prefixedName, opts);

    opts.name = backend.name;
    opts.adapter = opts.adapter || backend.adapter;

    this.name = name;
    this._adapter = opts.adapter;
    PouchDB.emit('debug', ['adapter', 'Picked adapter: ', opts.adapter]);

    if (!PouchDB.adapters[opts.adapter] ||
        !PouchDB.adapters[opts.adapter].valid()) {
      throw new Error('Invalid Adapter: ' + opts.adapter);
    }

    // a separate adapter may be requested for secondary (view) indexes
    if (opts.view_adapter) {
      if (!PouchDB.adapters[opts.view_adapter] ||
          !PouchDB.adapters[opts.view_adapter].valid()) {
        throw new Error('Invalid View Adapter: ' + opts.view_adapter);
      }
    }

    this.taskqueue = new TaskQueue();

    this.adapter = opts.adapter;

    // initialise the adapter asynchronously; operations queued through
    // taskqueue run once it is ready (or fail if initialisation failed)
    PouchDB.adapters[opts.adapter].call(this, opts, (err) => {
      if (err) {
        return this.taskqueue.fail(err);
      }
      prepareForDestruction(this);

      this.emit('created', this);
      PouchDB.emit('created', this.name);
      this.taskqueue.ready(this);
    });
  }
}
2693
// The public constructor: a callable class (usable with or without `new`)
// that simply runs PouchInternal's setup.
const PouchDB = createClass(PouchInternal, function (name, opts) {
  PouchInternal.prototype._setup.call(this, name, opts);
});

// node-fetch wrapped with cookie handling, used for remote databases
var fetch = fetchCookie(nodeFetch__default);
2699
/**
 * In-memory registry of long-running tasks (e.g. database compaction),
 * keyed by a generated UUID. Mirrors CouchDB's /_active_tasks shape.
 */
class ActiveTasks {
  constructor() {
    this.tasks = {};
  }

  // Snapshot of all current task records.
  list() {
    return Object.values(this.tasks);
  }

  // Register a new task and return its generated id.
  add(task) {
    const id = uuid.v4();
    this.tasks[id] = {
      id,
      name: task.name,
      total_items: task.total_items,
      created_at: new Date().toJSON()
    };
    return id;
  }

  get(id) {
    return this.tasks[id];
  }

  /* eslint-disable no-unused-vars */
  // Drop a task; `reason` is accepted for API symmetry but unused.
  remove(id, reason) {
    delete this.tasks[id];
    return this.tasks;
  }

  // Merge progress counters into an existing task, stamping updated_at.
  // Unknown ids are ignored.
  update(id, updatedTask) {
    const existing = this.tasks[id];
    if (typeof existing !== 'undefined') {
      const { total_items, completed_items } = updatedTask;
      this.tasks[id] = {
        id: existing.id,
        name: existing.name,
        created_at: existing.created_at,
        total_items: total_items || existing.total_items,
        completed_items: completed_items || existing.completed_items,
        updated_at: new Date().toJSON()
      };
    }
    return this.tasks;
  }
}
2746
// Registered adapters by name, and the order in which they are tried
// when the caller doesn't specify one.
PouchDB.adapters = {};
PouchDB.preferredAdapters = [];

// prepended to database names by adapters that honour use_prefix
PouchDB.prefix = '_pouch_';

// single emitter backing the static PouchDB.on/emit/... methods
var eventEmitter = new EE();
2753
// Graft EventEmitter's API onto the constructor itself (PouchDB.on etc.)
// and wire up the cross-instance destruction bookkeeping.
function setUpEventEmitter(Pouch) {
  Object.keys(EE.prototype).forEach(function (key) {
    if (typeof EE.prototype[key] === 'function') {
      Pouch[key] = eventEmitter[key].bind(eventEmitter);
    }
  });

  // these are created in constructor.js, and allow us to notify each DB with
  // the same name that it was destroyed, via the constructor object
  var destructListeners = Pouch._destructionListeners = new Map();

  // track every live instance by database name
  Pouch.on('ref', function onConstructorRef(db) {
    if (!destructListeners.has(db.name)) {
      destructListeners.set(db.name, []);
    }
    destructListeners.get(db.name).push(db);
  });

  Pouch.on('unref', function onConstructorUnref(db) {
    if (!destructListeners.has(db.name)) {
      return;
    }
    var dbList = destructListeners.get(db.name);
    var pos = dbList.indexOf(db);
    if (pos < 0) {
      /* istanbul ignore next */
      return;
    }
    dbList.splice(pos, 1);
    // NOTE(review): this keeps the entry only when MORE than one db
    // remains after the splice; a single remaining instance loses its
    // registration. Looks suspicious (`> 0`?) — confirm upstream intent
    // before changing.
    if (dbList.length > 1) {
      /* istanbul ignore next */
      destructListeners.set(db.name, dbList);
    } else {
      destructListeners.delete(db.name);
    }
  });

  // when any instance is destroyed, notify every other instance sharing
  // its name (true = "from constructor", so they don't re-broadcast)
  Pouch.on('destroyed', function onConstructorDestroyed(name) {
    if (!destructListeners.has(name)) {
      return;
    }
    var dbList = destructListeners.get(name);
    destructListeners.delete(name);
    dbList.forEach(function (db) {
      db.emit('destroyed',true);
    });
  });
}
2802
setUpEventEmitter(PouchDB);

// Register an adapter implementation under `id`; optionally append it to
// the automatic-selection preference list.
PouchDB.adapter = function (id, obj, addToPreferredAdapters) {
  /* istanbul ignore else */
  if (obj.valid()) {
    PouchDB.adapters[id] = obj;
    if (addToPreferredAdapters) {
      PouchDB.preferredAdapters.push(id);
    }
  }
};
2814
// Install a plugin: either a function invoked with the PouchDB
// constructor, or an object whose members are copied onto the prototype.
// Throws for anything else (including an empty object).
PouchDB.plugin = function (obj) {
  if (typeof obj === 'function') { // function style for plugins
    obj(PouchDB);
  } else if (typeof obj !== 'object' || Object.keys(obj).length === 0) {
    throw new Error('Invalid plugin: got "' + obj + '", expected an object or a function');
  } else {
    // object style for plugins
    for (const id of Object.keys(obj)) {
      PouchDB.prototype[id] = obj[id];
    }
  }
  // keep defaults flowing through when called on a .defaults() subclass
  if (this.__defaults) {
    PouchDB.__defaults = Object.assign({}, this.__defaults);
  }
  return PouchDB;
};
2830
// Create a PouchDB subclass that applies `defaultOpts` to every database
// it opens. Defaults are transitive across chained .defaults() calls.
PouchDB.defaults = function (defaultOpts) {
  let PouchWithDefaults = createClass(PouchDB, function (name, opts) {
    opts = opts || {};

    // accept a single options object carrying the name
    if (name && typeof name === 'object') {
      opts = name;
      name = opts.name;
      delete opts.name;
    }

    // caller-supplied options win over the stored defaults
    opts = Object.assign({}, PouchWithDefaults.__defaults, opts);
    PouchDB.call(this, name, opts);
  });

  PouchWithDefaults.preferredAdapters = PouchDB.preferredAdapters.slice();
  // copy static members (adapters, plugin, fetch, ...) onto the subclass
  Object.keys(PouchDB).forEach(function (key) {
    if (!(key in PouchWithDefaults)) {
      PouchWithDefaults[key] = PouchDB[key];
    }
  });

  // make default options transitive
  // https://github.com/pouchdb/pouchdb/issues/5922
  PouchWithDefaults.__defaults = Object.assign({}, this.__defaults, defaultOpts);

  return PouchWithDefaults;
};
2858
// Static fetch helper (cookie-aware node-fetch) for remote requests.
PouchDB.fetch = function (url, opts) {
  return fetch(url, opts);
};

// one shared task registry across all database instances
PouchDB.prototype.activeTasks = PouchDB.activeTasks = new ActiveTasks();

// managed automatically by set-version.js
var version = "9.0.0";
2867
2868// this would just be "return doc[field]", but fields
2869// can be "deep" due to dot notation
// Walk `doc` along the pre-parsed field path (see parseField) and return
// the value found there.
function getFieldFromDoc(doc, parsedField) {
  let value = doc;
  for (const key of parsedField) {
    value = value[key];
    // stop descending on any falsy intermediate (null, undefined, 0, '')
    if (!value) {
      break;
    }
  }
  return value;
}
2881
// Three-way comparison suitable for Array.prototype.sort.
function compare(left, right) {
  if (left < right) {
    return -1;
  }
  return left > right ? 1 : 0;
}
2885
2886// Converts a string in dot notation to an array of its components, with backslash escaping
// Split a dot-notation field name into components; '\.' and '\$' are
// escapes for literal '.' and '$'.
function parseField(fieldName) {
  const fields = [];
  let current = '';
  for (let i = 0; i < fieldName.length; i++) {
    const ch = fieldName[i];
    const escaped =
      i > 0 && fieldName[i - 1] === '\\' && (ch === '$' || ch === '.');
    if (escaped) {
      // replace the backslash appended on the previous iteration
      current = current.slice(0, -1) + ch;
    } else if (ch === '.') {
      // an unescaped '.' is a field delimiter
      fields.push(current);
      current = '';
    } else {
      current += ch;
    }
  }
  fields.push(current);
  return fields;
}
2907
const combinationFields = ['$or', '$nor', '$not'];

// True when `field` is one of the combinational selector operators.
function isCombinationalField(field) {
  return combinationFields.includes(field);
}
2912
// First enumerable key of an object (selectors are single-key objects).
function getKey(obj) {
  const [firstKey] = Object.keys(obj);
  return firstKey;
}

// Value stored under that first key.
function getValue(obj) {
  return obj[getKey(obj)];
}
2920
2921
2922// flatten an array of selectors joined by an $and operator
// Flatten an array of selectors joined by an implicit $and into a single
// selector object, collapsing redundant range operators along the way.
function mergeAndedSelectors(selectors) {

  // sort to ensure that e.g. if the user specified
  // $and: [{$gt: 'a'}, {$gt: 'b'}], then it's collapsed into
  // just {$gt: 'b'}
  var res$$1 = {};
  // tracks whether each combinational field has been seen yet
  var first = {$or: true, $nor: true};

  selectors.forEach(function (selector) {
    Object.keys(selector).forEach(function (field) {
      var matcher = selector[field];
      // bare values are shorthand for {$eq: value}
      if (typeof matcher !== 'object') {
        matcher = {$eq: matcher};
      }

      if (isCombinationalField(field)) {
        // or, nor
        if (matcher instanceof Array) {
          if (first[field]) {
            first[field] = false;
            res$$1[field] = matcher;
            return;
          }

          // cross-product merge of the existing branches with the new ones
          var entries = [];
          res$$1[field].forEach(function (existing) {
            Object.keys(matcher).forEach(function (key) {
              var m = matcher[key];
              var longest = Math.max(Object.keys(existing).length, Object.keys(m).length);
              var merged = mergeAndedSelectors([existing, m]);
              if (Object.keys(merged).length <= longest) {
                // we have a situation like: (a :{$eq :1} || ...) && (a {$eq: 2} || ...)
                // merging would produce a $eq 2 when actually we shouldn't ever match against these merged conditions
                // merged should always contain more values to be valid
                return;
              }
              entries.push(merged);
            });
          });
          res$$1[field] = entries;
        } else {
          // not
          res$$1[field] = mergeAndedSelectors([matcher]);
        }
      } else {
        // plain field: fold each operator into the accumulated matchers
        var fieldMatchers = res$$1[field] = res$$1[field] || {};
        Object.keys(matcher).forEach(function (operator) {
          var value = matcher[operator];

          if (operator === '$gt' || operator === '$gte') {
            return mergeGtGte(operator, value, fieldMatchers);
          } else if (operator === '$lt' || operator === '$lte') {
            return mergeLtLte(operator, value, fieldMatchers);
          } else if (operator === '$ne') {
            return mergeNe(value, fieldMatchers);
          } else if (operator === '$eq') {
            return mergeEq(value, fieldMatchers);
          } else if (operator === "$regex") {
            return mergeRegex(value, fieldMatchers);
          }
          fieldMatchers[operator] = value;
        });
      }
    });
  });

  return res$$1;
}
2991
2992
2993
2994// collapse logically equivalent gt/gte values
// collapse logically equivalent gt/gte values, keeping only the tightest
// lower bound for a field
function mergeGtGte(operator, value, fieldMatchers) {
  // an existing $eq pins the field exactly; lower bounds add nothing
  if (fieldMatchers.$eq !== undefined) {
    return;
  }
  const gte = fieldMatchers.$gte;
  if (gte !== undefined) {
    if (operator === '$gte') {
      if (value > gte) { // more specificity
        fieldMatchers.$gte = value;
      }
    } else if (value >= gte) {
      // a strict bound at or above the inclusive one is tighter
      delete fieldMatchers.$gte;
      fieldMatchers.$gt = value;
    }
    return;
  }
  const gt = fieldMatchers.$gt;
  if (gt !== undefined) {
    if (operator === '$gt') {
      if (value > gt) { // more specificity
        fieldMatchers.$gt = value;
      }
    } else if (value > gt) {
      // an inclusive bound strictly above the strict one is tighter
      delete fieldMatchers.$gt;
      fieldMatchers.$gte = value;
    }
    return;
  }
  // first bound seen for this field
  fieldMatchers[operator] = value;
}
3025
3026// collapse logically equivalent lt/lte values
// collapse logically equivalent lt/lte values, keeping only the tightest
// upper bound for a field
function mergeLtLte(operator, value, fieldMatchers) {
  // an existing $eq pins the field exactly; upper bounds add nothing
  if (fieldMatchers.$eq !== undefined) {
    return;
  }
  const lte = fieldMatchers.$lte;
  if (lte !== undefined) {
    if (operator === '$lte') {
      if (value < lte) { // more specificity
        fieldMatchers.$lte = value;
      }
    } else if (value <= lte) {
      // a strict bound at or below the inclusive one is tighter
      delete fieldMatchers.$lte;
      fieldMatchers.$lt = value;
    }
    return;
  }
  const lt = fieldMatchers.$lt;
  if (lt !== undefined) {
    if (operator === '$lte') {
      if (value < lt) {
        // an inclusive bound strictly below the strict one is tighter
        delete fieldMatchers.$lt;
        fieldMatchers.$lte = value;
      }
    } else if (value < lt) { // more specificity
      fieldMatchers.$lt = value;
    }
    return;
  }
  // first bound seen for this field
  fieldMatchers[operator] = value;
}
3057
3058// combine $ne values into one array
// combine $ne values into one array — there can be many things a field
// must "not" be, so $ne accumulates
function mergeNe(value, fieldMatchers) {
  if (!('$ne' in fieldMatchers)) {
    fieldMatchers.$ne = [];
  }
  fieldMatchers.$ne.push(value);
}
3067
3068// add $eq into the mix
// add $eq into the mix: $eq dominates, so every range/inequality matcher
// it supersedes is dropped
// TODO: check for user errors here
function mergeEq(value, fieldMatchers) {
  for (const op of ['$gt', '$gte', '$lt', '$lte', '$ne']) {
    delete fieldMatchers[op];
  }
  fieldMatchers.$eq = value;
}
3079
3080// combine $regex values into one array
// combine $regex values into one array — a value may have to match
// several regexes at once, so $regex accumulates
function mergeRegex(value, fieldMatchers) {
  if (!('$regex' in fieldMatchers)) {
    fieldMatchers.$regex = [];
  }
  fieldMatchers.$regex.push(value);
}
3089
3090//#7458: execute function mergeAndedSelectors on nested $and
// #7458: apply mergeAndedSelectors to every nested $and in the tree.
function mergeAndedSelectorsNested(obj) {
  for (var prop in obj) {
    // NOTE(review): when obj is an array, this inner scan runs once per
    // enumerated index, re-checking the whole array each time. It is
    // idempotent (merged entries no longer have $and) but redundant
    // after the first pass.
    if (Array.isArray(obj)) {
      for (var i in obj) {
        if (obj[i]['$and']) {
          obj[i] = mergeAndedSelectors(obj[i]['$and']);
        }
      }
    }
    var value = obj[prop];
    if (typeof value === 'object') {
      mergeAndedSelectorsNested(value); // <- recursive call
    }
  }
  return obj;
}
3107
3108//#7458: determine id $and is present in selector (at any level)
// #7458: walk the selector tree and report whether any key anywhere is
// '$and'. The accumulator `isAnd` is threaded through the recursion.
// (for...in is kept deliberately: it tolerates null sub-values.)
function isAndInSelector(obj, isAnd) {
  for (var prop in obj) {
    isAnd = isAnd || prop === '$and';
    var value = obj[prop];
    if (typeof value === 'object') {
      isAnd = isAndInSelector(value, isAnd); // recurse into sub-selectors
    }
  }
  return isAnd;
}
3121
3122//
3123// normalize the selector
3124//
// Normalise a Mango selector: merge nested $and clauses, expand
// {field: value} shorthand into {field: {$eq: value}}, and wrap
// repeatable operators ($ne/$regex) in arrays. Works on a clone; the
// input is not mutated.
function massageSelector(input) {
  var result = clone(input);

  //#7458: if $and is present in selector (at any level) merge nested $and
  if (isAndInSelector(result, false)) {
    result = mergeAndedSelectorsNested(result);
    if ('$and' in result) {
      result = mergeAndedSelectors(result['$and']);
    }
  }

  ['$or', '$nor'].forEach(function (orOrNor) {
    if (orOrNor in result) {
      // massage each individual sub-selector
      // e.g. {foo: 'bar'} becomes {foo: {$eq: 'bar'}}
      result[orOrNor].forEach(function (subSelector) {
        var fields = Object.keys(subSelector);
        for (var i = 0; i < fields.length; i++) {
          var field = fields[i];
          var matcher = subSelector[field];
          if (typeof matcher !== 'object' || matcher === null) {
            subSelector[field] = {$eq: matcher};
          }
        }
      });
    }
  });

  if ('$not' in result) {
    //This feels a little like forcing, but it will work for now,
    //I would like to come back to this and make the merging of selectors a little more generic
    result['$not'] = mergeAndedSelectors([result['$not']]);
  }

  // expand the top-level shorthand matchers too
  var fields = Object.keys(result);

  for (var i = 0; i < fields.length; i++) {
    var field = fields[i];
    var matcher = result[field];

    if (typeof matcher !== 'object' || matcher === null) {
      matcher = {$eq: matcher};
    }
    result[field] = matcher;
  }

  normalizeArrayOperators(result);

  return result;
}
3175
3176//
3177// The $ne and $regex values must be placed in an array because these operators can be used multiple times on the same field.
3178// When $and is used, mergeAndedSelectors takes care of putting some of them into arrays, otherwise it's done here.
3179//
// Wrap top-level $ne/$regex values in arrays (those operators may repeat
// per field) and recurse through nested matcher objects and arrays.
// Mutates `selector` in place.
function normalizeArrayOperators(selector) {
  for (const field of Object.keys(selector)) {
    const matcher = selector[field];
    if (Array.isArray(matcher)) {
      // e.g. $or/$nor branches: normalise each object member
      for (const item of matcher) {
        if (item && typeof item === 'object') {
          normalizeArrayOperators(item);
        }
      }
    } else if (field === '$ne') {
      selector.$ne = [matcher];
    } else if (field === '$regex') {
      selector.$regex = [matcher];
    } else if (matcher && typeof matcher === 'object') {
      normalizeArrayOperators(matcher);
    }
  }
}
3199
// Build a padding string long enough to bring `str` up to `upToLength`
// characters. (padWith may be multi-character, so the loop — rather than
// String.repeat — preserves the possible overshoot semantics.)
function pad(str, padWith, upToLength) {
  const needed = upToLength - str.length;
  let padding = '';
  /* istanbul ignore next */
  while (padding.length < needed) {
    padding += padWith;
  }
  return padding;
}

// Left-pad `str` with `padWith` up to `upToLength` characters.
function padLeft(str, padWith, upToLength) {
  return pad(str, padWith, upToLength) + str;
}
3214
// Constants for the sortable number encoding (see parseNumber below).
var MIN_MAGNITUDE = -324; // verified by -Number.MIN_VALUE
var MAGNITUDE_DIGITS = 3; // ditto
var SEP = ''; // set to '_' for easier debugging
3218
// CouchDB-style collation: keys are first normalized, then compared by
// type class (via collationIndex), then within the type. Returns a
// negative number, 0, or a positive number, suitable for sort().
function collate(a, b) {

  if (a === b) {
    return 0;
  }

  a = normalizeKey(a);
  b = normalizeKey(b);

  // different type classes order by their collation index alone
  var ai = collationIndex(a);
  var bi = collationIndex(b);
  if ((ai - bi) !== 0) {
    return ai - bi;
  }
  switch (typeof a) {
    case 'number':
      return a - b;
    case 'boolean':
      return a < b ? -1 : 1;
    case 'string':
      return stringCollate(a, b);
  }
  return Array.isArray(a) ? arrayCollate(a, b) : objectCollate(a, b);
}
3243
3244// couch considers null/NaN/Infinity/-Infinity === undefined,
3245// for the purposes of mapreduce indexes. also, dates get stringified.
// couch considers null/NaN/Infinity/-Infinity === undefined,
// for the purposes of mapreduce indexes. also, dates get stringified.
function normalizeKey(key) {
  const type = typeof key;
  if (type === 'undefined') {
    return null;
  }
  if (type === 'number') {
    // non-finite numbers collapse to null, as CouchDB does
    return Number.isFinite(key) ? key : null;
  }
  if (type !== 'object' || key === null) {
    // strings, booleans and null pass through untouched
    return key;
  }
  if (Array.isArray(key)) {
    // Array.from maps holes too (as undefined -> null)
    return Array.from(key, (item) => normalizeKey(item));
  }
  /* istanbul ignore next */
  if (key instanceof Date) {
    return key.toJSON();
  }
  // generic object: rebuild with each defined own property normalized
  const normalized = {};
  for (const k of Object.keys(key)) {
    const val = key[k];
    if (typeof val !== 'undefined') {
      normalized[k] = normalizeKey(val);
    }
  }
  return normalized;
}
3280
// Encode a normalized key as a string fragment whose lexical order
// matches collate()'s ordering within the key's type class. null maps
// to the empty string.
function indexify(key) {
  if (key !== null) {
    switch (typeof key) {
      case 'boolean':
        return key ? 1 : 0;
      case 'number':
        return numToIndexableString(key);
      case 'string':
        // We've to be sure that key does not contain \u0000
        // Do order-preserving replacements:
        // 0 -> 1, 1
        // 1 -> 1, 2
        // 2 -> 2, 2
        /* eslint-disable no-control-regex */
        return key
          .replace(/\u0002/g, '\u0002\u0002')
          .replace(/\u0001/g, '\u0001\u0002')
          .replace(/\u0000/g, '\u0001\u0001');
        /* eslint-enable no-control-regex */
      case 'object':
        var isArray = Array.isArray(key);
        var arr = isArray ? key : Object.keys(key);
        var i = -1;
        var len = arr.length;
        var result = '';
        // arrays concatenate their members' encodings; objects
        // concatenate alternating key/value encodings
        if (isArray) {
          while (++i < len) {
            result += toIndexableString(arr[i]);
          }
        } else {
          while (++i < len) {
            var objKey = arr[i];
            result += toIndexableString(objKey) +
                      toIndexableString(key[objKey]);
          }
        }
        return result;
    }
  }
  return '';
}
3322
3323// convert the given key to a string that would be appropriate
3324// for lexical sorting, e.g. within a database, where the
3325// sorting is the same given by the collate() function.
function toIndexableString(key) {
  // \u0000 terminates each value; it sorts before every other character,
  // so shorter keys sort before their extensions.
  var zero = '\u0000';
  key = normalizeKey(key);
  return collationIndex(key) + SEP + indexify(key) + zero;
}
3331
// Decode a number previously encoded by the sortable-number scheme,
// starting at offset `i` in `str`. Returns {num, length} where length is
// the count of characters consumed.
function parseNumber(str, i) {
  var originalIdx = i;
  var num;
  // a leading '1' marks an encoded zero; otherwise '0' marks a negative
  var zero = str[i] === '1';
  if (zero) {
    num = 0;
    i++;
  } else {
    var neg = str[i] === '0';
    i++;
    var numAsString = '';
    // fixed-width exponent field, biased by MIN_MAGNITUDE
    var magAsString = str.substring(i, i + MAGNITUDE_DIGITS);
    var magnitude = parseInt(magAsString, 10) + MIN_MAGNITUDE;
    /* istanbul ignore next */
    if (neg) {
      magnitude = -magnitude;
    }
    i += MAGNITUDE_DIGITS;
    // mantissa digits run until the \u0000 terminator
    while (true) {
      var ch = str[i];
      if (ch === '\u0000') {
        break;
      } else {
        numAsString += ch;
      }
      i++;
    }
    numAsString = numAsString.split('.');
    if (numAsString.length === 1) {
      num = parseInt(numAsString, 10);
    } else {
      /* istanbul ignore next */
      num = parseFloat(numAsString[0] + '.' + numAsString[1]);
    }
    /* istanbul ignore next */
    if (neg) {
      // negative mantissas were stored complemented against 10
      num = num - 10;
    }
    /* istanbul ignore next */
    if (magnitude !== 0) {
      // parseFloat is more reliable than pow due to rounding errors
      // e.g. Number.MAX_VALUE would return Infinity if we did
      // num * Math.pow(10, magnitude);
      num = parseFloat(num + 'e' + magnitude);
    }
  }
  return {num, length : i - originalIdx};
}
3380
3381// move up the stack while parsing
3382// this function moved outside of parseIndexableString for performance
function pop(stack, metaStack) {
  // Finish the most recently parsed value and attach it to its parent
  // container (the array or object recorded on metaStack), if any.
  var obj = stack.pop();

  if (metaStack.length) {
    var lastMetaElement = metaStack[metaStack.length - 1];
    if (obj === lastMetaElement.element) {
      // popping a meta-element, e.g. an object whose value is another object
      metaStack.pop();
      lastMetaElement = metaStack[metaStack.length - 1];
    }
    var element = lastMetaElement.element;
    var lastElementIndex = lastMetaElement.index;
    if (Array.isArray(element)) {
      element.push(obj);
    } else if (lastElementIndex === stack.length - 2) { // obj with key+value
      var key = stack.pop();
      element[key] = obj;
    } else {
      stack.push(obj); // obj with key only
    }
  }
}
3405
// Inverse of toIndexableString(): decode a serialized key back into the
// original JS value. Implemented as a small stack machine — each type
// tag pushes a value, and '\u0000' terminates the current value (see
// pop() above for how nested arrays/objects are assembled).
function parseIndexableString(str) {
  var stack = [];
  var metaStack = []; // stack for arrays and objects
  var i = 0;

  /*eslint no-constant-condition: ["error", { "checkLoops": false }]*/
  while (true) {
    var collationIndex = str[i++];
    if (collationIndex === '\u0000') {
      if (stack.length === 1) {
        // the whole input has collapsed to a single value: done
        return stack.pop();
      } else {
        pop(stack, metaStack);
        continue;
      }
    }
    switch (collationIndex) {
      case '1': // null
        stack.push(null);
        break;
      case '2': // boolean, stored as '1'/'0'
        stack.push(str[i] === '1');
        i++;
        break;
      case '3': // number (see parseNumber for the encoding)
        var parsedNum = parseNumber(str, i);
        stack.push(parsedNum.num);
        i += parsedNum.length;
        break;
      case '4': // string, NUL-terminated with escaped control chars
        var parsedStr = '';
        /*eslint no-constant-condition: ["error", { "checkLoops": false }]*/
        while (true) {
          var ch = str[i];
          if (ch === '\u0000') {
            break;
          }
          parsedStr += ch;
          i++;
        }
        // perform the reverse of the order-preserving replacement
        // algorithm (see above)
        /* eslint-disable no-control-regex */
        parsedStr = parsedStr.replace(/\u0001\u0001/g, '\u0000')
          .replace(/\u0001\u0002/g, '\u0001')
          .replace(/\u0002\u0002/g, '\u0002');
        /* eslint-enable no-control-regex */
        stack.push(parsedStr);
        break;
      case '5': // array start
        var arrayElement = { element: [], index: stack.length };
        stack.push(arrayElement.element);
        metaStack.push(arrayElement);
        break;
      case '6': // object start
        var objElement = { element: {}, index: stack.length };
        stack.push(objElement.element);
        metaStack.push(objElement);
        break;
      /* istanbul ignore next */
      default:
        throw new Error(
          'bad collationIndex or unexpectedly reached end of input: ' +
          collationIndex);
    }
  }
}
3473
// Element-wise comparison of two arrays; when one is a prefix of the
// other, the shorter array sorts first.
function arrayCollate(a, b) {
  var shared = Math.min(a.length, b.length);
  for (var idx = 0; idx < shared; idx++) {
    var cmp = collate(a[idx], b[idx]);
    if (cmp !== 0) {
      return cmp;
    }
  }
  if (a.length === b.length) {
    return 0;
  }
  return a.length > b.length ? 1 : -1;
}
// Compare two strings using native code-unit ordering.
// See: https://github.com/daleharvey/pouchdb/issues/40
// This is incompatible with the CouchDB implementation, but its the
// best we can do for now
function stringCollate(a, b) {
  if (a === b) {
    return 0;
  }
  return a > b ? 1 : -1;
}
// Compare two plain objects pairwise by key order, then by the values
// under matching keys; when one key set is a prefix of the other, the
// object with fewer keys sorts first.
function objectCollate(a, b) {
  var aKeys = Object.keys(a);
  var bKeys = Object.keys(b);
  var shared = Math.min(aKeys.length, bKeys.length);
  for (var idx = 0; idx < shared; idx++) {
    // compare the keys themselves first
    var cmp = collate(aKeys[idx], bKeys[idx]);
    if (cmp !== 0) {
      return cmp;
    }
    // keys are equal, so compare the corresponding values
    cmp = collate(a[aKeys[idx]], b[bKeys[idx]]);
    if (cmp !== 0) {
      return cmp;
    }
  }
  if (aKeys.length === bKeys.length) {
    return 0;
  }
  return aKeys.length > bKeys.length ? 1 : -1;
}
// The collation is defined by erlangs ordered terms
// the atoms null, true, false come first, then numbers, strings,
// arrays, then objects
// null/undefined/NaN/Infinity/-Infinity are all considered null
// Returns: 1 null, 2 boolean, 3 number, 4 string, 5 array, 6 object;
// undefined for unrecognized types (matching the original behavior).
function collationIndex(x) {
  var typeOrder = ['boolean', 'number', 'string', 'object'].indexOf(typeof x);
  if (typeOrder !== -1) {
    if (x === null) {
      return 1;
    }
    if (Array.isArray(x)) {
      return 5;
    }
    // boolean -> 2, number -> 3, string -> 4, plain object -> 6
    return typeOrder < 3 ? (typeOrder + 2) : (typeOrder + 3);
  }
  /* istanbul ignore next */
  if (Array.isArray(x)) {
    return 5;
  }
}
3532
// conversion:
// x yyy zz...zz
// x = 0 for negative, 1 for 0, 2 for positive
// y = exponent (for negative numbers negated) moved so that it's >= 0
// z = mantisse
function numToIndexableString(num) {
  if (num === 0) {
    return '1';
  }

  // exponential notation hands us the magnitude and the factor directly
  var parts = num.toExponential().split(/e\+?/);
  var magnitude = parseInt(parts[1], 10);
  var isNegative = num < 0;

  // sign marker: '0' sorts before '2', so negatives come first
  var encoded = isNegative ? '0' : '2';

  // encode the magnitude next; shift it so every magnitude is >= 0, and
  // negate it for negative numbers so bigger magnitudes sort earlier
  var shiftedMag = (isNegative ? -magnitude : magnitude) - MIN_MAGNITUDE;
  encoded += SEP + padLeft(shiftedMag.toString(), '0', MAGNITUDE_DIGITS);

  // then encode the factor, which lies in [1..10)
  var factor = Math.abs(parseFloat(parts[0]));
  /* istanbul ignore next */
  if (isNegative) { // for negative reverse ordering
    factor = 10 - factor;
  }

  // fixed precision with trailing zeros (and a dangling '.') stripped
  var factorStr = factor.toFixed(20).replace(/\.?0+$/, '');

  return encoded + SEP + factorStr;
}
3576
// create a comparator based on the sort object
function createFieldSorter(sort) {

  // extract, in sort order, the values this comparator looks at
  function fieldValuesFor(doc) {
    return sort.map(function (sorting) {
      return getFieldFromDoc(doc, parseField(getKey(sorting)));
    });
  }

  return function (aRow, bRow) {
    var result = collate(fieldValuesFor(aRow.doc), fieldValuesFor(bRow.doc));
    if (result !== 0) {
      return result;
    }
    // tie-break on _id; this is what mango seems to do
    return compare(aRow.doc._id, bRow.doc._id);
  };
}
3600
// Apply selector filtering, sorting, and skip/limit to rows in memory
// (used when an index cannot satisfy part of the query).
function filterInMemoryFields(rows, requestDef, inMemoryFields) {
  var filtered = rows.filter(function (row) {
    return rowFilter(row.doc, requestDef.selector, inMemoryFields);
  });

  if (requestDef.sort) {
    // in-memory sort
    filtered = filtered.sort(createFieldSorter(requestDef.sort));
    var firstSort = requestDef.sort[0];
    if (typeof firstSort !== 'string' && getValue(firstSort) === 'desc') {
      filtered = filtered.reverse();
    }
  }

  if ('limit' in requestDef || 'skip' in requestDef) {
    // have to do the limit in-memory
    var skip = requestDef.skip || 0;
    var end = ('limit' in requestDef ? requestDef.limit : filtered.length) + skip;
    filtered = filtered.slice(skip, end);
  }
  return filtered;
}
3624
// True when `doc` satisfies every selector field named in
// inMemoryFields; combination operators ($or/$not/$nor) are delegated.
function rowFilter(doc, selector, inMemoryFields) {
  return inMemoryFields.every(function (field) {
    var fieldMatcher = selector[field];
    var fieldPath = parseField(field);
    var valueInDoc = getFieldFromDoc(doc, fieldPath);

    if (isCombinationalField(field)) {
      return matchCominationalSelector(field, fieldMatcher, doc);
    }
    return matchSelector(fieldMatcher, doc, fieldPath, valueInDoc);
  });
}
3637
// Recursively evaluate one selector fragment against a document field.
// `matcher` may be a literal (implicit $eq), an operator map
// ({$gt: 5, ...}), or a nested sub-field selector ({city: {...}}).
function matchSelector(matcher, doc, parsedField, docFieldValue) {
  if (!matcher) {
    // no filtering necessary; this field is just needed for sorting
    return true;
  }

  // is matcher an object, if so continue recursion
  if (typeof matcher === 'object') {
    return Object.keys(matcher).every(function (maybeUserOperator) {
      var userValue = matcher[ maybeUserOperator ];
      // explicit operator
      if (maybeUserOperator.indexOf("$") === 0) {
        return match(maybeUserOperator, doc, userValue, parsedField, docFieldValue);
      } else {
        // plain key: descend one level into the document value
        var subParsedField = parseField(maybeUserOperator);

        if (
          docFieldValue === undefined &&
          typeof userValue !== "object" &&
          subParsedField.length > 0
        ) {
          // the field does not exist, return or getFieldFromDoc will throw
          return false;
        }

        var subDocFieldValue = getFieldFromDoc(docFieldValue, subParsedField);

        if (typeof userValue === "object") {
          // field value is an object that might contain more operators
          return matchSelector(userValue, doc, parsedField, subDocFieldValue);
        }

        // implicit operator
        return match("$eq", doc, userValue, subParsedField, subDocFieldValue);
      }
    });
  }

  // no more depth, No need to recurse further
  return matcher === docFieldValue;
}
3679
// Handle the combination operators $or, $not and $nor by recursing into
// rowFilter with each sub-selector. (The name keeps its historical typo;
// callers elsewhere in this file reference it as-is.)
function matchCominationalSelector(field, matcher, doc) {

  if (field === '$or') {
    return matcher.some(function (orMatchers) {
      return rowFilter(doc, orMatchers, Object.keys(orMatchers));
    });
  }

  if (field === '$not') {
    return !rowFilter(doc, matcher, Object.keys(matcher));
  }

  // `$nor`: true only when no sub-selector matches. Use some() instead
  // of find() so the boolean intent is explicit (find() was only being
  // used for its truthiness).
  return !matcher.some(function (orMatchers) {
    return rowFilter(doc, orMatchers, Object.keys(orMatchers));
  });
}
3698
// Dispatch one operator match to its entry in the `matchers` table;
// an unknown operator is a programmer error and throws.
function match(userOperator, doc, userValue, parsedField, docFieldValue) {
  var operatorMatcher = matchers[userOperator];
  if (!operatorMatcher) {
    /* istanbul ignore next */
    throw new Error('unknown operator "' + userOperator +
      '" - should be one of $eq, $lte, $lt, $gt, $gte, $exists, $ne, $in, ' +
      '$nin, $size, $mod, $regex, $elemMatch, $type, $allMatch or $all');
  }
  return operatorMatcher(doc, userValue, parsedField, docFieldValue);
}
3708
// A field "exists" when it is neither undefined nor null.
function fieldExists(docFieldValue) {
  return docFieldValue !== undefined && docFieldValue !== null;
}
3712
// Unlike fieldExists, null still counts as a defined value here.
function fieldIsNotUndefined(docFieldValue) {
  return docFieldValue !== undefined;
}
3716
// $mod: the field must hold an integer; userValue is a
// [divisor, remainder] pair.
function modField(docFieldValue, userValue) {
  // parseInt round-trip mirrors the original integer check exactly
  var isInteger = typeof docFieldValue === "number" &&
    parseInt(docFieldValue, 10) === docFieldValue;
  if (!isInteger) {
    return false;
  }

  var divisor = userValue[0];
  var remainder = userValue[1];
  return docFieldValue % divisor === remainder;
}
3728
// $in / $nin helper: does the field value (or, when the field is an
// array, any one of its items) collate equal to some userValue entry?
function arrayContainsValue(docFieldValue, userValue) {
  return userValue.some(function (candidate) {
    if (docFieldValue instanceof Array) {
      return docFieldValue.some(function (item) {
        return collate(candidate, item) === 0;
      });
    }
    return collate(candidate, docFieldValue) === 0;
  });
}
3740
// $all helper: every userValue entry must collate equal to at least one
// item of the (array-valued) field.
function arrayContainsAllValues(docFieldValue, userValue) {
  return userValue.every(function (candidate) {
    return docFieldValue.some(function (item) {
      return collate(candidate, item) === 0;
    });
  });
}
3748
// $size helper: exact array-length match.
function arraySize(docFieldValue, userValue) {
  return userValue === docFieldValue.length;
}
3752
// $regex helper: build a RegExp from the user-supplied pattern string
// and test the field value against it.
function regexMatch(docFieldValue, userValue) {
  return new RegExp(userValue).test(docFieldValue);
}
3758
// $type helper: check the field value against a CouchDB-style type
// name. Returns undefined (falsy) for unrecognized names, matching the
// original switch fall-through.
function typeMatch(docFieldValue, userValue) {
  switch (userValue) {
    case 'null':
      return docFieldValue === null;
    case 'boolean':
      return typeof docFieldValue === 'boolean';
    case 'number':
      return typeof docFieldValue === 'number';
    case 'string':
      return typeof docFieldValue === 'string';
    case 'array':
      return Array.isArray(docFieldValue);
    case 'object':
      // only plain objects, not arrays/null
      return Object.prototype.toString.call(docFieldValue) === '[object Object]';
  }
}
3776
// Operator implementations used by match(). Each entry receives
// (doc, userValue, parsedField, docFieldValue), where docFieldValue is
// the already-extracted value at parsedField, and returns a boolean.
var matchers = {

  // at least one array element satisfies the sub-selector
  '$elemMatch': function (doc, userValue, parsedField, docFieldValue) {
    if (!Array.isArray(docFieldValue)) {
      return false;
    }

    if (docFieldValue.length === 0) {
      return false;
    }

    // arrays of objects: treat userValue as a full sub-selector
    if (typeof docFieldValue[0] === 'object' && docFieldValue[0] !== null) {
      return docFieldValue.some(function (val) {
        return rowFilter(val, userValue, Object.keys(userValue));
      });
    }

    // arrays of scalars: apply the matcher to each element directly
    return docFieldValue.some(function (val) {
      return matchSelector(userValue, doc, parsedField, val);
    });
  },

  // every array element satisfies the sub-selector
  '$allMatch': function (doc, userValue, parsedField, docFieldValue) {
    if (!Array.isArray(docFieldValue)) {
      return false;
    }

    /* istanbul ignore next */
    if (docFieldValue.length === 0) {
      return false;
    }

    if (typeof docFieldValue[0] === 'object' && docFieldValue[0] !== null) {
      return docFieldValue.every(function (val) {
        return rowFilter(val, userValue, Object.keys(userValue));
      });
    }

    return docFieldValue.every(function (val) {
      return matchSelector(userValue, doc, parsedField, val);
    });
  },

  // comparison operators use collate() so mixed types order consistently
  '$eq': function (doc, userValue, parsedField, docFieldValue) {
    return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) === 0;
  },

  '$gte': function (doc, userValue, parsedField, docFieldValue) {
    return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) >= 0;
  },

  '$gt': function (doc, userValue, parsedField, docFieldValue) {
    return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) > 0;
  },

  '$lte': function (doc, userValue, parsedField, docFieldValue) {
    return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) <= 0;
  },

  '$lt': function (doc, userValue, parsedField, docFieldValue) {
    return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) < 0;
  },

  '$exists': function (doc, userValue, parsedField, docFieldValue) {
    //a field that is null is still considered to exist
    if (userValue) {
      return fieldIsNotUndefined(docFieldValue);
    }

    return !fieldIsNotUndefined(docFieldValue);
  },

  '$mod': function (doc, userValue, parsedField, docFieldValue) {
    return fieldExists(docFieldValue) && modField(docFieldValue, userValue);
  },

  // note: userValue is an array of disallowed values here
  '$ne': function (doc, userValue, parsedField, docFieldValue) {
    return userValue.every(function (neValue) {
      return collate(docFieldValue, neValue) !== 0;
    });
  },
  '$in': function (doc, userValue, parsedField, docFieldValue) {
    return fieldExists(docFieldValue) && arrayContainsValue(docFieldValue, userValue);
  },

  '$nin': function (doc, userValue, parsedField, docFieldValue) {
    return fieldExists(docFieldValue) && !arrayContainsValue(docFieldValue, userValue);
  },

  '$size': function (doc, userValue, parsedField, docFieldValue) {
    return fieldExists(docFieldValue) &&
      Array.isArray(docFieldValue) &&
      arraySize(docFieldValue, userValue);
  },

  '$all': function (doc, userValue, parsedField, docFieldValue) {
    return Array.isArray(docFieldValue) && arrayContainsAllValues(docFieldValue, userValue);
  },

  // note: userValue is an array of patterns; all must match
  '$regex': function (doc, userValue, parsedField, docFieldValue) {
    return fieldExists(docFieldValue) &&
      typeof docFieldValue == "string" &&
      userValue.every(function (regexValue) {
        return regexMatch(docFieldValue, regexValue);
      });
  },

  '$type': function (doc, userValue, parsedField, docFieldValue) {
    return typeMatch(docFieldValue, userValue);
  }
};
3888
// return true if the given doc matches the supplied selector
// Convenience wrapper: massages the selector into canonical form, then
// reuses the in-memory row filter against a single one-row result set.
function matchesSelector(doc, selector) {
  /* istanbul ignore if */
  if (typeof selector !== 'object') {
    // match the CouchDB error message
    throw new Error('Selector error: expected a JSON object');
  }

  selector = massageSelector(selector);
  var row = {
    doc
  };

  var rowsMatched = filterInMemoryFields([row], { selector }, Object.keys(selector));
  return rowsMatched && rowsMatched.length === 1;
}
3905
// Compile filter-function source text (from a design document) into a
// callable. vm.runInNewContext evaluates it in a fresh scope; note the
// source is still executed, so design docs are trusted code here.
function evalFilter(input) {
  var source = '(function() {\n"use strict";\nreturn ' + input + '\n})()';
  return vm.runInNewContext(source);
}
3911
// Compile a map-function source string from a design doc and wrap it so
// it behaves like a changes filter: the wrapper returns true iff the
// view emits at least once for the given doc.
function evalView(input) {
  var viewWrapper = [
    '"use strict";',
    'var emitted = false;',
    'var emit = function (a, b) {',
    ' emitted = true;',
    '};',
    'var view = ' + input + ';',
    'view(doc);',
    'if (emitted) {',
    ' return true;',
    '}'
  ].join('\n');

  return vm.runInNewContext('(function(doc) {\n' + viewWrapper + '\n})');
}
3928
// Reject the combination of a selector with any filter other than
// '_selector'; errors are reported through the node-style callback
// rather than thrown.
function validate(opts, callback) {
  if (opts.selector && opts.filter && opts.filter !== '_selector') {
    var filterName = typeof opts.filter === 'string' ?
      opts.filter : 'function';
    return callback(new Error('selector invalid for filter "' + filterName + '"'));
  }
  callback();
}
3939
// Normalize changes-feed options in place: infer the implicit filter
// kind from `view`/`selector`, then canonicalize design-doc function
// names via normalizeDesignDocFunctionName.
function normalize(opts) {
  if (!opts.filter) {
    if (opts.view) {
      opts.filter = '_view';
    } else if (opts.selector) {
      opts.filter = '_selector';
    }
  }

  if (opts.filter && typeof opts.filter === 'string') {
    if (opts.filter === '_view') {
      opts.view = normalizeDesignDocFunctionName(opts.view);
    } else {
      opts.filter = normalizeDesignDocFunctionName(opts.filter);
    }
  }
}
3957
// Client-side filtering applies only for named (string) filters when no
// doc_ids whitelist is given and the database is local.
function shouldFilter(changesHandler, opts) {
  var isNamedFilter = typeof opts.filter === 'string';
  return opts.filter && isNamedFilter &&
    !opts.doc_ids && !isRemote(changesHandler.db);
}
3962
// Resolve opts.filter into an actual function before the changes feed
// starts: '_view' fetches a map function from a design doc, a selector
// becomes a matchesSelector closure, and any other string names a
// function in a design doc's `filters`. Errors and cancellation are
// reported through opts.complete.
function filter(changesHandler, opts) {
  var callback = opts.complete;
  if (opts.filter === '_view') {
    if (!opts.view || typeof opts.view !== 'string') {
      var err = createError(BAD_REQUEST,
        '`view` filter parameter not found or invalid.');
      return callback(err);
    }
    // fetch a view from a design doc, make it behave like a filter
    var viewName = parseDesignDocFunctionName(opts.view);
    changesHandler.db.get('_design/' + viewName[0], function (err, ddoc) {
      /* istanbul ignore if */
      if (changesHandler.isCancelled) {
        return callback(null, {status: 'cancelled'});
      }
      /* istanbul ignore next */
      if (err) {
        return callback(generateErrorFromResponse(err));
      }
      var mapFun = ddoc && ddoc.views && ddoc.views[viewName[1]] &&
        ddoc.views[viewName[1]].map;
      if (!mapFun) {
        return callback(createError(MISSING_DOC,
          (ddoc.views ? 'missing json key: ' + viewName[1] :
            'missing json key: views')));
      }
      // compile the map source into a filter-like function
      opts.filter = evalView(mapFun);
      changesHandler.doChanges(opts);
    });
  } else if (opts.selector) {
    opts.filter = function (doc) {
      return matchesSelector(doc, opts.selector);
    };
    changesHandler.doChanges(opts);
  } else {
    // fetch a filter from a design doc
    var filterName = parseDesignDocFunctionName(opts.filter);
    changesHandler.db.get('_design/' + filterName[0], function (err, ddoc) {
      /* istanbul ignore if */
      if (changesHandler.isCancelled) {
        return callback(null, {status: 'cancelled'});
      }
      /* istanbul ignore next */
      if (err) {
        return callback(generateErrorFromResponse(err));
      }
      var filterFun = ddoc && ddoc.filters && ddoc.filters[filterName[1]];
      if (!filterFun) {
        return callback(createError(MISSING_DOC,
          ((ddoc && ddoc.filters) ? 'missing json key: ' + filterName[1]
            : 'missing json key: filters')));
      }
      // compile the filter source into a callable
      opts.filter = evalFilter(filterFun);
      changesHandler.doChanges(opts);
    });
  }
}
4020
// Install the changes-filter helpers on the PouchDB constructor so the
// core changes implementation can delegate to them.
function applyChangesFilterPlugin(PouchDB) {
  PouchDB._changesFilterPlugin = {
    validate,
    normalize,
    shouldFilter,
    filter
  };
}
4029
// TODO: remove from pouchdb-core (breaking)
PouchDB.plugin(applyChangesFilterPlugin);

// expose the bundled version string on the constructor
PouchDB.version = version;
4034
// Tiny predicate used by the sublevel layer below.
function isFunction(f) {
  return typeof f === 'function';
}
4038
// A sublevel emitter exposes its key prefix via a prefix() method;
// plain values (already-resolved prefixes) pass through unchanged.
function getPrefix(db) {
  return typeof db.prefix === 'function' ? db.prefix() : db;
}
4045
// Shallow-copy every enumerable property (own and inherited — note the
// deliberate for...in) into a fresh object.
function clone$1(_obj) {
  var copy = {};
  for (var key in _obj) {
    copy[key] = _obj[key];
  }
  return copy;
}
4053
// Low-level adapter ("nut") over a levelup instance: encodes sublevel
// prefixes into keys via `precodec` and values via `codec`, and exposes
// the batch/get/iterator primitives that sublevel() builds on.
function nut(db, precodec, codec) {
  // Produce the on-disk key for (prefix, logical key).
  function encodePrefix(prefix, key, opts1, opts2) {
    return precodec.encode([ prefix, codec.encodeKey(key, opts1, opts2 ) ]);
  }

  // Fill in key/value encodings from the sublevel's own options when the
  // individual op does not specify them.
  function addEncodings(op, prefix) {
    if (prefix && prefix.options) {
      op.keyEncoding =
        op.keyEncoding || prefix.options.keyEncoding;
      op.valueEncoding =
        op.valueEncoding || prefix.options.valueEncoding;
    }
    return op;
  }

  db.open(function () { /* no-op */});

  return {
    // Write a batch of {key, value, prefix, type} ops atomically.
    apply: function (ops, opts, cb) {
      opts = opts || {};

      var batch = [];
      var i = -1;
      var len = ops.length;

      while (++i < len) {
        var op = ops[i];
        addEncodings(op, op.prefix);
        op.prefix = getPrefix(op.prefix);
        batch.push({
          key: encodePrefix(op.prefix, op.key, opts, op),
          // deletes carry no value
          value: op.type !== 'del' && codec.encodeValue(op.value, opts, op),
          type: op.type
        });
      }
      db.db.batch(batch, opts, cb);
    },
    get: function (key, prefix, opts, cb) {
      opts.asBuffer = codec.valueAsBuffer(opts);
      return db.db.get(
        encodePrefix(prefix, key, opts),
        opts,
        function (err, value) {
          if (err) {
            cb(err);
          } else {
            cb(null, codec.decodeValue(value, opts));
          }
        }
      );
    },
    // Decoder for iterator output: strips the prefix from the stored key.
    createDecoder: function (opts) {
      return function (key, value) {
        return {
          key: codec.decodeKey(precodec.decode(key)[1], opts),
          value: codec.decodeValue(value, opts)
        };
      };
    },
    isClosed: function isClosed() {
      return db.isClosed();
    },
    close: function close(cb) {
      return db.close(cb);
    },
    iterator: function (_opts) {
      var opts = clone$1(_opts || {});
      var prefix = _opts.prefix || [];

      function encodeKey(key) {
        return encodePrefix(prefix, key, opts, {});
      }

      // translate gt/gte/lt/lte bounds into the prefixed key space
      ltgt.toLtgt(_opts, opts, encodeKey, precodec.lowerBound, precodec.upperBound);

      // if these legacy values are in the options, remove them

      opts.prefix = null;

      //************************************************
      //hard coded defaults, for now...
      //TODO: pull defaults and encoding out of levelup.
      opts.keyAsBuffer = opts.valueAsBuffer = false;
      //************************************************


      //this is vital, otherwise limit: undefined will
      //create an empty stream.
      /* istanbul ignore next */
      if ('number' !== typeof opts.limit) {
        opts.limit = -1;
      }

      opts.keyAsBuffer = precodec.buffer;
      opts.valueAsBuffer = codec.valueAsBuffer(opts);

      function wrapIterator(iterator) {
        return {
          next: function (cb) {
            return iterator.next(cb);
          },
          end: function (cb) {
            iterator.end(cb);
          }
        };
      }

      return wrapIterator(db.db.iterator(opts));
    }
  };
}
4165
// Error type used by sublevel get() below; note a single shared
// instance (NOT_FOUND_ERROR) is reused rather than constructed per miss.
class NotFoundError extends Error {
  constructor() {
    super();
    this.name = 'NotFoundError';
  }
}
4172
var EventEmitter = EE.EventEmitter;
// version of the embedded level-sublevel fork
var version$1 = "6.5.4";

// shared singleton error; sublevel get() reports every miss with it
var NOT_FOUND_ERROR = new NotFoundError();
4177
// Build a namespaced ("sublevel") database view over the low-level nut
// adapter. Returns an EventEmitter with put/batch/get/readStream/close
// methods; `prefix` is the key-path array of this namespace.
var sublevel = function (nut, prefix, createStream, options) {
  var emitter = new EventEmitter();
  emitter.sublevels = {};
  emitter.options = options;

  emitter.version = version$1;

  emitter.methods = {};
  prefix = prefix || [];

  // Merge per-call opts over this sublevel's options, skipping
  // undefined values on both sides.
  function mergeOpts(opts) {
    var o = {};
    var k;
    if (options) {
      for (k in options) {
        if (typeof options[k] !== 'undefined') {
          o[k] = options[k];
        }
      }
    }
    if (opts) {
      for (k in opts) {
        if (typeof opts[k] !== 'undefined') {
          o[k] = opts[k];
        }
      }
    }
    return o;
  }

  // put(key, value[, opts], cb) — emits 'put' on success
  emitter.put = function (key, value, opts, cb) {
    if ('function' === typeof opts) {
      cb = opts;
      opts = {};
    }

    nut.apply([{
      key, value,
      prefix: prefix.slice(), type: 'put'
    }], mergeOpts(opts), function (err) {
      /* istanbul ignore next */
      if (err) {
        return cb(err);
      }
      emitter.emit('put', key, value);
      cb(null);
    });
  };

  emitter.prefix = function () {
    return prefix.slice();
  };

  // batch(ops[, opts], cb) — each op may carry its own prefix/encodings
  emitter.batch = function (ops, opts, cb) {
    if ('function' === typeof opts) {
      cb = opts;
      opts = {};
    }

    ops = ops.map(function (op) {
      return {
        key: op.key,
        value: op.value,
        prefix: op.prefix || prefix,
        keyEncoding: op.keyEncoding, // *
        valueEncoding: op.valueEncoding, // * (TODO: encodings on sublevel)
        type: op.type
      };
    });

    nut.apply(ops, mergeOpts(opts), function (err) {
      /* istanbul ignore next */
      if (err) {
        return cb(err);
      }
      emitter.emit('batch', ops);
      cb(null);
    });
  };

  // get(key[, opts], cb) — every failure is flattened into the shared
  // NOT_FOUND_ERROR (the underlying error is discarded)
  emitter.get = function (key, opts, cb) {
    /* istanbul ignore else */
    if ('function' === typeof opts) {
      cb = opts;
      opts = {};
    }
    nut.get(key, prefix, mergeOpts(opts), function (err, value) {
      if (err) {
        cb(NOT_FOUND_ERROR);
      } else {
        cb(null, value);
      }
    });
  };

  // lazily create + cache child namespaces
  emitter.sublevel = function (name, opts) {
    return emitter.sublevels[name] =
      emitter.sublevels[name] || sublevel(nut, prefix.concat(name), createStream, mergeOpts(opts));
  };

  emitter.readStream = emitter.createReadStream = function (opts) {
    opts = mergeOpts(opts);
    opts.prefix = prefix;
    var stream;
    var it = nut.iterator(opts);

    stream = createStream(opts, nut.createDecoder(opts));
    stream.setIterator(it);

    return stream;
  };

  emitter.close = function (cb) {
    nut.close(cb);
  };

  // NOTE(review): nut does not define isOpen, so emitter.isOpen is
  // undefined here — looks inherited from upstream level-sublevel; confirm
  emitter.isOpen = nut.isOpen;
  emitter.isClosed = nut.isClosed;

  return emitter;
};
4299
/* Copyright (c) 2012-2014 LevelUP contributors
 * See list at <https://github.com/rvagg/node-levelup#contributing>
 * MIT License <https://github.com/rvagg/node-levelup/blob/master/LICENSE.md>
 */

// Readable from the bundled readable-stream package (not node:stream)
var Readable = ReadableStreamCore.Readable;
4306
// Build a constructor that (a) works with or without `new` and
// (b) inherits from `parent`'s prototype without ever invoking parent's
// constructor — `init` runs in its place.
function createClass$1(parent, init) {
  const klass = function (...args) {
    if (this instanceof klass) {
      init.apply(this, args);
    } else {
      return new klass(...args);
    }
  };
  klass.prototype = Object.create(parent.prototype, {
    constructor: { value: klass }
  });
  return klass;
}
4319
// Object-mode Readable that pulls rows one at a time from a levelup
// iterator (attached later via setIterator) and pushes decoded
// {key, value} records produced by `makeData`.
class ReadStreamInternal extends Readable {
  constructor(options, makeData) {
    super({ objectMode: true, highWaterMark: options.highWaterMark });
    this._setup(options, makeData);
  }

  _setup(options, makeData) {
    // NOTE(review): re-invokes the Readable constructor on `this`; also
    // called directly by the createClass$1 wrapper below, which never
    // runs the real constructor — confirm against upstream before changing
    super.constructor({ objectMode: true, highWaterMark: options.highWaterMark });

    // purely to keep `db` around until we're done so it's not GCed if the user doesn't keep a ref
    this._waiting = false;
    this._options = options;
    this._makeData = makeData;
  }

  // Attach the iterator; if reads were already requested (or the stream
  // was destroyed) react accordingly.
  setIterator(it) {
    this._iterator = it;
    /* istanbul ignore if */
    if (this._destroyed) {
      return it.end(function () {});
    }
    /* istanbul ignore if */
    if (this._waiting) {
      this._waiting = false;
      return this._read();
    }
    return this;
  }

  // Tear down: end the iterator and emit 'close' (and 'error' for any
  // unexpected failure).
  _cleanup(err) {
    if (this._destroyed) {
      return;
    }

    this._destroyed = true;

    var self = this;
    /* istanbul ignore if */
    if (err && err.message !== 'iterator has ended') {
      self.emit('error', err);
    }

    /* istanbul ignore else */
    if (self._iterator) {
      self._iterator.end(function () {
        self._iterator = null;
        self.emit('close');
      });
    } else {
      self.emit('close');
    }
  }

  destroy() {
    this._cleanup();
  }

  _read() {
    var self = this;
    /* istanbul ignore if */
    if (self._destroyed) {
      return;
    }
    /* istanbul ignore if */
    if (!self._iterator) {
      // no iterator yet; setIterator will resume the read
      return this._waiting = true;
    }

    self._iterator.next(function (err, key, value) {
      if (err || (key === undefined && value === undefined)) {
        // error or end-of-range: signal EOF then clean up
        if (!err && !self._destroyed) {
          self.push(null);
        }
        return self._cleanup(err);
      }


      value = self._makeData(key, value);
      if (!self._destroyed) {
        self.push(value);
      }
    });
  }
}
4404
// Callable-without-`new` wrapper around ReadStreamInternal (legacy
// level read-stream API shape); init delegates to _setup.
const ReadStream = createClass$1(ReadStreamInternal, function (options, makeData) {
  ReadStreamInternal.prototype._setup.call(this, options, makeData);
});
4408
// Key codec for the sublevel namespace: keys are stored as
// '\xff<prefix>\xff<key>' so each sublevel occupies a contiguous,
// disjoint key range between lowerBound and upperBound.
var precodec = {
  encode: function (decodedKey) {
    return '\xff' + decodedKey[0] + '\xff' + decodedKey[1];
  },
  decode: function (encodedKeyAsBuffer) {
    var asString = encodedKeyAsBuffer.toString();
    var separatorIdx = asString.indexOf('\xff', 1);
    return [
      asString.substring(1, separatorIdx),
      asString.substring(separatorIdx + 1)
    ];
  },
  lowerBound: '\x00',
  upperBound: '\xff'
};
4421
// value codec shared by every sublevel (default level-codec encodings)
var codec = new Codec();

// Wrap a levelup instance in the sublevel API used by this adapter.
function sublevelPouch(db) {
  return sublevel(nut(db, precodec, codec), [], ReadStream, db.options);
}
4427
// Emulate allDocs({keys: [...]}) for adapters without native support:
// issue one _allDocs call per key (in parallel) and stitch the rows
// back together in the order the keys were given.
function allDocsKeysQuery(api, opts) {
  var keys = opts.keys;
  var finalResults = {
    offset: opts.skip
  };
  return Promise.all(keys.map(function (key) {
    // per-key query: paging options are dropped; 'deleted: ok' keeps
    // tombstone rows visible
    var subOpts = Object.assign({key, deleted: 'ok'}, opts);
    ['limit', 'skip', 'keys'].forEach(function (optKey) {
      delete subOpts[optKey];
    });
    return new Promise(function (resolve, reject) {
      api._allDocs(subOpts, function (err, res) {
        /* istanbul ignore if */
        if (err) {
          return reject(err);
        }
        /* istanbul ignore if */
        if (opts.update_seq && res.update_seq !== undefined) {
          finalResults.update_seq = res.update_seq;
        }
        finalResults.total_rows = res.total_rows;
        // a missing key still yields a row, marked 'not_found'
        resolve(res.rows[0] || {key, error: 'not_found'});
      });
    });
  })).then(function (results) {
    finalResults.rows = results;
    return finalResults;
  });
}
4457
// Node implementation of atob(): decode base64 into a binary string.
// Buffer silently skips characters it cannot decode, so round-trip the
// result to reject input that is not valid base64.
function thisAtob(str) {
  var decoded = Buffer.from(str, 'base64');
  if (decoded.toString('base64') !== str) {
    throw new Error("attachment is not a valid base64 string");
  }
  return decoded.toString('binary');
}
4467
// Node implementation of btoa(): binary string -> base64.
function thisBtoa(str) {
  var binary = Buffer.from(str, 'binary');
  return binary.toString('base64');
}
4471
// buffType is either 'binary' or 'base64'
// Decode binString accordingly and tag the resulting Buffer with a MIME
// type property for parity with browser Blobs.
function typedBuffer(binString, buffType, type) {
  const result = Buffer.from(binString, buffType);
  result.type = type; // non-standard, but used for consistency with the browser
  return result;
}
4478
// base64 string -> Buffer tagged with a content type
function b64ToBluffer(b64, type) {
  return typedBuffer(b64, 'base64', type);
}
4482
// From http://stackoverflow.com/questions/14967647/ (continues on next line)

// binary string -> Buffer tagged with a content type
function binStringToBluffer(binString, type) {
  return typedBuffer(binString, 'binary', type);
}
4488
// This function is unused in Node
// (browser builds read a Blob asynchronously; here the "blob" is
// already a Buffer, so encode synchronously and call back immediately)
function blobToBase64(blobOrBuffer, callback) {
  var b64 = blobOrBuffer.toString('base64');
  callback(b64);
}
4494
4495// not used in Node, but here for completeness
4496
4497// simplified API. universal browser support is assumed
4498
4499//Can't find original post, but this is close
4500
// Build a lookup table from a list: ['a','b'] -> {a: true, b: true}.
function toObject(array) {
  var table = {};
  array.forEach(function (item) {
    table[item] = true;
  });
  return table;
}
// List of top level reserved words for doc
// (underscore-prefixed fields PouchDB/CouchDB recognize; any other
// top-level field starting with '_' is invalid)
var reservedWords = toObject([
  '_id',
  '_rev',
  '_access',
  '_attachments',
  '_deleted',
  '_revisions',
  '_revs_info',
  '_conflicts',
  '_deleted_conflicts',
  '_local_seq',
  '_rev_tree',
  // replication documents
  '_replication_id',
  '_replication_state',
  '_replication_state_time',
  '_replication_state_reason',
  '_replication_stats',
  // Specific to Couchbase Sync Gateway
  '_removed'
]);
4529
// List of reserved words that should end up in the document
// (subset of reservedWords that is persisted with the doc body rather
// than stripped into metadata)
var dataWords = toObject([
  '_access',
  '_attachments',
  // replication documents
  '_replication_id',
  '_replication_state',
  '_replication_state_time',
  '_replication_state_reason',
  '_replication_stats'
]);
4541
/**
 * Split a revision string like "3-deadbeef" into its numeric generation
 * and hash parts.
 * @param {string} rev$$1 - revision string
 * @returns {{prefix: number, id: string}|Object} parsed parts, or an
 *   INVALID_REV error object when the string is malformed
 */
function parseRevisionInfo(rev$$1) {
  if (!/^\d+-/.test(rev$$1)) {
    return createError(INVALID_REV);
  }
  const dashIdx = rev$$1.indexOf('-');
  return {
    prefix: parseInt(rev$$1.substring(0, dashIdx), 10),
    id: rev$$1.substring(dashIdx + 1)
  };
}
4554
/**
 * Rebuild a single-branch rev tree from a CouchDB-style `_revisions`
 * object (as supplied with new_edits:false writes).
 * `revisions.ids` is ordered newest-first; every ancestor node is
 * marked 'missing' and the leaf node carries `opts` as its status.
 * @param {{start: number, ids: string[]}} revisions
 * @param {Object} opts - status object for the leaf revision
 * @returns {Array} one-element rev-tree array [{pos, ids}]
 */
function makeRevTreeFromRevisions(revisions, opts) {
  const revisionIds = revisions.ids;
  const pos = revisions.start - revisionIds.length + 1;

  // start at the leaf (newest rev) and wrap each ancestor around it
  let ids = [revisionIds[0], opts, []];
  for (let i = 1; i < revisionIds.length; i++) {
    ids = [revisionIds[i], { status: 'missing' }, [ids]];
  }

  return [{ pos, ids }];
}
4570
// Preprocess documents, parse their revisions, assign an id and a
// revision for new writes that are missing them, etc.
// Returns {metadata, data} on success, or an error object (with .error)
// for a malformed _rev; throws DOC_VALIDATION for unknown '_'-keys.
function parseDoc(doc, newEdits, dbOpts) {
  if (!dbOpts) {
    dbOpts = {
      deterministic_revs: true
    };
  }

  var nRevNum;
  var newRevId;
  var revInfo;
  var opts = {status: 'available'};
  if (doc._deleted) {
    opts.deleted = true;
  }

  if (newEdits) {
    if (!doc._id) {
      doc._id = uuid$1();
    }
    // next rev id; deterministic by default so identical edits
    // produce identical revisions
    newRevId = rev(doc, dbOpts.deterministic_revs);
    if (doc._rev) {
      revInfo = parseRevisionInfo(doc._rev);
      if (revInfo.error) {
        return revInfo;
      }
      // the new rev becomes a child of the (possibly unseen) parent rev
      doc._rev_tree = [{
        pos: revInfo.prefix,
        ids: [revInfo.id, {status: 'missing'}, [[newRevId, opts, []]]]
      }];
      nRevNum = revInfo.prefix + 1;
    } else {
      // brand-new document: single-node tree at generation 1
      doc._rev_tree = [{
        pos: 1,
        ids : [newRevId, opts, []]
      }];
      nRevNum = 1;
    }
  } else {
    // new_edits:false — trust the revision info supplied by the caller
    if (doc._revisions) {
      doc._rev_tree = makeRevTreeFromRevisions(doc._revisions, opts);
      nRevNum = doc._revisions.start;
      newRevId = doc._revisions.ids[0];
    }
    if (!doc._rev_tree) {
      revInfo = parseRevisionInfo(doc._rev);
      if (revInfo.error) {
        return revInfo;
      }
      nRevNum = revInfo.prefix;
      newRevId = revInfo.id;
      doc._rev_tree = [{
        pos: nRevNum,
        ids: [newRevId, opts, []]
      }];
    }
  }

  invalidIdError(doc._id);

  doc._rev = nRevNum + '-' + newRevId;

  // Split keys: reserved '_'-keys go to metadata (minus the underscore),
  // except dataWords entries (e.g. _attachments), which stay on the
  // stored document body; everything else is user data.
  var result = {metadata : {}, data : {}};
  for (var key in doc) {
    /* istanbul ignore else */
    if (Object.prototype.hasOwnProperty.call(doc, key)) {
      var specialKey = key[0] === '_';
      if (specialKey && !reservedWords[key]) {
        var error = createError(DOC_VALIDATION, key);
        error.message = DOC_VALIDATION.message + ': ' + key;
        throw error;
      } else if (specialKey && !dataWords[key]) {
        result.metadata[key.slice(1)] = doc[key];
      } else {
        result.data[key] = doc[key];
      }
    }
  }
  return result;
}
4652
// Apply a write to a document that already exists in the store: merge
// the incoming rev tree into the stored one, detect conflicts, and
// delegate the actual write to `writeDoc`. `results[i]` receives either
// the docInfo or an error; `cb` is always invoked exactly once.
function updateDoc(revLimit, prev, docInfo, results,
                   i, cb, writeDoc, newEdits) {

  // replicated (new_edits:false) rev we already have — nothing to do
  if (revExists(prev.rev_tree, docInfo.metadata.rev) && !newEdits) {
    results[i] = docInfo;
    return cb();
  }

  // sometimes this is pre-calculated. historically not always
  var previousWinningRev = prev.winningRev || winningRev(prev);
  var previouslyDeleted = 'deleted' in prev ? prev.deleted :
    isDeleted(prev, previousWinningRev);
  var deleted = 'deleted' in docInfo.metadata ? docInfo.metadata.deleted :
    isDeleted(docInfo.metadata);
  var isRoot = /^1-/.test(docInfo.metadata.rev);

  // a generation-1 rev written over a deleted doc resurrects it:
  // re-parent the new doc onto the previous winning rev
  if (previouslyDeleted && !deleted && newEdits && isRoot) {
    var newDoc = docInfo.data;
    newDoc._rev = previousWinningRev;
    newDoc._id = docInfo.metadata.id;
    docInfo = parseDoc(newDoc, newEdits);
  }

  var merged = merge(prev.rev_tree, docInfo.metadata.rev_tree[0], revLimit);

  // with new_edits, anything that is not a plain leaf extension is a
  // conflict (un-deleting may legitimately fork a new branch)
  var inConflict = newEdits && ((
    (previouslyDeleted && deleted && merged.conflicts !== 'new_leaf') ||
    (!previouslyDeleted && merged.conflicts !== 'new_leaf') ||
    (previouslyDeleted && !deleted && merged.conflicts === 'new_branch')));

  if (inConflict) {
    var err = createError(REV_CONFLICT);
    results[i] = err;
    return cb();
  }

  var newRev = docInfo.metadata.rev;
  docInfo.metadata.rev_tree = merged.tree;
  docInfo.stemmedRevs = merged.stemmedRevs || [];
  /* istanbul ignore else */
  if (prev.rev_map) {
    docInfo.metadata.rev_map = prev.rev_map; // used only by leveldb
  }

  // recalculate the winner now that the trees are merged
  var winningRev$$1 = winningRev(docInfo.metadata);
  var winningRevIsDeleted = isDeleted(docInfo.metadata, winningRev$$1);

  // calculate the total number of documents that were added/removed,
  // from the perspective of total_rows/doc_count
  var delta = (previouslyDeleted === winningRevIsDeleted) ? 0 :
    previouslyDeleted < winningRevIsDeleted ? -1 : 1;

  var newRevIsDeleted;
  if (newRev === winningRev$$1) {
    // if the new rev is the same as the winning rev, we can reuse that value
    newRevIsDeleted = winningRevIsDeleted;
  } else {
    // if they're not the same, then we need to recalculate
    newRevIsDeleted = isDeleted(docInfo.metadata, newRev);
  }

  writeDoc(docInfo, winningRev$$1, winningRevIsDeleted, newRevIsDeleted,
    true, delta, i, cb);
}
4718
/**
 * True when the root node of the doc's first rev-tree branch has
 * status 'missing', i.e. the document was inserted with a parent
 * revision this database has never seen.
 * @param {Object} docInfo - parsed doc with metadata.rev_tree
 * @returns {boolean}
 */
function rootIsMissing(docInfo) {
  const rootStatus = docInfo.metadata.rev_tree[0].ids[1].status;
  return rootStatus === 'missing';
}
4722
// Process a batch of parsed documents: route _local docs to their own
// store, group the rest by id, then for each id either update the
// existing doc (present in fetchedDocs) or insert a brand-new one.
// Docs sharing an id are written sequentially; overallCallback fires
// once every doc has been handled.
function processDocs(revLimit, docInfos, api, fetchedDocs, tx, results,
                     writeDoc, opts, overallCallback) {

  // Default to 1000 locally
  revLimit = revLimit || 1000;

  // write a document that has no existing metadata in the database
  function insertDoc(docInfo, resultsIdx, callback) {
    // Cant insert new deleted documents
    var winningRev$$1 = winningRev(docInfo.metadata);
    var deleted = isDeleted(docInfo.metadata, winningRev$$1);
    if ('was_delete' in opts && deleted) {
      results[resultsIdx] = createError(MISSING_DOC, 'deleted');
      return callback();
    }

    // 4712 - detect whether a new document was inserted with a _rev
    var inConflict = newEdits && rootIsMissing(docInfo);

    if (inConflict) {
      var err = createError(REV_CONFLICT);
      results[resultsIdx] = err;
      return callback();
    }

    var delta = deleted ? 0 : 1;

    writeDoc(docInfo, winningRev$$1, deleted, deleted, false,
      delta, resultsIdx, callback);
  }

  var newEdits = opts.new_edits;
  var idsToDocs = new Map();

  var docsDone = 0;
  var docsToDo = docInfos.length;

  function checkAllDocsDone() {
    if (++docsDone === docsToDo && overallCallback) {
      overallCallback();
    }
  }

  docInfos.forEach(function (currentDoc, resultsIdx) {

    // _local documents bypass the rev-tree machinery entirely
    if (currentDoc._id && isLocalId(currentDoc._id)) {
      var fun = currentDoc._deleted ? '_removeLocal' : '_putLocal';
      api[fun](currentDoc, {ctx: tx}, function (err, res) {
        results[resultsIdx] = err || res;
        checkAllDocsDone();
      });
      return;
    }

    var id = currentDoc.metadata.id;
    if (idsToDocs.has(id)) {
      docsToDo--; // duplicate
      idsToDocs.get(id).push([currentDoc, resultsIdx]);
    } else {
      idsToDocs.set(id, [[currentDoc, resultsIdx]]);
    }
  });

  // in the case of new_edits, the user can provide multiple docs
  // with the same id. these need to be processed sequentially
  idsToDocs.forEach(function (docs, id) {
    var numDone = 0;

    function docWritten() {
      if (++numDone < docs.length) {
        nextDoc();
      } else {
        checkAllDocsDone();
      }
    }
    function nextDoc() {
      var value = docs[numDone];
      var currentDoc = value[0];
      var resultsIdx = value[1];

      if (fetchedDocs.has(id)) {
        updateDoc(revLimit, fetchedDocs.get(id), currentDoc, results,
          resultsIdx, docWritten, writeDoc, newEdits);
      } else {
        // Ensure stemming applies to new writes as well
        var merged = merge([], currentDoc.metadata.rev_tree[0], revLimit);
        currentDoc.metadata.rev_tree = merged.tree;
        currentDoc.stemmedRevs = merged.stemmedRevs || [];
        insertDoc(currentDoc, resultsIdx, docWritten);
      }
    }
    nextDoc();
  });
}
4816
/**
 * Stack-safe JSON parse. JSON.parse is fast but can overflow the call
 * stack on deeply nested input; vuvuzela.parse is slower but cannot,
 * so it serves as the fallback.
 * @param {string} str - JSON text
 * @returns {*} parsed value
 */
function safeJsonParse(str) {
  let parsed;
  try {
    parsed = JSON.parse(str);
  } catch (e) {
    /* istanbul ignore next */
    parsed = vuvuzela.parse(str);
  }
  return parsed;
}
4828
/**
 * Stack-safe JSON stringify: falls back to vuvuzela.stringify when
 * JSON.stringify throws (e.g. stack overflow on deeply nested input).
 * @param {*} json - value to serialize
 * @returns {string} JSON text
 */
function safeJsonStringify(json) {
  let serialized;
  try {
    serialized = JSON.stringify(json);
  } catch (e) {
    /* istanbul ignore next */
    serialized = vuvuzela.stringify(json);
  }
  return serialized;
}
4837
/**
 * In Node attachments are stored as Buffers, so "reading" one is just
 * tagging it with its MIME type and handing it back unchanged.
 * @param {Buffer} storedObject - stored attachment body
 * @param {string} type - MIME content type
 * @returns {Buffer} the same buffer, with a `type` property set
 */
function readAsBlobOrBuffer(storedObject, type) {
  storedObject.type = type; // non-standard, but used for consistency
  return storedObject;
}
4843
// In Node we store the Buffer directly, so no conversion is needed
// before storage — the data is passed straight to the callback.
function prepareAttachmentForStorage(attData, cb) {
  cb(attData);
}
4848
/**
 * Produce a zero-length Buffer tagged with the given MIME type,
 * representing an empty attachment.
 * @param {string} type - MIME content type
 * @returns {Buffer}
 */
function createEmptyBlobOrBuffer(type) {
  const empty = '';
  return typedBuffer(empty, 'binary', type);
}
4852
4853// similar to an idb or websql transaction object
4854
/**
 * Return (creating on demand) the per-store cache Map held on a
 * LevelTransaction, keyed by the sublevel store's prefix.
 * @param {LevelTransaction} transaction - owns the `_cache` Map
 * @param {Object} store - sublevel store exposing `prefix()`
 * @returns {Map} cache for this store
 */
function getCacheFor(transaction, store) {
  const prefix = store.prefix()[0];
  let subCache = transaction._cache.get(prefix);
  if (!subCache) {
    subCache = new Map();
    transaction._cache.set(prefix, subCache);
  }
  return subCache;
}
4865
// Similar to an idb or websql transaction object: reads are served
// through a cache and writes are buffered until execute() flushes
// them to leveldb in one batch.
class LevelTransaction {
  constructor() {
    this._batch = [];        // queued level batch operations
    this._cache = new Map(); // per-store read/write cache
  }

  /**
   * Cached get. Serves repeated reads from the cache; a `null` cache
   * entry marks a key known to be deleted/missing, which short-circuits
   * to a NotFoundError without touching the store.
   */
  get(store, key, callback) {
    const cache = getCacheFor(this, store);
    const cached = cache.get(key);
    if (cached) {
      return nextTick(function () {
        callback(null, cached);
      });
    } else if (cached === null) { // deleted marker
      /* istanbul ignore next */
      return nextTick(function () {
        callback({name: 'NotFoundError'});
      });
    }
    store.get(key, function (err, res$$1) {
      if (err) {
        /* istanbul ignore else */
        if (err.name === 'NotFoundError') {
          cache.set(key, null); // remember the miss
        }
        return callback(err);
      }
      cache.set(key, res$$1);
      callback(null, res$$1);
    });
  }

  /**
   * Queue level batch operations. The cache is updated immediately so
   * that later get() calls inside this transaction observe the writes
   * (deletes are recorded as the `null` marker).
   */
  batch(batch) {
    for (const operation of batch) {
      const cache = getCacheFor(this, operation.prefix);
      const newValue = operation.type === 'put' ? operation.value : null;
      cache.set(operation.key, newValue);
    }
    this._batch = this._batch.concat(batch);
  }

  /**
   * Flush every queued operation to the database as a single level
   * batch, dropping superseded writes — the last operation for each
   * (store, key) pair wins.
   */
  execute(db, callback) {
    const seen = new Set();
    const uniqBatches = [];

    // walk backwards so the newest operation per key is kept
    for (let i = this._batch.length - 1; i >= 0; i--) {
      const operation = this._batch[i];
      const lookupKey = operation.prefix.prefix()[0] + '\xff' + operation.key;
      if (seen.has(lookupKey)) {
        continue;
      }
      seen.add(lookupKey);
      uniqBatches.push(operation);
    }

    db.batch(uniqBatches, callback);
  }
}
4931
// Names of the sublevel stores that make up a LevelPouch database.
var DOC_STORE = 'document-store';         // doc id -> metadata (rev tree etc.)
var BY_SEQ_STORE = 'by-sequence';         // formatted seq -> document body
var ATTACHMENT_STORE = 'attach-store';    // digest -> attachment refs
var BINARY_STORE = 'attach-binary-store'; // digest -> raw attachment Buffer
var LOCAL_STORE = 'local-store';          // _local documents
var META_STORE = 'meta-store';            // db-level bookkeeping (seq, count, uuid)

// leveldb barks if we try to open a db multiple times
// so we cache opened connections here for initstore()
var dbStores = new Map();

// store the value of update_seq in the by-sequence store the key name will
// never conflict, since the keys in the by-sequence store are integers
var UPDATE_SEQ_KEY = '_local_last_update_seq';
var DOC_COUNT_KEY = '_local_doc_count';
var UUID_KEY = '_local_uuid';

var MD5_PREFIX = 'md5-';

// level-codec value encoding that round-trips through the stack-safe
// safeJsonStringify/safeJsonParse pair (used for the doc store)
var safeJsonEncoding = {
  encode: safeJsonStringify,
  decode: safeJsonParse,
  buffer: false,
  type: 'cheap-json'
};

// shared change-notification hub for all LevelPouch instances
var levelChanges = new Changes();
4959
// winningRev and deleted are performance-killers, but
// in newer versions of PouchDB they are cached on the metadata;
// prefer the cached value and only compute when absent.
function getWinningRev(metadata) {
  if ('winningRev' in metadata) {
    return metadata.winningRev;
  }
  return winningRev(metadata);
}
4966
// Prefer the `deleted` flag cached on the metadata; fall back to
// computing it from the winning rev when the cache is absent.
function getIsDeleted(metadata, winningRev$$1) {
  if ('deleted' in metadata) {
    return metadata.deleted;
  }
  return isDeleted(metadata, winningRev$$1);
}
4971
/**
 * Load one attachment's body from the binary store and inline it onto
 * `att.data` — as a base64 string, or as a type-tagged Buffer when
 * `opts.binary` is set. A NotFoundError from the store means a
 * zero-length attachment, not a failure.
 * @returns {Promise} resolves once `att` has been mutated
 */
function fetchAttachment(att, stores, opts) {
  const type = att.content_type;
  return new Promise(function (resolve, reject) {
    stores.binaryStore.get(att.digest, function (err, buffer) {
      let data;
      if (err) {
        /* istanbul ignore if */
        if (err.name !== 'NotFoundError') {
          return reject(err);
        }
        // missing body == empty attachment
        data = opts.binary ? binStringToBluffer('', type) : '';
      } else {
        // non-empty
        data = opts.binary ? readAsBlobOrBuffer(buffer, type) :
          buffer.toString('base64');
      }
      // the attachment is no longer a stub once its data is inlined
      delete att.stub;
      delete att.length;
      att.data = data;
      resolve();
    });
  });
}
5003
/**
 * Inline attachment bodies for every row of a result set. Only
 * attachments that do not already carry a `data` field are fetched.
 * @returns {Promise} resolves when all attachments have been inlined
 */
function fetchAttachments(results, stores, opts) {
  const toFetch = [];
  for (const row of results) {
    const attachments = row.doc && row.doc._attachments;
    if (!attachments) {
      continue;
    }
    for (const attName of Object.keys(attachments)) {
      const att = attachments[attName];
      if (!('data' in att)) {
        toFetch.push(att);
      }
    }
  }

  return Promise.all(toFetch.map(function (att) {
    return fetchAttachment(att, stores, opts);
  }));
}
5023
5024function LevelPouch(opts, callback) {
5025 opts = clone(opts);
5026 var api = this;
5027 var instanceId;
5028 var stores = {};
5029 var revLimit = opts.revs_limit;
5030 var db;
5031 var name = opts.name;
5032 // TODO: this is undocumented and unused probably
5033 /* istanbul ignore else */
5034 if (typeof opts.createIfMissing === 'undefined') {
5035 opts.createIfMissing = true;
5036 }
5037
5038 var leveldown = opts.db;
5039
5040 var dbStore;
5041 var leveldownName = functionName(leveldown);
5042 if (dbStores.has(leveldownName)) {
5043 dbStore = dbStores.get(leveldownName);
5044 } else {
5045 dbStore = new Map();
5046 dbStores.set(leveldownName, dbStore);
5047 }
5048 if (dbStore.has(name)) {
5049 db = dbStore.get(name);
5050 afterDBCreated();
5051 } else {
5052 dbStore.set(name, sublevelPouch(levelup(leveldown(name), opts, function (err) {
5053 /* istanbul ignore if */
5054 if (err) {
5055 dbStore.delete(name);
5056 return callback(err);
5057 }
5058 db = dbStore.get(name);
5059 db._docCount = -1;
5060 db._queue = new Deque();
5061 /* istanbul ignore else */
5062 if (typeof opts.migrate === 'object') { // migration for leveldown
5063 opts.migrate.doMigrationOne(name, db, afterDBCreated);
5064 } else {
5065 afterDBCreated();
5066 }
5067 })));
5068 }
5069
5070 function afterDBCreated() {
5071 stores.docStore = db.sublevel(DOC_STORE, {valueEncoding: safeJsonEncoding});
5072 stores.bySeqStore = db.sublevel(BY_SEQ_STORE, {valueEncoding: 'json'});
5073 stores.attachmentStore =
5074 db.sublevel(ATTACHMENT_STORE, {valueEncoding: 'json'});
5075 stores.binaryStore = db.sublevel(BINARY_STORE, {valueEncoding: 'binary'});
5076 stores.localStore = db.sublevel(LOCAL_STORE, {valueEncoding: 'json'});
5077 stores.metaStore = db.sublevel(META_STORE, {valueEncoding: 'json'});
5078 /* istanbul ignore else */
5079 if (typeof opts.migrate === 'object') { // migration for leveldown
5080 opts.migrate.doMigrationTwo(db, stores, afterLastMigration);
5081 } else {
5082 afterLastMigration();
5083 }
5084 }
5085
5086 function afterLastMigration() {
5087 stores.metaStore.get(UPDATE_SEQ_KEY, function (err, value) {
5088 if (typeof db._updateSeq === 'undefined') {
5089 db._updateSeq = value || 0;
5090 }
5091 stores.metaStore.get(DOC_COUNT_KEY, function (err, value) {
5092 db._docCount = !err ? value : 0;
5093 stores.metaStore.get(UUID_KEY, function (err, value) {
5094 instanceId = !err ? value : uuid$1();
5095 stores.metaStore.put(UUID_KEY, instanceId, function () {
5096 nextTick(function () {
5097 callback(null, api);
5098 });
5099 });
5100 });
5101 });
5102 });
5103 }
5104
5105 function countDocs(callback) {
5106 /* istanbul ignore if */
5107 if (db.isClosed()) {
5108 return callback(new Error('database is closed'));
5109 }
5110 return callback(null, db._docCount); // use cached value
5111 }
5112
5113 api._remote = false;
5114 /* istanbul ignore next */
5115 api.type = function () {
5116 return 'leveldb';
5117 };
5118
5119 api._id = function (callback) {
5120 callback(null, instanceId);
5121 };
5122
5123 api._info = function (callback) {
5124 var res$$1 = {
5125 doc_count: db._docCount,
5126 update_seq: db._updateSeq,
5127 backend_adapter: functionName(leveldown)
5128 };
5129 return nextTick(function () {
5130 callback(null, res$$1);
5131 });
5132 };
5133
5134 function tryCode(fun, args) {
5135 try {
5136 fun.apply(null, args);
5137 } catch (err) {
5138 args[args.length - 1](err);
5139 }
5140 }
5141
5142 function executeNext() {
5143 var firstTask = db._queue.peekFront();
5144
5145 if (firstTask.type === 'read') {
5146 runReadOperation(firstTask);
5147 } else { // write, only do one at a time
5148 runWriteOperation(firstTask);
5149 }
5150 }
5151
5152 function runReadOperation(firstTask) {
5153 // do multiple reads at once simultaneously, because it's safe
5154
5155 var readTasks = [firstTask];
5156 var i = 1;
5157 var nextTask = db._queue.get(i);
5158 while (typeof nextTask !== 'undefined' && nextTask.type === 'read') {
5159 readTasks.push(nextTask);
5160 i++;
5161 nextTask = db._queue.get(i);
5162 }
5163
5164 var numDone = 0;
5165
5166 readTasks.forEach(function (readTask) {
5167 var args = readTask.args;
5168 var callback = args[args.length - 1];
5169 args[args.length - 1] = function (...cbArgs) {
5170 callback.apply(null, cbArgs);
5171 if (++numDone === readTasks.length) {
5172 nextTick(function () {
5173 // all read tasks have finished
5174 readTasks.forEach(function () {
5175 db._queue.shift();
5176 });
5177 if (db._queue.length) {
5178 executeNext();
5179 }
5180 });
5181 }
5182 };
5183 tryCode(readTask.fun, args);
5184 });
5185 }
5186
5187 function runWriteOperation(firstTask) {
5188 var args = firstTask.args;
5189 var callback = args[args.length - 1];
5190 args[args.length - 1] = function (...cbArgs) {
5191 callback.apply(null, cbArgs);
5192 nextTick(function () {
5193 db._queue.shift();
5194 if (db._queue.length) {
5195 executeNext();
5196 }
5197 });
5198 };
5199 tryCode(firstTask.fun, args);
5200 }
5201
5202 // all read/write operations to the database are done in a queue,
5203 // similar to how websql/idb works. this avoids problems such
5204 // as e.g. compaction needing to have a lock on the database while
5205 // it updates stuff. in the future we can revisit this.
5206 function writeLock(fun) {
5207 return function (...args) {
5208 db._queue.push({
5209 fun,
5210 args,
5211 type: 'write'
5212 });
5213
5214 if (db._queue.length === 1) {
5215 nextTick(executeNext);
5216 }
5217 };
5218 }
5219
5220 // same as the writelock, but multiple can run at once
5221 function readLock(fun) {
5222 return function (...args) {
5223 db._queue.push({
5224 fun,
5225 args,
5226 type: 'read'
5227 });
5228
5229 if (db._queue.length === 1) {
5230 nextTick(executeNext);
5231 }
5232 };
5233 }
5234
5235 function formatSeq(n) {
5236 return ('0000000000000000' + n).slice(-16);
5237 }
5238
5239 function parseSeq(s) {
5240 return parseInt(s, 10);
5241 }
5242
5243 api._get = readLock(function (id, opts, callback) {
5244 opts = clone(opts);
5245
5246 stores.docStore.get(id, function (err, metadata) {
5247
5248 if (err || !metadata) {
5249 return callback(createError(MISSING_DOC, 'missing'));
5250 }
5251
5252 var rev$$1;
5253 if (!opts.rev) {
5254 rev$$1 = getWinningRev(metadata);
5255 var deleted = getIsDeleted(metadata, rev$$1);
5256 if (deleted) {
5257 return callback(createError(MISSING_DOC, "deleted"));
5258 }
5259 } else {
5260 rev$$1 = opts.latest ? latest(opts.rev, metadata) : opts.rev;
5261 }
5262
5263 var seq = metadata.rev_map[rev$$1];
5264
5265 stores.bySeqStore.get(formatSeq(seq), function (err, doc) {
5266 if (!doc) {
5267 return callback(createError(MISSING_DOC));
5268 }
5269 /* istanbul ignore if */
5270 if ('_id' in doc && doc._id !== metadata.id) {
5271 // this failing implies something very wrong
5272 return callback(new Error('wrong doc returned'));
5273 }
5274 doc._id = metadata.id;
5275 if ('_rev' in doc) {
5276 /* istanbul ignore if */
5277 if (doc._rev !== rev$$1) {
5278 // this failing implies something very wrong
5279 return callback(new Error('wrong doc returned'));
5280 }
5281 } else {
5282 // we didn't always store this
5283 doc._rev = rev$$1;
5284 }
5285 return callback(null, {doc, metadata});
5286 });
5287 });
5288 });
5289
5290 // not technically part of the spec, but if putAttachment has its own
5291 // method...
5292 api._getAttachment = function (docId, attachId, attachment, opts, callback) {
5293 var digest = attachment.digest;
5294 var type = attachment.content_type;
5295
5296 stores.binaryStore.get(digest, function (err, attach) {
5297 if (err) {
5298 /* istanbul ignore if */
5299 if (err.name !== 'NotFoundError') {
5300 return callback(err);
5301 }
5302 // Empty attachment
5303 return callback(null, opts.binary ? createEmptyBlobOrBuffer(type) : '');
5304 }
5305
5306 if (opts.binary) {
5307 callback(null, readAsBlobOrBuffer(attach, type));
5308 } else {
5309 callback(null, attach.toString('base64'));
5310 }
5311 });
5312 };
5313
5314 api._bulkDocs = writeLock(function (req, opts, callback) {
5315 var newEdits = opts.new_edits;
5316 var results = new Array(req.docs.length);
5317 var fetchedDocs = new Map();
5318 var stemmedRevs = new Map();
5319
5320 var txn = new LevelTransaction();
5321 var docCountDelta = 0;
5322 var newUpdateSeq = db._updateSeq;
5323
5324 // parse the docs and give each a sequence number
5325 var userDocs = req.docs;
5326 var docInfos = userDocs.map(function (doc) {
5327 if (doc._id && isLocalId(doc._id)) {
5328 return doc;
5329 }
5330 var newDoc = parseDoc(doc, newEdits, api.__opts);
5331
5332 if (newDoc.metadata && !newDoc.metadata.rev_map) {
5333 newDoc.metadata.rev_map = {};
5334 }
5335
5336 return newDoc;
5337 });
5338 var infoErrors = docInfos.filter(function (doc) {
5339 return doc.error;
5340 });
5341
5342 if (infoErrors.length) {
5343 return callback(infoErrors[0]);
5344 }
5345
5346 // verify any stub attachments as a precondition test
5347
5348 function verifyAttachment(digest, callback) {
5349 txn.get(stores.attachmentStore, digest, function (levelErr) {
5350 if (levelErr) {
5351 var err = createError(MISSING_STUB,
5352 'unknown stub attachment with digest ' +
5353 digest);
5354 callback(err);
5355 } else {
5356 callback();
5357 }
5358 });
5359 }
5360
5361 function verifyAttachments(finish) {
5362 var digests = [];
5363 userDocs.forEach(function (doc) {
5364 if (doc && doc._attachments) {
5365 Object.keys(doc._attachments).forEach(function (filename) {
5366 var att = doc._attachments[filename];
5367 if (att.stub) {
5368 digests.push(att.digest);
5369 }
5370 });
5371 }
5372 });
5373 if (!digests.length) {
5374 return finish();
5375 }
5376 var numDone = 0;
5377 var err;
5378
5379 digests.forEach(function (digest) {
5380 verifyAttachment(digest, function (attErr) {
5381 if (attErr && !err) {
5382 err = attErr;
5383 }
5384
5385 if (++numDone === digests.length) {
5386 finish(err);
5387 }
5388 });
5389 });
5390 }
5391
5392 function fetchExistingDocs(finish) {
5393 var numDone = 0;
5394 var overallErr;
5395 function checkDone() {
5396 if (++numDone === userDocs.length) {
5397 return finish(overallErr);
5398 }
5399 }
5400
5401 userDocs.forEach(function (doc) {
5402 if (doc._id && isLocalId(doc._id)) {
5403 // skip local docs
5404 return checkDone();
5405 }
5406 txn.get(stores.docStore, doc._id, function (err, info) {
5407 if (err) {
5408 /* istanbul ignore if */
5409 if (err.name !== 'NotFoundError') {
5410 overallErr = err;
5411 }
5412 } else {
5413 fetchedDocs.set(doc._id, info);
5414 }
5415 checkDone();
5416 });
5417 });
5418 }
5419
5420 function compact(revsMap, callback) {
5421 var promise = Promise.resolve();
5422 revsMap.forEach(function (revs, docId) {
5423 // TODO: parallelize, for now need to be sequential to
5424 // pass orphaned attachment tests
5425 promise = promise.then(function () {
5426 return new Promise(function (resolve, reject) {
5427 api._doCompactionNoLock(docId, revs, {ctx: txn}, function (err) {
5428 /* istanbul ignore if */
5429 if (err) {
5430 return reject(err);
5431 }
5432 resolve();
5433 });
5434 });
5435 });
5436 });
5437
5438 promise.then(function () {
5439 callback();
5440 }, callback);
5441 }
5442
5443 function autoCompact(callback) {
5444 var revsMap = new Map();
5445 fetchedDocs.forEach(function (metadata, docId) {
5446 revsMap.set(docId, compactTree(metadata));
5447 });
5448 compact(revsMap, callback);
5449 }
5450
5451 function finish() {
5452 compact(stemmedRevs, function (error) {
5453 /* istanbul ignore if */
5454 if (error) {
5455 complete(error);
5456 }
5457 if (api.auto_compaction) {
5458 return autoCompact(complete);
5459 }
5460 complete();
5461 });
5462 }
5463
5464 function writeDoc(docInfo, winningRev$$1, winningRevIsDeleted, newRevIsDeleted,
5465 isUpdate, delta, resultsIdx, callback2) {
5466 docCountDelta += delta;
5467
5468 var err = null;
5469 var recv = 0;
5470
5471 docInfo.metadata.winningRev = winningRev$$1;
5472 docInfo.metadata.deleted = winningRevIsDeleted;
5473
5474 docInfo.data._id = docInfo.metadata.id;
5475 docInfo.data._rev = docInfo.metadata.rev;
5476
5477 if (newRevIsDeleted) {
5478 docInfo.data._deleted = true;
5479 }
5480
5481 if (docInfo.stemmedRevs.length) {
5482 stemmedRevs.set(docInfo.metadata.id, docInfo.stemmedRevs);
5483 }
5484
5485 var attachments = docInfo.data._attachments ?
5486 Object.keys(docInfo.data._attachments) :
5487 [];
5488
5489 function attachmentSaved(attachmentErr) {
5490 recv++;
5491 if (!err) {
5492 /* istanbul ignore if */
5493 if (attachmentErr) {
5494 err = attachmentErr;
5495 callback2(err);
5496 } else if (recv === attachments.length) {
5497 finish();
5498 }
5499 }
5500 }
5501
5502 function onMD5Load(doc, key, data, attachmentSaved) {
5503 return function (result) {
5504 saveAttachment(doc, MD5_PREFIX + result, key, data, attachmentSaved);
5505 };
5506 }
5507
5508 function doMD5(doc, key, attachmentSaved) {
5509 return function (data) {
5510 binaryMd5(data, onMD5Load(doc, key, data, attachmentSaved));
5511 };
5512 }
5513
5514 for (var i = 0; i < attachments.length; i++) {
5515 var key = attachments[i];
5516 var att = docInfo.data._attachments[key];
5517
5518 if (att.stub) {
5519 // still need to update the refs mapping
5520 var id = docInfo.data._id;
5521 var rev$$1 = docInfo.data._rev;
5522 saveAttachmentRefs(id, rev$$1, att.digest, attachmentSaved);
5523 continue;
5524 }
5525 var data;
5526 if (typeof att.data === 'string') {
5527 // input is assumed to be a base64 string
5528 try {
5529 data = thisAtob(att.data);
5530 } catch (e) {
5531 callback(createError(BAD_ARG,
5532 'Attachment is not a valid base64 string'));
5533 return;
5534 }
5535 doMD5(docInfo, key, attachmentSaved)(data);
5536 } else {
5537 prepareAttachmentForStorage(att.data,
5538 doMD5(docInfo, key, attachmentSaved));
5539 }
5540 }
5541
5542 function finish() {
5543 var seq = docInfo.metadata.rev_map[docInfo.metadata.rev];
5544 /* istanbul ignore if */
5545 if (seq) {
5546 // check that there aren't any existing revisions with the same
5547 // revision id, else we shouldn't do anything
5548 return callback2();
5549 }
5550 seq = ++newUpdateSeq;
5551 docInfo.metadata.rev_map[docInfo.metadata.rev] =
5552 docInfo.metadata.seq = seq;
5553 var seqKey = formatSeq(seq);
5554 var batch = [{
5555 key: seqKey,
5556 value: docInfo.data,
5557 prefix: stores.bySeqStore,
5558 type: 'put'
5559 }, {
5560 key: docInfo.metadata.id,
5561 value: docInfo.metadata,
5562 prefix: stores.docStore,
5563 type: 'put'
5564 }];
5565 txn.batch(batch);
5566 results[resultsIdx] = {
5567 ok: true,
5568 id: docInfo.metadata.id,
5569 rev: docInfo.metadata.rev
5570 };
5571 fetchedDocs.set(docInfo.metadata.id, docInfo.metadata);
5572 callback2();
5573 }
5574
5575 if (!attachments.length) {
5576 finish();
5577 }
5578 }
5579
5580 // attachments are queued per-digest, otherwise the refs could be
5581 // overwritten by concurrent writes in the same bulkDocs session
5582 var attachmentQueues = {};
5583
5584 function saveAttachmentRefs(id, rev$$1, digest, callback) {
5585
5586 function fetchAtt() {
5587 return new Promise(function (resolve, reject) {
5588 txn.get(stores.attachmentStore, digest, function (err, oldAtt) {
5589 /* istanbul ignore if */
5590 if (err && err.name !== 'NotFoundError') {
5591 return reject(err);
5592 }
5593 resolve(oldAtt);
5594 });
5595 });
5596 }
5597
5598 function saveAtt(oldAtt) {
5599 var ref = [id, rev$$1].join('@');
5600 var newAtt = {};
5601
5602 if (oldAtt) {
5603 if (oldAtt.refs) {
5604 // only update references if this attachment already has them
5605 // since we cannot migrate old style attachments here without
5606 // doing a full db scan for references
5607 newAtt.refs = oldAtt.refs;
5608 newAtt.refs[ref] = true;
5609 }
5610 } else {
5611 newAtt.refs = {};
5612 newAtt.refs[ref] = true;
5613 }
5614
5615 return new Promise(function (resolve) {
5616 txn.batch([{
5617 type: 'put',
5618 prefix: stores.attachmentStore,
5619 key: digest,
5620 value: newAtt
5621 }]);
5622 resolve(!oldAtt);
5623 });
5624 }
5625
5626 // put attachments in a per-digest queue, to avoid two docs with the same
5627 // attachment overwriting each other
5628 var queue = attachmentQueues[digest] || Promise.resolve();
5629 attachmentQueues[digest] = queue.then(function () {
5630 return fetchAtt().then(saveAtt).then(function (isNewAttachment) {
5631 callback(null, isNewAttachment);
5632 }, callback);
5633 });
5634 }
5635
5636 function saveAttachment(docInfo, digest, key, data, callback) {
5637 var att = docInfo.data._attachments[key];
5638 delete att.data;
5639 att.digest = digest;
5640 att.length = data.length;
5641 var id = docInfo.metadata.id;
5642 var rev$$1 = docInfo.metadata.rev;
5643 att.revpos = parseInt(rev$$1, 10);
5644
5645 saveAttachmentRefs(id, rev$$1, digest, function (err, isNewAttachment) {
5646 /* istanbul ignore if */
5647 if (err) {
5648 return callback(err);
5649 }
5650 // do not try to store empty attachments
5651 if (data.length === 0) {
5652 return callback(err);
5653 }
5654 if (!isNewAttachment) {
5655 // small optimization - don't bother writing it again
5656 return callback(err);
5657 }
5658 txn.batch([{
5659 type: 'put',
5660 prefix: stores.binaryStore,
5661 key: digest,
5662 value: Buffer.from(data, 'binary')
5663 }]);
5664 callback();
5665 });
5666 }
5667
5668 function complete(err) {
5669 /* istanbul ignore if */
5670 if (err) {
5671 return nextTick(function () {
5672 callback(err);
5673 });
5674 }
5675 txn.batch([
5676 {
5677 prefix: stores.metaStore,
5678 type: 'put',
5679 key: UPDATE_SEQ_KEY,
5680 value: newUpdateSeq
5681 },
5682 {
5683 prefix: stores.metaStore,
5684 type: 'put',
5685 key: DOC_COUNT_KEY,
5686 value: db._docCount + docCountDelta
5687 }
5688 ]);
5689 txn.execute(db, function (err) {
5690 /* istanbul ignore if */
5691 if (err) {
5692 return callback(err);
5693 }
5694 db._docCount += docCountDelta;
5695 db._updateSeq = newUpdateSeq;
5696 levelChanges.notify(name);
5697 nextTick(function () {
5698 callback(null, results);
5699 });
5700 });
5701 }
5702
5703 if (!docInfos.length) {
5704 return callback(null, []);
5705 }
5706
5707 verifyAttachments(function (err) {
5708 if (err) {
5709 return callback(err);
5710 }
5711 fetchExistingDocs(function (err) {
5712 /* istanbul ignore if */
5713 if (err) {
5714 return callback(err);
5715 }
5716 processDocs(revLimit, docInfos, api, fetchedDocs, txn, results,
5717 writeDoc, opts, finish);
5718 });
5719 });
5720 });
  // _allDocs: stream the docStore (id -> metadata) through a transform that
  // resolves each id's winning revision, applies skip/limit/deleted filtering,
  // and optionally inlines the doc bodies from the bySeqStore.
  api._allDocs = function (opts, callback) {
    if ('keys' in opts) {
      // multi-key queries are handled generically, outside the read lock
      return allDocsKeysQuery(this, opts);
    }
    return readLock(function (opts, callback) {
      opts = clone(opts);
      countDocs(function (err, docCount) {
        /* istanbul ignore if */
        if (err) {
          return callback(err);
        }
        var readstreamOpts = {};
        var skip = opts.skip || 0;
        if (opts.startkey) {
          readstreamOpts.gte = opts.startkey;
        }
        if (opts.endkey) {
          readstreamOpts.lte = opts.endkey;
        }
        if (opts.key) {
          // exact-key query: collapse the range to a single key
          readstreamOpts.gte = readstreamOpts.lte = opts.key;
        }
        if (opts.descending) {
          readstreamOpts.reverse = true;
          // switch start and ends
          var tmp = readstreamOpts.lte;
          readstreamOpts.lte = readstreamOpts.gte;
          readstreamOpts.gte = tmp;
        }
        var limit;
        if (typeof opts.limit === 'number') {
          limit = opts.limit;
        }
        if (limit === 0 ||
            ('gte' in readstreamOpts && 'lte' in readstreamOpts &&
              readstreamOpts.gte > readstreamOpts.lte)) {
          // should return 0 results when start is greater than end.
          // normally level would "fix" this for us by reversing the order,
          // so short-circuit instead
          var returnVal = {
            total_rows: docCount,
            offset: opts.skip,
            rows: []
          };
          /* istanbul ignore if */
          if (opts.update_seq) {
            returnVal.update_seq = db._updateSeq;
          }
          return callback(null, returnVal);
        }
        var results = [];
        var docstream = stores.docStore.readStream(readstreamOpts);

        var throughStream = through2.obj(function (entry, _, next) {
          var metadata = entry.value;
          // winningRev and deleted are performance-killers, but
          // in newer versions of PouchDB, they are cached on the metadata
          var winningRev$$1 = getWinningRev(metadata);
          var deleted = getIsDeleted(metadata, winningRev$$1);
          if (!deleted) {
            if (skip-- > 0) {
              next();
              return;
            } else if (typeof limit === 'number' && limit-- <= 0) {
              // limit exhausted: stop pulling from the underlying store
              docstream.unpipe();
              docstream.destroy();
              next();
              return;
            }
          } else if (opts.deleted !== 'ok') {
            // deleted docs are excluded unless explicitly requested
            next();
            return;
          }
          // builds the row once the (optional) doc body is available
          function allDocsInner(data) {
            var doc = {
              id: metadata.id,
              key: metadata.id,
              value: {
                rev: winningRev$$1
              }
            };
            if (opts.include_docs) {
              doc.doc = data;
              doc.doc._rev = doc.value.rev;
              if (opts.conflicts) {
                var conflicts = collectConflicts(metadata);
                if (conflicts.length) {
                  doc.doc._conflicts = conflicts;
                }
              }
              // attachment bodies are not inlined here; mark them as stubs
              for (var att in doc.doc._attachments) {
                if (Object.prototype.hasOwnProperty.call(doc.doc._attachments, att)) {
                  doc.doc._attachments[att].stub = true;
                }
              }
            }
            if (opts.inclusive_end === false && metadata.id === opts.endkey) {
              return next();
            } else if (deleted) {
              if (opts.deleted === 'ok') {
                doc.value.deleted = true;
                doc.doc = null;
              } else {
                /* istanbul ignore next */
                return next();
              }
            }
            results.push(doc);
            next();
          }
          if (opts.include_docs) {
            // the doc body lives in the bySeqStore under the winning rev's seq
            var seq = metadata.rev_map[winningRev$$1];
            stores.bySeqStore.get(formatSeq(seq), function (err, data) {
              allDocsInner(data);
            });
          }
          else {
            allDocsInner();
          }
        }, function (next) {
          // flush: optionally inline attachments, then deliver the result set
          Promise.resolve().then(function () {
            if (opts.include_docs && opts.attachments) {
              return fetchAttachments(results, stores, opts);
            }
          }).then(function () {
            var returnVal = {
              total_rows: docCount,
              offset: opts.skip,
              rows: results
            };

            /* istanbul ignore if */
            if (opts.update_seq) {
              returnVal.update_seq = db._updateSeq;
            }
            callback(null, returnVal);
          }, callback);
          next();
        }).on('unpipe', function () {
          // triggered by the early-exit above; end the transform so its
          // flush function runs and the callback fires
          throughStream.end();
        });

        docstream.on('error', callback);

        docstream.pipe(throughStream);
      });
    })(opts, callback);
  };
5869
  // _changes: stream the bySeqStore in seq order, resolve each entry against
  // the doc's current metadata (so only the latest seq per doc is emitted),
  // apply filters/limits, and deliver change rows via opts.onChange.
  // Continuous mode delegates entirely to the shared changes listener.
  api._changes = function (opts) {
    opts = clone(opts);

    if (opts.continuous) {
      var id = name + ':' + uuid$1();
      levelChanges.addListener(name, id, api, opts);
      levelChanges.notify(name);
      return {
        cancel: function () {
          levelChanges.removeListener(name, id);
        }
      };
    }

    var descending = opts.descending;
    var results = [];
    var lastSeq = opts.since || 0;
    var called = 0; // number of changes that passed the filter so far
    var streamOpts = {
      reverse: descending
    };
    var limit;
    if ('limit' in opts && opts.limit > 0) {
      limit = opts.limit;
    }
    if (!streamOpts.reverse) {
      streamOpts.start = formatSeq(opts.since || 0);
    }

    var docIds = opts.doc_ids && new Set(opts.doc_ids);
    var filter = filterChange(opts);
    // cache doc metadata per id so repeated seqs for one doc hit the store once
    var docIdsToMetadata = new Map();

    // Tear down the stream and (for one-shot mode) deliver the final results.
    function complete() {
      opts.done = true;
      if (opts.return_docs && opts.limit) {
        /* istanbul ignore if */
        if (opts.limit < results.length) {
          results.length = opts.limit;
        }
      }
      changeStream.unpipe(throughStream);
      changeStream.destroy();
      if (!opts.continuous && !opts.cancelled) {
        if (opts.include_docs && opts.attachments && opts.return_docs) {
          fetchAttachments(results, stores, opts).then(function () {
            opts.complete(null, {results, last_seq: lastSeq});
          });
        } else {
          opts.complete(null, {results, last_seq: lastSeq});
        }
      }
    }
    var changeStream = stores.bySeqStore.readStream(streamOpts);
    var throughStream = through2.obj(function (data, _, next) {
      if (limit && called >= limit) {
        complete();
        return next();
      }
      if (opts.cancelled || opts.done) {
        return next();
      }

      var seq = parseSeq(data.key);
      var doc = data.value;

      if (seq === opts.since && !descending) {
        // couchdb ignores `since` if descending=true
        return next();
      }

      if (docIds && !docIds.has(doc._id)) {
        return next();
      }

      var metadata;

      function onGetMetadata(metadata) {
        var winningRev$$1 = getWinningRev(metadata);

        function onGetWinningDoc(winningDoc) {

          var change = opts.processChange(winningDoc, metadata, opts);
          change.seq = metadata.seq;

          var filtered = filter(change);
          if (typeof filtered === 'object') {
            // filter errored; abort the whole feed with the error object
            return opts.complete(filtered);
          }

          if (filtered) {
            called++;

            if (opts.attachments && opts.include_docs) {
              // fetch attachment immediately for the benefit
              // of live listeners
              fetchAttachments([change], stores, opts).then(function () {
                opts.onChange(change);
              });
            } else {
              opts.onChange(change);
            }

            if (opts.return_docs) {
              results.push(change);
            }
          }
          next();
        }

        if (metadata.seq !== seq) {
          // some other seq is later
          return next();
        }

        lastSeq = seq;

        if (winningRev$$1 === doc._rev) {
          return onGetWinningDoc(doc);
        }

        // fetch the winner

        var winningSeq = metadata.rev_map[winningRev$$1];

        stores.bySeqStore.get(formatSeq(winningSeq), function (err, doc) {
          onGetWinningDoc(doc);
        });
      }

      metadata = docIdsToMetadata.get(doc._id);
      if (metadata) { // cached
        return onGetMetadata(metadata);
      }
      // metadata not cached, have to go fetch it
      stores.docStore.get(doc._id, function (err, metadata) {
        /* istanbul ignore if */
        if (opts.cancelled || opts.done || db.isClosed() ||
          isLocalId(metadata.id)) {
          return next();
        }
        docIdsToMetadata.set(doc._id, metadata);
        onGetMetadata(metadata);
      });
    }, function (next) {
      // flush: clamp results to the limit before complete() delivers them
      if (opts.cancelled) {
        return next();
      }
      if (opts.return_docs && opts.limit) {
        /* istanbul ignore if */
        if (opts.limit < results.length) {
          results.length = opts.limit;
        }
      }

      next();
    }).on('unpipe', function () {
      throughStream.end();
      complete();
    });
    changeStream.pipe(throughStream);
    return {
      cancel: function () {
        opts.cancelled = true;
        complete();
      }
    };
  };
6038
6039 api._close = function (callback) {
6040 /* istanbul ignore if */
6041 if (db.isClosed()) {
6042 return callback(createError(NOT_OPEN));
6043 }
6044 db.close(function (err) {
6045 /* istanbul ignore if */
6046 if (err) {
6047 callback(err);
6048 } else {
6049 dbStore.delete(name);
6050
6051 var adapterName = functionName(leveldown);
6052 var adapterStore = dbStores.get(adapterName);
6053 var viewNamePrefix = PouchDB.prefix + name + "-mrview-";
6054 var keys = [...adapterStore.keys()].filter(k => k.includes(viewNamePrefix));
6055 keys.forEach(key => {
6056 var eventEmitter = adapterStore.get(key);
6057 eventEmitter.removeAllListeners();
6058 eventEmitter.close();
6059 adapterStore.delete(key);
6060 });
6061
6062 callback();
6063 }
6064 });
6065 };
6066
6067 api._getRevisionTree = function (docId, callback) {
6068 stores.docStore.get(docId, function (err, metadata) {
6069 if (err) {
6070 callback(createError(MISSING_DOC));
6071 } else {
6072 callback(null, metadata.rev_tree);
6073 }
6074 });
6075 };
6076
  // Public compaction entry point: same as the NoLock variant below, but
  // serialized behind the adapter's write lock.
  api._doCompaction = writeLock(function (docId, revs, opts, callback) {
    api._doCompactionNoLock(docId, revs, opts, callback);
  });
6080
  // the NoLock version is for use by bulkDocs
  // Compact a single document: mark the given revs as 'missing' in the rev
  // tree, delete their bySeqStore entries, and garbage-collect any attachment
  // digests that are no longer referenced by any doc@rev.
  api._doCompactionNoLock = function (docId, revs, opts, callback) {
    if (typeof opts === 'function') {
      callback = opts;
      opts = {};
    }

    if (!revs.length) {
      return callback();
    }
    // reuse the caller's transaction (bulkDocs) or start a fresh one
    var txn = opts.ctx || new LevelTransaction();

    txn.get(stores.docStore, docId, function (err, metadata) {
      /* istanbul ignore if */
      if (err) {
        return callback(err);
      }
      // collect the seqs of the compacted revs and drop them from rev_map
      var seqs = revs.map(function (rev$$1) {
        var seq = metadata.rev_map[rev$$1];
        delete metadata.rev_map[rev$$1];
        return seq;
      });
      traverseRevTree(metadata.rev_tree, function (isLeaf, pos,
                                                   revHash, ctx, opts) {
        var rev$$1 = pos + '-' + revHash;
        if (revs.indexOf(rev$$1) !== -1) {
          opts.status = 'missing';
        }
      });

      var batch = [];
      batch.push({
        key: metadata.id,
        value: metadata,
        type: 'put',
        prefix: stores.docStore
      });

      // digests seen on the compacted revs; candidates for GC below
      var digestMap = {};
      var numDone = 0;
      var overallErr;
      // join point for the per-seq lookups in the forEach at the bottom
      function checkDone(err) {
        /* istanbul ignore if */
        if (err) {
          overallErr = err;
        }
        if (++numDone === revs.length) { // done
          /* istanbul ignore if */
          if (overallErr) {
            return callback(overallErr);
          }
          deleteOrphanedAttachments();
        }
      }

      // stage the accumulated batch and commit (unless a caller-owned txn)
      function finish(err) {
        /* istanbul ignore if */
        if (err) {
          return callback(err);
        }
        txn.batch(batch);
        if (opts.ctx) {
          // don't execute immediately
          return callback();
        }
        txn.execute(db, callback);
      }

      // For each candidate digest, drop the compacted doc@rev refs; if no
      // refs remain the attachment metadata and binary blob are deleted.
      function deleteOrphanedAttachments() {
        var possiblyOrphanedAttachments = Object.keys(digestMap);
        if (!possiblyOrphanedAttachments.length) {
          return finish();
        }
        // NOTE: these intentionally shadow the outer numDone/overallErr/
        // checkDone — this is a second, independent join point
        var numDone = 0;
        var overallErr;
        function checkDone(err) {
          /* istanbul ignore if */
          if (err) {
            overallErr = err;
          }
          if (++numDone === possiblyOrphanedAttachments.length) {
            finish(overallErr);
          }
        }
        var refsToDelete = new Map();
        revs.forEach(function (rev$$1) {
          refsToDelete.set(docId + '@' + rev$$1, true);
        });
        possiblyOrphanedAttachments.forEach(function (digest) {
          txn.get(stores.attachmentStore, digest, function (err, attData) {
            /* istanbul ignore if */
            if (err) {
              if (err.name === 'NotFoundError') {
                return checkDone();
              } else {
                return checkDone(err);
              }
            }
            var refs = Object.keys(attData.refs || {}).filter(function (ref) {
              return !refsToDelete.has(ref);
            });
            var newRefs = {};
            refs.forEach(function (ref) {
              newRefs[ref] = true;
            });
            if (refs.length) { // not orphaned
              batch.push({
                key: digest,
                type: 'put',
                value: {refs: newRefs},
                prefix: stores.attachmentStore
              });
            } else { // orphaned, can safely delete
              batch = batch.concat([{
                key: digest,
                type: 'del',
                prefix: stores.attachmentStore
              }, {
                key: digest,
                type: 'del',
                prefix: stores.binaryStore
              }]);
            }
            checkDone();
          });
        });
      }

      // delete each compacted rev's seq entry, remembering its attachment
      // digests so they can be GC'd above
      seqs.forEach(function (seq) {
        batch.push({
          key: formatSeq(seq),
          type: 'del',
          prefix: stores.bySeqStore
        });
        txn.get(stores.bySeqStore, formatSeq(seq), function (err, doc) {
          /* istanbul ignore if */
          if (err) {
            if (err.name === 'NotFoundError') {
              return checkDone();
            } else {
              return checkDone(err);
            }
          }
          var atts = Object.keys(doc._attachments || {});
          atts.forEach(function (attName) {
            var digest = doc._attachments[attName].digest;
            digestMap[digest] = true;
          });
          checkDone();
        });
      });
    });
  };
6234
6235 api._getLocal = function (id, callback) {
6236 stores.localStore.get(id, function (err, doc) {
6237 if (err) {
6238 callback(createError(MISSING_DOC));
6239 } else {
6240 callback(null, doc);
6241 }
6242 });
6243 };
6244
6245 api._putLocal = function (doc, opts, callback) {
6246 if (typeof opts === 'function') {
6247 callback = opts;
6248 opts = {};
6249 }
6250 if (opts.ctx) {
6251 api._putLocalNoLock(doc, opts, callback);
6252 } else {
6253 api._putLocalWithLock(doc, opts, callback);
6254 }
6255 };
6256
  // Lock-holding variant of _putLocalNoLock, used for standalone local puts.
  api._putLocalWithLock = writeLock(function (doc, opts, callback) {
    api._putLocalNoLock(doc, opts, callback);
  });
6260
  // the NoLock version is for use by bulkDocs
  // Write a _local doc with optimistic concurrency: the incoming _rev must
  // match the stored one (or be absent for a brand new doc). Local revs use a
  // simple "0-N" counter rather than a hashed revision tree.
  api._putLocalNoLock = function (doc, opts, callback) {
    delete doc._revisions; // ignore this, trust the rev
    var oldRev = doc._rev;
    var id = doc._id;

    var txn = opts.ctx || new LevelTransaction();

    txn.get(stores.localStore, id, function (err, resp) {
      // caller claims an existing rev but the doc doesn't exist -> conflict
      if (err && oldRev) {
        return callback(createError(REV_CONFLICT));
      }
      // doc exists but the caller's rev is stale -> conflict
      if (resp && resp._rev !== oldRev) {
        return callback(createError(REV_CONFLICT));
      }
      // bump the "0-N" generation counter (or start at 0-1)
      doc._rev =
        oldRev ? '0-' + (parseInt(oldRev.split('-')[1], 10) + 1) : '0-1';
      var batch = [
        {
          type: 'put',
          prefix: stores.localStore,
          key: id,
          value: doc
        }
      ];

      txn.batch(batch);
      var ret = {ok: true, id: doc._id, rev: doc._rev};

      if (opts.ctx) {
        // don't execute immediately
        return callback(null, ret);
      }
      txn.execute(db, function (err) {
        /* istanbul ignore if */
        if (err) {
          return callback(err);
        }
        callback(null, ret);
      });
    });
  };
6303
6304 api._removeLocal = function (doc, opts, callback) {
6305 if (typeof opts === 'function') {
6306 callback = opts;
6307 opts = {};
6308 }
6309 if (opts.ctx) {
6310 api._removeLocalNoLock(doc, opts, callback);
6311 } else {
6312 api._removeLocalWithLock(doc, opts, callback);
6313 }
6314 };
6315
  // Lock-holding variant of _removeLocalNoLock, for standalone local deletes.
  api._removeLocalWithLock = writeLock(function (doc, opts, callback) {
    api._removeLocalNoLock(doc, opts, callback);
  });
6319
  // the NoLock version is for use by bulkDocs
  // Delete a _local doc: the stored _rev must match the caller's _rev.
  // A missing doc maps to MISSING_DOC; a mismatched rev to REV_CONFLICT.
  api._removeLocalNoLock = function (doc, opts, callback) {
    var txn = opts.ctx || new LevelTransaction();
    txn.get(stores.localStore, doc._id, function (err, resp) {
      if (err) {
        /* istanbul ignore if */
        if (err.name !== 'NotFoundError') {
          return callback(err);
        } else {
          return callback(createError(MISSING_DOC));
        }
      }
      if (resp._rev !== doc._rev) {
        return callback(createError(REV_CONFLICT));
      }
      txn.batch([{
        prefix: stores.localStore,
        type: 'del',
        key: doc._id
      }]);
      // deleted local docs conventionally report rev '0-0'
      var ret = {ok: true, id: doc._id, rev: '0-0'};
      if (opts.ctx) {
        // don't execute immediately
        return callback(null, ret);
      }
      txn.execute(db, function (err) {
        /* istanbul ignore if */
        if (err) {
          return callback(err);
        }
        callback(null, ret);
      });
    });
  };
6354
  // close and delete open leveldb stores
  // If this adapter/database pair is tracked in the open-handle registry,
  // close the handle first; either way, finish by destroying the on-disk
  // database via callDestroy().
  api._destroy = function (opts, callback) {
    var dbStore;
    var leveldownName = functionName(leveldown);
    /* istanbul ignore else */
    if (dbStores.has(leveldownName)) {
      dbStore = dbStores.get(leveldownName);
    } else {
      return callDestroy(name, callback);
    }

    /* istanbul ignore else */
    if (dbStore.has(name)) {
      levelChanges.removeAllListeners(name);

      dbStore.get(name).close(function () {
        dbStore.delete(name);
        callDestroy(name, callback);
      });
    } else {
      callDestroy(name, callback);
    }
  };
6378 function callDestroy(name, cb) {
6379 // May not exist if leveldown is backed by memory adapter
6380 /* istanbul ignore else */
6381 if ('destroy' in leveldown) {
6382 leveldown.destroy(name, cb);
6383 } else {
6384 cb(null);
6385 }
6386 }
6387}
6388
// require leveldown. provide verbose output on error as it is the default
// nodejs adapter, which we do not provide for the user
// Returns either the leveldown module or an Error describing why it could
// not be loaded (never throws).
/* istanbul ignore next */
var requireLeveldown = function () {
  try {
    return require('leveldown');
  } catch (err) {
    /* eslint no-ex-assign: 0*/
    err = err || 'leveldown import error';
    if (err.code === 'MODULE_NOT_FOUND') {
      // handle leveldown not installed case
      return new Error(
        'the \'leveldown\' package is not available. install it, or, ' +
        'specify another storage backend using the \'db\' option'
      );
    }
    if (err.message && err.message.match('Module version mismatch')) {
      // handle common user environment error (native module built against a
      // different node version)
      return new Error(
        err.message +
        ' This generally implies that leveldown was built with a different' +
        ' version of node than that which is running now. You may try' +
        ' fully removing and reinstalling PouchDB or leveldown to resolve.'
      );
    }
    // handle general internal nodejs require error
    return new Error(err.toString() + ': unable to import leveldown');
  }
};
6417
// Sublevel store names from the legacy (pre-migration) on-disk layout;
// used only by the migration helpers below.
var stores = [
  'document-store',
  'by-sequence',
  'attach-store',
  'attach-binary-store'
];
// Render a sequence number as a fixed-width, zero-padded 16-character
// string so lexicographic key order in LevelDB matches numeric order.
function formatSeq(n) {
  return `0000000000000000${n}`.slice(-16);
}
// Meta keys used by the legacy layout; the migration moves these values
// out of the by-sequence store and into the meta store.
var UPDATE_SEQ_KEY$1 = '_local_last_update_seq';
var DOC_COUNT_KEY$1 = '_local_doc_count';
var UUID_KEY$1 = '_local_uuid';
6430
// Migration step 1: move the four standalone legacy LevelDB stores into
// sublevels of a single database, then destroy the originals and remove
// the old directory. The presence of a "<base>.uuid" file marks a legacy
// layout; if it's absent (unlink fails) there is nothing to migrate.
var doMigrationOne = function (name, db, callback) {
  // local require to prevent crashing if leveldown isn't installed.
  var leveldown = require("leveldown");

  var base = path.resolve(name);
  // Copy one legacy store into the matching sublevel. Index 3 is the
  // binary attachment store and needs binary value encoding.
  function move(store, index, cb) {
    var storePath = path.join(base, store);
    var opts;
    if (index === 3) {
      opts = {
        valueEncoding: 'binary'
      };
    } else {
      opts = {
        valueEncoding: 'json'
      };
    }
    var sub = db.sublevel(store, opts);
    var orig = level(storePath, opts);
    var from = orig.createReadStream();
    var writeStream = new LevelWriteStream(sub);
    var to = writeStream();
    from.on('end', function () {
      orig.close(function (err) {
        cb(err, storePath);
      });
    });
    from.pipe(to);
  }
  fs.unlink(base + '.uuid', function (err) {
    if (err) {
      // no legacy uuid marker file: nothing to migrate
      return callback();
    }
    var todo = 4;
    var done = [];
    stores.forEach(function (store, i) {
      move(store, i, function (err, storePath) {
        /* istanbul ignore if */
        if (err) {
          return callback(err);
        }
        done.push(storePath);
        if (!(--todo)) {
          // all four stores copied; destroy the originals, counting todo
          // back up until every destroy has completed
          done.forEach(function (item) {
            leveldown.destroy(item, function () {
              if (++todo === done.length) {
                fs.rmdir(base, callback);
              }
            });
          });
        }
      });
    });
  });
};
// Migration step 2: relocate meta keys (uuid, doc count, update seq) from
// the by-sequence store into the meta store, and move _local documents out
// of the doc/by-sequence stores into the dedicated local store. All changes
// are accumulated into one batch and committed atomically at the end.
var doMigrationTwo = function (db, stores, callback) {
  var batches = [];
  stores.bySeqStore.get(UUID_KEY$1, function (err, value) {
    if (err) {
      // no uuid key, so don't need to migrate;
      return callback();
    }
    batches.push({
      key: UUID_KEY$1,
      value,
      prefix: stores.metaStore,
      type: 'put',
      valueEncoding: 'json'
    });
    batches.push({
      key: UUID_KEY$1,
      prefix: stores.bySeqStore,
      type: 'del'
    });
    stores.bySeqStore.get(DOC_COUNT_KEY$1, function (err, value) {
      if (value) {
        // if there is no doc count key, just skip it;
        // we can live with that
        batches.push({
          key: DOC_COUNT_KEY$1,
          value,
          prefix: stores.metaStore,
          type: 'put',
          valueEncoding: 'json'
        });
        batches.push({
          key: DOC_COUNT_KEY$1,
          prefix: stores.bySeqStore,
          type: 'del'
        });
      }
      stores.bySeqStore.get(UPDATE_SEQ_KEY$1, function (err, value) {
        if (value) {
          // if there is no update seq key, just skip it;
          // we've gone too far to stop now
          batches.push({
            key: UPDATE_SEQ_KEY$1,
            value,
            prefix: stores.metaStore,
            type: 'put',
            valueEncoding: 'json'
          });
          batches.push({
            key: UPDATE_SEQ_KEY$1,
            prefix: stores.bySeqStore,
            type: 'del'
          });
        }
        var deletedSeqs = {};
        // first stage: scan doc metadata for _local ids, queue their
        // deletion, push their winning doc into the local store, and emit
        // every associated seq downstream for cleanup
        stores.docStore.createReadStream({
          startKey: '_',
          endKey: '_\xFF'
        }).pipe(through2.obj(function (ch, _, next) {
          if (!isLocalId(ch.key)) {
            return next();
          }
          batches.push({
            key: ch.key,
            prefix: stores.docStore,
            type: 'del'
          });
          var winner = winningRev(ch.value);
          Object.keys(ch.value.rev_map).forEach(function (key) {
            if (key !== 'winner') {
              this.push(formatSeq(ch.value.rev_map[key]));
            }
          }, this);
          var winningSeq = ch.value.rev_map[winner];
          stores.bySeqStore.get(formatSeq(winningSeq), function (err, value) {
            if (!err) {
              batches.push({
                key: ch.key,
                value,
                prefix: stores.localStore,
                type: 'put',
                valueEncoding: 'json'
              });
            }
            next();
          });

        })).pipe(through2.obj(function (seq, _, next) {
          // second stage: delete each (deduplicated) _local seq entry
          /* istanbul ignore if */
          if (deletedSeqs[seq]) {
            return next();
          }
          deletedSeqs[seq] = true;
          stores.bySeqStore.get(seq, function (err, resp) {
            /* istanbul ignore if */
            if (err || !isLocalId(resp._id)) {
              return next();
            }
            batches.push({
              key: seq,
              prefix: stores.bySeqStore,
              type: 'del'
            });
            next();
          });
        }, function () {
          // commit everything in one atomic batch
          db.batch(batches, callback);
        }));
      });
    });
  });

};
6600
// Migration hooks handed to LevelPouch via the adapter options below.
var migrate = {
  doMigrationOne,
  doMigrationTwo
};
6605
// Node adapter constructor: a thin wrapper over LevelPouch that defaults
// the backend to leveldown and wires in the legacy-layout migrations.
function LevelDownPouch(opts, callback) {

  // Users can pass in their own leveldown alternative here, in which case
  // it overrides the default one. (This is in addition to the custom builds.)
  var leveldown = opts.db;

  /* istanbul ignore else */
  if (!leveldown) {
    leveldown = requireLeveldown();

    // requireLeveldown returns an Error (rather than throwing) on failure
    /* istanbul ignore if */
    if (leveldown instanceof Error) {
      return callback(leveldown);
    }
  }

  var _opts = Object.assign({
    db: leveldown,
    migrate
  }, opts);

  LevelPouch.call(this, _opts, callback);
}
6629
// overrides for normal LevelDB behavior on Node
// valid() is unconditionally true here; actual leveldown availability is
// checked (and reported) at construction time instead.
LevelDownPouch.valid = function () {
  return true;
};
// No '_pouch_' name prefix is needed outside the browser.
LevelDownPouch.use_prefix = false;
6635
// Plugin installer: registers the leveldb adapter on a PouchDB constructor
// (the trailing `true` marks it as preferred/auto-usable).
function LevelPouch$1 (PouchDB) {
  PouchDB.adapter('leveldb', LevelDownPouch, true);
}
6639
// dead simple promise pool, inspired by https://github.com/timdp/es6-promise-pool
// but much smaller in code size. limits the number of concurrent promises that are executed


// Run the given promise-returning factories with at most `limit` in flight
// at once. Resolves when all have settled; rejects with the first error
// (but only after every factory has finished).
function pool(promiseFactories, limit) {
  return new Promise(function (resolve, reject) {
    var running = 0;
    var current = 0;
    var done = 0;
    var len = promiseFactories.length;
    var err;

    // Fix: with an empty input the done-counter below could never reach
    // `len`, so the returned promise would hang forever. Resolve up front.
    if (len === 0) {
      return resolve();
    }

    // start the next factory and track it
    function runNext() {
      running++;
      promiseFactories[current++]().then(onSuccess, onError);
    }

    // one factory settled: finish if all are done, otherwise refill the pool
    function doNext() {
      if (++done === len) {
        /* istanbul ignore if */
        if (err) {
          reject(err);
        } else {
          resolve();
        }
      } else {
        runNextBatch();
      }
    }

    function onSuccess() {
      running--;
      doNext();
    }

    /* istanbul ignore next */
    function onError(thisErr) {
      running--;
      // remember only the first error; keep draining the remaining work
      err = err || thisErr;
      doNext();
    }

    // top up the pool to `limit` concurrent promises
    function runNextBatch() {
      while (running < limit && current < len) {
        runNext();
      }
    }

    runNextBatch();
  });
}
6691
// Tuning constants for the HTTP adapter.
const CHANGES_BATCH_SIZE = 25;        // default _changes page size
const MAX_SIMULTANEOUS_REVS = 50;     // cap on revs fetched per request
const CHANGES_TIMEOUT_BUFFER = 5000;  // ms of slack added to changes timeouts
const DEFAULT_HEARTBEAT = 10000;      // ms between continuous-feed heartbeats

// Cache of host -> whether the server supports _bulk_get.
const supportsBulkGetMap = {};
6698
// Convert every base64 attachment on a row's doc (or on a bulkGet "ok"
// result) into a Blob/Buffer in place. No-op when the row carries no doc
// or the doc has no attachments.
function readAttachmentsAsBlobOrBuffer(row) {
  const doc = row.doc || row.ok;
  const atts = doc && doc._attachments;
  if (!atts) {
    return;
  }
  for (const filename of Object.keys(atts)) {
    const att = atts[filename];
    att.data = b64ToBluffer(att.data, att.content_type);
  }
}
6710
// URI-encode a document id for use in a URL path, keeping the '_design/'
// or '_local/' namespace prefix itself unescaped (CouchDB expects that).
function encodeDocId(id) {
  if (/^_design/.test(id)) {
    return `_design/${encodeURIComponent(id.slice(8))}`;
  }
  if (id.startsWith('_local/')) {
    return `_local/${encodeURIComponent(id.slice(7))}`;
  }
  return encodeURIComponent(id);
}
6720
// Convert any binary (Blob/Buffer) attachment data on the doc into base64
// strings, in place, so the doc can be serialized over HTTP. Attachment
// data that is already a string is assumed to be base64 and left alone.
// Returns a promise that resolves once all conversions are done.
function preprocessAttachments$1(doc) {
  // Fix: the original tested `!Object.keys(doc._attachments)`, which is
  // never true (Object.keys always returns a truthy array); the intended
  // empty-attachments short-circuit needs a length check.
  if (!doc._attachments || !Object.keys(doc._attachments).length) {
    return Promise.resolve();
  }

  return Promise.all(Object.keys(doc._attachments).map(function (key) {
    const attachment = doc._attachments[key];
    if (attachment.data && typeof attachment.data !== 'string') {
      return new Promise(function (resolve) {
        blobToBase64(attachment.data, resolve);
      }).then(function (b64) {
        attachment.data = b64;
      });
    }
  }));
}
6737
// True when opts.prefix is an http(s) URL rather than a filesystem path.
function hasUrlPrefix(opts) {
  if (!opts.prefix) {
    return false;
  }
  return ['http', 'https'].includes(parseUri(opts.prefix).protocol);
}
6745
// Get all the information you possibly can about the URI given by name and
// return it as a suitable object.
// The returned object is the parseUri result augmented with:
//   .auth {username, password} when credentials were embedded in the URL,
//   .db   the (URI-encoded) database name, i.e. the last path segment,
//   .path the remaining path leading up to the db name.
function getHost(name, opts) {
  // encode db name if opts.prefix is a url (#5574)
  if (hasUrlPrefix(opts)) {
    const dbName = opts.name.substr(opts.prefix.length);
    // Ensure prefix has a trailing slash
    const prefix = opts.prefix.replace(/\/?$/, '/');
    name = prefix + encodeURIComponent(dbName);
  }

  const uri = parseUri(name);
  if (uri.user || uri.password) {
    uri.auth = {username: uri.user, password: uri.password};
  }

  // Split the path part of the URI into parts using '/' as the delimiter
  // after removing any leading '/' and any trailing '/'
  const parts = uri.path.replace(/(^\/|\/$)/g, '').split('/');

  uri.db = parts.pop();
  // Prevent double encoding of URI component
  if (uri.db.indexOf('%') === -1) {
    uri.db = encodeURIComponent(uri.db);
  }

  uri.path = parts.join('/');

  return uri;
}
6776
// Generate a URL with the host data given by opts and the given path,
// scoped under the database name (opts.db).
function genDBUrl(opts, path$$1) {
  return genUrl(opts, `${opts.db}/${path$$1}`);
}
6781
// Generate an absolute URL from the parsed host data in `opts` plus the
// given path. A '/' separator is inserted only when the host itself
// carries a path component; the port is included only when present.
function genUrl(opts, path$$1) {
  const pathDel = !opts.path ? '' : '/';
  const portPart = opts.port ? `:${opts.port}` : '';
  return `${opts.protocol}://${opts.host}${portPart}/${opts.path}${pathDel}${path$$1}`;
}
6794
// Serialize a flat params object into a '?'-prefixed query string, or ''
// when empty. Values are URI-encoded; keys are emitted as-is.
function paramsToStr(params) {
  const parts = [];
  for (const key of Object.keys(params)) {
    parts.push(key + '=' + encodeURIComponent(params[key]));
  }
  if (parts.length === 0) {
    return '';
  }
  return '?' + parts.join('&');
}
6803
// Old IE / legacy Edge aggressively cache GET requests; detect those
// browsers via the user agent so a cache-busting nonce can be appended
// to GET URLs. Requests default to GET when no method is specified.
function shouldCacheBust(opts) {
  const ua = (typeof navigator !== 'undefined' && navigator.userAgent) ?
    navigator.userAgent.toLowerCase() : '';
  const isLegacyMicrosoft =
    ua.indexOf('msie') !== -1 ||
    ua.indexOf('trident') !== -1 ||
    ua.indexOf('edge') !== -1;
  const isGET = !('method' in opts) || opts.method === 'GET';
  return isLegacyMicrosoft && isGET;
}
6813
6814// Implements the PouchDB API for dealing with CouchDB instances over HTTP
6815function HttpPouch(opts, callback) {
6816
6817 // The functions that will be publicly available for HttpPouch
6818 const api = this;
6819
6820 const host = getHost(opts.name, opts);
6821 const dbUrl = genDBUrl(host, '');
6822
6823 opts = clone(opts);
6824
  // Adapter-wide fetch wrapper: injects Basic auth (from opts or the parsed
  // host URL), merges user-supplied headers, always sends credentials, and
  // cache-busts GETs on legacy IE/Edge. Delegates to a user-supplied
  // opts.fetch when provided.
  const ourFetch = async function (url, options) {

    options = options || {};
    options.headers = options.headers || new nodeFetch.Headers();

    options.credentials = 'include';

    if (opts.auth || host.auth) {
      const nAuth = opts.auth || host.auth;
      const str = nAuth.username + ':' + nAuth.password;
      // unescape/encodeURIComponent round-trip makes the btoa input Latin-1-safe
      const token = thisBtoa(unescape(encodeURIComponent(str)));
      options.headers.set('Authorization', 'Basic ' + token);
    }

    const headers = opts.headers || {};
    Object.keys(headers).forEach(function (key) {
      options.headers.append(key, headers[key]);
    });

    /* istanbul ignore if */
    if (shouldCacheBust(options)) {
      url += (url.indexOf('?') === -1 ? '?' : '&') + '_nonce=' + Date.now();
    }

    const fetchFun = opts.fetch || fetch;
    return await fetchFun(url, options);
  };
6852
  // Wrap an adapter method so that the remote database setup (setup()) has
  // completed before the method body runs; setup failures are routed to the
  // method's trailing node-style callback.
  function adapterFun$$1(name, fun) {
    return adapterFun(name, function (...args) {
      setup().then(function () {
        return fun.apply(this, args);
      }).catch(function (e) {
        // by convention the last argument is the callback
        const callback = args.pop();
        callback(e);
      });
    }).bind(api);
  }
6863
  // Perform a JSON request against the server. Defaults Content-Type/Accept
  // to application/json, throws a PouchDB-style error for non-2xx responses,
  // and maps per-row errors inside array responses (e.g. _bulk_get) into
  // error objects. Resolves to {ok, status, data}.
  async function fetchJSON(url, options) {

    const result = {};

    options = options || {};
    options.headers = options.headers || new nodeFetch.Headers();

    if (!options.headers.get('Content-Type')) {
      options.headers.set('Content-Type', 'application/json');
    }
    if (!options.headers.get('Accept')) {
      options.headers.set('Accept', 'application/json');
    }

    const response = await ourFetch(url, options);
    result.ok = response.ok;
    result.status = response.status;
    const json = await response.json();

    result.data = json;
    if (!result.ok) {
      result.data.status = result.status;
      const err = generateErrorFromResponse(result.data);
      throw err;
    }

    if (Array.isArray(result.data)) {
      // array responses can carry per-item errors; surface them as Errors
      result.data = result.data.map(function (v) {
        if (v.error || v.missing) {
          return generateErrorFromResponse(v);
        } else {
          return v;
        }
      });
    }

    return result;
  }
6902
  // Memoized promise for the one-time remote database setup below.
  let setupPromise;

  // Ensure the remote database exists (GET, then PUT on 404). The result is
  // memoized so concurrent and subsequent calls share one round-trip; a
  // failed setup clears the memo so the next call retries.
  async function setup() {
    if (opts.skip_setup) {
      return Promise.resolve();
    }

    // If there is a setup in process or previous successful setup
    // done then we will use that
    // If previous setups have been rejected we will try again
    if (setupPromise) {
      return setupPromise;
    }

    setupPromise = fetchJSON(dbUrl).catch(function (err) {
      if (err && err.status && err.status === 404) {
        // database doesn't exist yet: create it
        return fetchJSON(dbUrl, {method: 'PUT'});
      } else {
        return Promise.reject(err);
      }
    }).catch(function (err) {
      // If we try to create a database that already exists, skipped in
      // istanbul since its catching a race condition.
      /* istanbul ignore if */
      if (err && err.status && err.status === 412) {
        return true;
      }
      return Promise.reject(err);
    });

    setupPromise.catch(function () {
      // allow a retry after a failed setup
      setupPromise = null;
    });

    return setupPromise;
  }
6939
// Report adapter readiness asynchronously, like other adapters do.
nextTick(function () {
  callback(null, api);
});

// Mark this adapter as remote (read by replication code elsewhere).
api._remote = true;

/* istanbul ignore next */
api.type = function () {
  return 'http';
};
6950
// Identify this database: prefer the server-reported uuid + db name from
// the welcome message, falling back to the database URL when the root
// request fails or carries no `uuid`.
api.id = adapterFun$$1('id', async function (callback) {
  let welcome = {};
  try {
    const response = await ourFetch(genUrl(host, ''));
    welcome = await response.json();
  } catch (err) {
    // Bad response or missing `uuid` should not prevent ID generation.
  }

  const generatedId = (welcome && welcome.uuid) ?
    (welcome.uuid + host.db) :
    genDBUrl(host, '');
  callback(null, generatedId);
});
6964
// Sends a POST request to the host calling the couchdb _compact function
// version: The version of CouchDB it is running
// Resolves via callback once the server reports compaction is no longer
// running, polling api.info() every opts.interval ms (default 200).
api.compact = adapterFun$$1('compact', async function (opts, callback) {
  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }
  opts = clone(opts);

  // Kick off compaction; completion is detected by polling below.
  await fetchJSON(genDBUrl(host, '_compact'), {method: 'POST'});

  function ping() {
    api.info(function (err, res$$1) {
      // CouchDB may send a "compact_running:true" if it's
      // already compacting. PouchDB Server doesn't.
      /* istanbul ignore else */
      if (res$$1 && !res$$1.compact_running) {
        callback(null, {ok: true});
      } else {
        setTimeout(ping, opts.interval || 200);
      }
    });
  }
  // Ping the http if it's finished compaction
  ping();
});
6991
// Bulk-fetch revisions. Uses CouchDB's /_bulk_get when the server supports
// it; otherwise falls back to a batched per-document shim. Support is
// probed once per server URL and cached in supportsBulkGetMap.
api.bulkGet = adapterFun('bulkGet', function (opts, callback) {
  const self = this;

  // Single POST to /_bulk_get carrying all requested docs.
  async function doBulkGet(cb) {
    const params = {};
    if (opts.revs) {
      params.revs = true;
    }
    if (opts.attachments) {
      /* istanbul ignore next */
      params.attachments = true;
    }
    if (opts.latest) {
      params.latest = true;
    }
    try {
      const result = await fetchJSON(genDBUrl(host, '_bulk_get' + paramsToStr(params)), {
        method: 'POST',
        body: JSON.stringify({ docs: opts.docs})
      });

      if (opts.attachments && opts.binary) {
        // Convert base64 attachment data into Blobs/Buffers in place.
        result.data.results.forEach(function (res$$1) {
          res$$1.docs.forEach(readAttachmentsAsBlobOrBuffer);
        });
      }
      cb(null, result.data);
    } catch (error) {
      cb(error);
    }
  }

  // Fallback for servers without _bulk_get: batched per-doc fetches.
  /* istanbul ignore next */
  function doBulkGetShim() {
    // avoid "url too long error" by splitting up into multiple requests
    const batchSize = MAX_SIMULTANEOUS_REVS;
    const numBatches = Math.ceil(opts.docs.length / batchSize);
    let numDone = 0;
    const results = new Array(numBatches);

    function onResult(batchNum) {
      return function (err, res$$1) {
        // err is impossible because shim returns a list of errs in that case
        results[batchNum] = res$$1.results;
        if (++numDone === numBatches) {
          callback(null, {results: results.flat()});
        }
      };
    }

    for (let i = 0; i < numBatches; i++) {
      const subOpts = pick(opts, ['revs', 'attachments', 'binary', 'latest']);
      subOpts.docs = opts.docs.slice(i * batchSize,
        Math.min(opts.docs.length, (i + 1) * batchSize));
      bulkGet(self, subOpts, onResult(i));
    }
  }

  // mark the whole database as either supporting or not supporting _bulk_get
  const dbUrl = genUrl(host, '');
  const supportsBulkGet = supportsBulkGetMap[dbUrl];

  /* istanbul ignore next */
  if (typeof supportsBulkGet !== 'boolean') {
    // check if this database supports _bulk_get
    doBulkGet(function (err, res$$1) {
      if (err) {
        supportsBulkGetMap[dbUrl] = false;
        // NOTE(review): `res` looks like an error-explaining logger renamed
        // by the bundler — confirm against the unbundled source.
        res(
          err.status,
          'PouchDB is just detecting if the remote ' +
          'supports the _bulk_get API.'
        );
        doBulkGetShim();
      } else {
        supportsBulkGetMap[dbUrl] = true;
        callback(null, res$$1);
      }
    });
  } else if (supportsBulkGet) {
    doBulkGet(callback);
  } else {
    doBulkGetShim();
  }
});
7077
// Calls GET on the database root, which returns server/database metadata
// (welcome string, CouchDB version, etc.). The database URL is attached as
// `host` on the result. Errors (network or setup) go to the callback.
api._info = async function (callback) {
  try {
    await setup();
    const dbRootUrl = genDBUrl(host, '');
    const response = await ourFetch(dbRootUrl);
    const info = await response.json();
    info.host = dbRootUrl;
    callback(null, info);
  } catch (err) {
    callback(err);
  }
};
7092
// Raw fetch proxy: paths starting with '/' resolve against the server
// root, all other paths resolve against the database URL.
api.fetch = async function (relativePath, options) {
  await setup();
  let url;
  if (relativePath.startsWith('/')) {
    url = genUrl(host, relativePath.slice(1));
  } else {
    url = genDBUrl(host, relativePath);
  }
  return ourFetch(url, options);
};
7100
// Get the document with the given id from the database given by host.
// The id could be solely the _id in the database, or it may be a
// _design/ID or _local/ID path
api.get = adapterFun$$1('get', async function (id, opts, callback) {
  // If no options were given, set the callback to the second parameter
  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }
  opts = clone(opts);

  // List of parameters to add to the GET request
  const params = {};

  if (opts.revs) {
    params.revs = true;
  }

  if (opts.revs_info) {
    params.revs_info = true;
  }

  if (opts.latest) {
    params.latest = true;
  }

  if (opts.open_revs) {
    // open_revs is either the literal "all" or a JSON array of rev ids.
    if (opts.open_revs !== "all") {
      opts.open_revs = JSON.stringify(opts.open_revs);
    }
    params.open_revs = opts.open_revs;
  }

  if (opts.rev) {
    params.rev = opts.rev;
  }

  if (opts.conflicts) {
    params.conflicts = opts.conflicts;
  }

  /* istanbul ignore if */
  if (opts.update_seq) {
    params.update_seq = opts.update_seq;
  }

  id = encodeDocId(id);

  // Replace attachment stubs on `doc` with real data, fetched via one
  // request per attachment (max 5 in flight).
  function fetchAttachments(doc) {
    const atts = doc._attachments;
    const filenames = atts && Object.keys(atts);
    if (!atts || !filenames.length) {
      return;
    }
    // we fetch these manually in separate XHRs, because
    // Sync Gateway would normally send it back as multipart/mixed,
    // which we cannot parse. Also, this is more efficient than
    // receiving attachments as base64-encoded strings.
    async function fetchData(filename) {
      const att = atts[filename];
      const path$$1 = encodeDocId(doc._id) + '/' + encodeAttachmentId(filename) +
        '?rev=' + doc._rev;

      const response = await ourFetch(genDBUrl(host, path$$1));

      let blob;
      if ('buffer' in response) {
        // node-fetch response: get a Node Buffer.
        blob = await response.buffer();
      } else {
        /* istanbul ignore next */
        blob = await response.blob();
      }

      let data;
      if (opts.binary) {
        // Tag the binary with its MIME type unless `type` is a
        // read-only accessor on the prototype.
        const typeFieldDescriptor = Object.getOwnPropertyDescriptor(blob.__proto__, 'type');
        if (!typeFieldDescriptor || typeFieldDescriptor.set) {
          blob.type = att.content_type;
        }
        data = blob;
      } else {
        data = await new Promise(function (resolve) {
          blobToBase64(blob, resolve);
        });
      }

      // Turn the stub into a fully materialized attachment.
      delete att.stub;
      delete att.length;
      att.data = data;
    }

    const promiseFactories = filenames.map(function (filename) {
      return function () {
        return fetchData(filename);
      };
    });

    // This limits the number of parallel xhr requests to 5 any time
    // to avoid issues with maximum browser request limits
    return pool(promiseFactories, 5);
  }

  // open_revs responses are arrays of {ok: doc} entries; plain gets are
  // a single doc.
  function fetchAllAttachments(docOrDocs) {
    if (Array.isArray(docOrDocs)) {
      return Promise.all(docOrDocs.map(function (doc) {
        if (doc.ok) {
          return fetchAttachments(doc.ok);
        }
      }));
    }
    return fetchAttachments(docOrDocs);
  }

  const url = genDBUrl(host, id + paramsToStr(params));
  try {
    const res$$1 = await fetchJSON(url);
    if (opts.attachments) {
      await fetchAllAttachments(res$$1.data);
    }
    callback(null, res$$1.data);
  } catch (error) {
    // Attach the failing doc id so callers can report it.
    error.docId = id;
    callback(error);
  }
});
7226
7227
// Delete the document given by doc from the database given by host.
// Supports both (id, rev, opts, cb) and (doc, opts, cb) call styles.
api.remove = adapterFun$$1('remove', async function (docOrId, optsOrRev, opts, cb) {
  let doc;
  if (typeof optsOrRev === 'string') {
    // id, rev, opts, callback style
    doc = {
      _id: docOrId,
      _rev: optsOrRev
    };
    if (typeof opts === 'function') {
      cb = opts;
      opts = {};
    }
  } else {
    // doc, opts, callback style
    doc = docOrId;
    if (typeof optsOrRev === 'function') {
      cb = optsOrRev;
      opts = {};
    } else {
      cb = opts;
      opts = optsOrRev;
    }
  }

  // Prefer the doc's own revision, falling back to opts.rev.
  const rev$$1 = (doc._rev || opts.rev);
  const url = genDBUrl(host, encodeDocId(doc._id)) + '?rev=' + rev$$1;

  try {
    const result = await fetchJSON(url, {method: 'DELETE'});
    cb(null, result.data);
  } catch (error) {
    cb(error);
  }
});
7263
// Percent-encode an attachment id while preserving '/' path separators.
function encodeAttachmentId(attachmentId) {
  const segments = attachmentId.split("/");
  return segments.map(encodeURIComponent).join("/");
}
7267
// Get the attachment
// Fetches the raw attachment (optionally at a specific rev) and returns a
// Node Buffer or browser Blob, tagged with the response's content type.
api.getAttachment = adapterFun$$1('getAttachment', async function (docId, attachmentId,
                                                              opts, callback) {
  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }
  const params = opts.rev ? ('?rev=' + opts.rev) : '';
  const url = genDBUrl(host, encodeDocId(docId)) + '/' +
    encodeAttachmentId(attachmentId) + params;
  let contentType;
  try {
    const response = await ourFetch(url, {method: 'GET'});

    // Non-2xx: hand the raw response to the catch below.
    if (!response.ok) {
      throw response;
    }

    contentType = response.headers.get('content-type');
    let blob;
    if (typeof process !== 'undefined' && !process.browser && typeof response.buffer === 'function') {
      // node-fetch exposes buffer(); use it to get a Node Buffer.
      blob = await response.buffer();
    } else {
      /* istanbul ignore next */
      blob = await response.blob();
    }

    // TODO: also remove
    if (typeof process !== 'undefined' && !process.browser) {
      // Tag the binary with its MIME type unless `type` is a read-only
      // accessor on the prototype.
      const typeFieldDescriptor = Object.getOwnPropertyDescriptor(blob.__proto__, 'type');
      if (!typeFieldDescriptor || typeFieldDescriptor.set) {
        blob.type = contentType;
      }
    }
    callback(null, blob);
  } catch (err) {
    callback(err);
  }
});
7307
// Remove the attachment given by the id and rev: issues a DELETE against
// the attachment URL at the supplied revision.
api.removeAttachment = adapterFun$$1('removeAttachment', async function (
  docId,
  attachmentId,
  rev$$1,
  callback,
) {
  const attachmentPath = encodeDocId(docId) + '/' + encodeAttachmentId(attachmentId);
  const url = genDBUrl(host, attachmentPath) + '?rev=' + rev$$1;

  try {
    const result = await fetchJSON(url, {method: 'DELETE'});
    callback(null, result.data);
  } catch (error) {
    callback(error);
  }
});
7324
// Add the attachment given by blob and its contentType property
// to the document with the given id, the revision given by rev, and
// add it to the database given by host.
api.putAttachment = adapterFun$$1('putAttachment', async function (
  docId,
  attachmentId,
  rev$$1,
  blob,
  type,
  callback,
) {
  // (docId, attachmentId, blob, type, cb) call style: shift args left,
  // leaving rev unset.
  if (typeof type === 'function') {
    callback = type;
    type = blob;
    blob = rev$$1;
    rev$$1 = null;
  }
  const id = encodeDocId(docId) + '/' + encodeAttachmentId(attachmentId);
  let url = genDBUrl(host, id);
  if (rev$$1) {
    url += '?rev=' + rev$$1;
  }

  if (typeof blob === 'string') {
    // input is assumed to be a base64 string
    let binary;
    try {
      binary = thisAtob(blob);
    } catch (err) {
      return callback(createError(BAD_ARG,
        'Attachment is not a valid base64 string'));
    }
    blob = binary ? binStringToBluffer(binary, type) : '';
  }

  try {
    // Add the attachment
    const result = await fetchJSON(url, {
      headers: new nodeFetch.Headers({'Content-Type': type}),
      method: 'PUT',
      body: blob
    });
    callback(null, result.data);
  } catch (error) {
    callback(error);
  }
});
7372
// Update/create multiple documents given by req in the database
// given by host.
api._bulkDocs = async function (req, opts, callback) {
  // If new_edits=false then it prevents the database from creating
  // new revision numbers for the documents. Instead it just uses
  // the old ones. This is used in database replication.
  req.new_edits = opts.new_edits;

  try {
    await setup();
    // Normalize attachments (e.g. inline binary data) before shipping.
    await Promise.all(req.docs.map(preprocessAttachments$1));

    // Update/create the documents
    const result = await fetchJSON(genDBUrl(host, '_bulk_docs'), {
      method: 'POST',
      body: JSON.stringify(req)
    });
    callback(null, result.data);
  } catch (error) {
    callback(error);
  }
};
7395
// Update/create a single document via PUT. Any error is tagged with the
// failing document's id before reaching the callback.
api._put = async function (doc, opts, callback) {
  try {
    await setup();
    await preprocessAttachments$1(doc);

    const docUrl = genDBUrl(host, encodeDocId(doc._id));
    const result = await fetchJSON(docUrl, {
      body: JSON.stringify(doc),
      method: 'PUT'
    });
    callback(null, result.data);
  } catch (error) {
    error.docId = doc && doc._id;
    callback(error);
  }
};
7412
7413
// Get a listing of the documents in the database given
// by host and ordered by increasing id.
api.allDocs = adapterFun$$1('allDocs', async function (opts, callback) {
  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }
  opts = clone(opts);

  // List of parameters to add to the GET request
  const params = {};
  let body;
  let method = 'GET';

  if (opts.conflicts) {
    params.conflicts = true;
  }

  /* istanbul ignore if */
  if (opts.update_seq) {
    params.update_seq = true;
  }

  if (opts.descending) {
    params.descending = true;
  }

  if (opts.include_docs) {
    params.include_docs = true;
  }

  // added in CouchDB 1.6.0
  if (opts.attachments) {
    params.attachments = true;
  }

  if (opts.key) {
    params.key = JSON.stringify(opts.key);
  }

  // start_key/end_key are aliases for startkey/endkey.
  if (opts.start_key) {
    opts.startkey = opts.start_key;
  }

  if (opts.startkey) {
    params.startkey = JSON.stringify(opts.startkey);
  }

  if (opts.end_key) {
    opts.endkey = opts.end_key;
  }

  if (opts.endkey) {
    params.endkey = JSON.stringify(opts.endkey);
  }

  if (typeof opts.inclusive_end !== 'undefined') {
    params.inclusive_end = !!opts.inclusive_end;
  }

  if (typeof opts.limit !== 'undefined') {
    params.limit = opts.limit;
  }

  if (typeof opts.skip !== 'undefined') {
    params.skip = opts.skip;
  }

  const paramStr = paramsToStr(params);

  // Key lists go in a POST body to avoid over-long URLs.
  if (typeof opts.keys !== 'undefined') {
    method = 'POST';
    body = {keys: opts.keys};
  }

  try {
    const result = await fetchJSON(genDBUrl(host, '_all_docs' + paramStr), {
      method,
      body: JSON.stringify(body)
    });
    if (opts.include_docs && opts.attachments && opts.binary) {
      // Convert base64 attachment data into Blobs/Buffers in place.
      result.data.rows.forEach(readAttachmentsAsBlobOrBuffer);
    }
    callback(null, result.data);
  } catch (error) {
    callback(error);
  }
});
7502
// Get a list of changes made to documents in the database given by host.
// TODO According to the README, there should be two other methods here,
// api.changes.addListener and api.changes.removeListener.
api._changes = function (opts) {

  // We internally page the results of a changes request, this means
  // if there is a large set of changes to be returned we can start
  // processing them quicker instead of waiting on the entire
  // set of changes to return and attempting to process them at once
  const batchSize = 'batch_size' in opts ? opts.batch_size : CHANGES_BATCH_SIZE;

  opts = clone(opts);

  if (opts.continuous && !('heartbeat' in opts)) {
    opts.heartbeat = DEFAULT_HEARTBEAT;
  }

  let requestTimeout = ('timeout' in opts) ? opts.timeout : 30 * 1000;

  // ensure CHANGES_TIMEOUT_BUFFER applies
  if ('timeout' in opts && opts.timeout &&
    (requestTimeout - opts.timeout) < CHANGES_TIMEOUT_BUFFER) {
    requestTimeout = opts.timeout + CHANGES_TIMEOUT_BUFFER;
  }

  /* istanbul ignore if */
  if ('heartbeat' in opts && opts.heartbeat &&
    (requestTimeout - opts.heartbeat) < CHANGES_TIMEOUT_BUFFER) {
    requestTimeout = opts.heartbeat + CHANGES_TIMEOUT_BUFFER;
  }

  // NOTE(review): requestTimeout is computed above but never applied to the
  // fetch below — presumably vestigial from an XHR implementation; confirm.

  const params = {};
  if ('timeout' in opts && opts.timeout) {
    params.timeout = opts.timeout;
  }

  // limit === false means "unlimited"; leftToFetch counts down per batch.
  const limit = (typeof opts.limit !== 'undefined') ? opts.limit : false;
  let leftToFetch = limit;

  if (opts.style) {
    params.style = opts.style;
  }

  // A client-side filter function needs the full docs to filter on.
  if (opts.include_docs || opts.filter && typeof opts.filter === 'function') {
    params.include_docs = true;
  }

  if (opts.attachments) {
    params.attachments = true;
  }

  if (opts.continuous) {
    params.feed = 'longpoll';
  }

  if (opts.seq_interval) {
    params.seq_interval = opts.seq_interval;
  }

  if (opts.conflicts) {
    params.conflicts = true;
  }

  if (opts.descending) {
    params.descending = true;
  }

  /* istanbul ignore if */
  if (opts.update_seq) {
    params.update_seq = true;
  }

  if ('heartbeat' in opts) {
    // If the heartbeat value is false, it disables the default heartbeat
    if (opts.heartbeat) {
      params.heartbeat = opts.heartbeat;
    }
  }

  if (opts.filter && typeof opts.filter === 'string') {
    params.filter = opts.filter;
  }

  if (opts.view && typeof opts.view === 'string') {
    params.filter = '_view';
    params.view = opts.view;
  }

  // If opts.query_params exists, pass it through to the changes request.
  // These parameters may be used by the filter on the source database.
  if (opts.query_params && typeof opts.query_params === 'object') {
    for (const param_name in opts.query_params) {
      /* istanbul ignore else */
      if (Object.prototype.hasOwnProperty.call(opts.query_params, param_name)) {
        params[param_name] = opts.query_params[param_name];
      }
    }
  }

  let method = 'GET';
  let body;

  if (opts.doc_ids) {
    // set this automagically for the user; it's annoying that couchdb
    // requires both a "filter" and a "doc_ids" param.
    params.filter = '_doc_ids';
    method = 'POST';
    body = {doc_ids: opts.doc_ids };
  }
  /* istanbul ignore next */
  else if (opts.selector) {
    // set this automagically for the user, similar to above
    params.filter = '_selector';
    method = 'POST';
    body = {selector: opts.selector };
  }

  // Used by cancel() to abort an in-flight longpoll request.
  const controller = new AbortController();
  let lastFetchedSeq;

  // Get all the changes starting with the one immediately after the
  // sequence number given by since.
  const fetchData = async function (since, callback) {
    if (opts.aborted) {
      return;
    }
    params.since = since;
    // "since" can be any kind of json object in Cloudant/CouchDB 2.x
    /* istanbul ignore next */
    if (typeof params.since === "object") {
      params.since = JSON.stringify(params.since);
    }

    if (opts.descending) {
      if (limit) {
        params.limit = leftToFetch;
      }
    } else {
      params.limit = (!limit || leftToFetch > batchSize) ?
        batchSize : leftToFetch;
    }

    // Set the options for the ajax call
    const url = genDBUrl(host, '_changes' + paramsToStr(params));
    const fetchOpts = {
      signal: controller.signal,
      method,
      body: JSON.stringify(body)
    };
    lastFetchedSeq = since;

    /* istanbul ignore if */
    if (opts.aborted) {
      return;
    }

    // Get the changes
    try {
      await setup();
      const result = await fetchJSON(url, fetchOpts);
      callback(null, result.data);
    } catch (error) {
      callback(error);
    }
  };

  // If opts.since exists, get all the changes from the sequence
  // number given by opts.since. Otherwise, get all the changes
  // from the sequence number 0.
  const results = {results: []};

  // Handles one batch of changes: filters/dispatches each row, then either
  // queues the next fetch or completes.
  const fetched = function (err, res$$1) {
    if (opts.aborted) {
      return;
    }
    let raw_results_length = 0;
    // If the result of the ajax call (res) contains changes (res.results)
    if (res$$1 && res$$1.results) {
      raw_results_length = res$$1.results.length;
      results.last_seq = res$$1.last_seq;
      let pending = null;
      let lastSeq = null;
      // Attach 'pending' property if server supports it (CouchDB 2.0+)
      /* istanbul ignore if */
      if (typeof res$$1.pending === 'number') {
        pending = res$$1.pending;
      }
      if (typeof results.last_seq === 'string' || typeof results.last_seq === 'number') {
        lastSeq = results.last_seq;
      }
      // For each change
      const req = {};
      req.query = opts.query_params;
      res$$1.results = res$$1.results.filter(function (c) {
        leftToFetch--;
        const ret = filterChange(opts)(c);
        if (ret) {
          if (opts.include_docs && opts.attachments && opts.binary) {
            readAttachmentsAsBlobOrBuffer(c);
          }
          if (opts.return_docs) {
            results.results.push(c);
          }
          opts.onChange(c, pending, lastSeq);
        }
        return ret;
      });
    } else if (err) {
      // In case of an error, stop listening for changes and call
      // opts.complete
      opts.aborted = true;
      opts.complete(err);
      return;
    }

    // The changes feed may have timed out with no results
    // if so reuse last update sequence
    if (res$$1 && res$$1.last_seq) {
      lastFetchedSeq = res$$1.last_seq;
    }

    const finished = (limit && leftToFetch <= 0) ||
      (res$$1 && raw_results_length < batchSize) ||
      (opts.descending);

    if ((opts.continuous && !(limit && leftToFetch <= 0)) || !finished) {
      // Queue a call to fetch again with the newest sequence number
      nextTick(function () { fetchData(lastFetchedSeq, fetched); });
    } else {
      // We're done, call the callback
      opts.complete(null, results);
    }
  };

  fetchData(opts.since || 0, fetched);

  // Return a method to cancel this method from processing any more
  return {
    cancel: function () {
      opts.aborted = true;
      controller.abort();
    }
  };
};
7747
// Given a set of document/revision IDs (given by req), gets the subset of
// those that do NOT correspond to revisions stored in the database.
// See http://wiki.apache.org/couchdb/HttpPostRevsDiff
api.revsDiff = adapterFun$$1('revsDiff', async function (req, opts, callback) {
  // Options are optional; shift the callback when they were omitted.
  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }

  try {
    // Ask the server which of the supplied revisions it is missing.
    const response = await fetchJSON(genDBUrl(host, '_revs_diff'), {
      body: JSON.stringify(req),
      method: 'POST'
    });
    callback(null, response.data);
  } catch (err) {
    callback(err);
  }
});
7769
// The HTTP adapter holds no local resources, so closing is a no-op.
api._close = function (callback) {
  callback();
};
7773
// DELETE the remote database. A 404 is treated as success, making destroy
// idempotent; any other error is passed through.
api._destroy = async function (options, callback) {
  try {
    const response = await fetchJSON(genDBUrl(host, ''), {method: 'DELETE'});
    callback(null, response);
  } catch (error) {
    error.status === 404 ? callback(null, {ok: true}) : callback(error);
  }
};
7786}
7787
// HttpPouch is a valid adapter.
HttpPouch.valid = () => true;
7792
// Register the HTTP adapter for both http:// and https:// URL schemes.
function HttpPouch$1 (PouchDB) {
  for (const scheme of ['http', 'https']) {
    PouchDB.adapter(scheme, HttpPouch, false);
  }
}
7797
// 400-level error raised when a view/query definition cannot be parsed.
class QueryParseError extends Error {
  constructor(message) {
    super(message);
    Object.assign(this, {
      status: 400,
      name: 'query_parse_error',
      message,
      error: true,
    });
    try {
      Error.captureStackTrace(this, QueryParseError);
    } catch (e) {}
  }
}
7810
// 404-level error for missing documents/views.
class NotFoundError$1 extends Error {
  constructor(message) {
    super(message);
    Object.assign(this, {
      status: 404,
      name: 'not_found',
      message,
      error: true,
    });
    try {
      Error.captureStackTrace(this, NotFoundError$1);
    } catch (e) {}
  }
}
7823
// 500-level error raised when a built-in reduce gets invalid map values.
class BuiltInError extends Error {
  constructor(message) {
    super(message);
    Object.assign(this, {
      status: 500,
      name: 'invalid_value',
      message,
      error: true,
    });
    try {
      Error.captureStackTrace(this, BuiltInError);
    } catch (e) {}
  }
}
7836
// Mirror a promise into an optional node-style callback without disturbing
// the promise itself; delivery is deferred via nextTick. Always returns
// the original promise.
function promisedCallback(promise, callback) {
  if (!callback) {
    return promise;
  }
  promise.then(
    (result) => nextTick(() => callback(null, result)),
    (reason) => nextTick(() => callback(reason))
  );
  return promise;
}
7851
// Adapt a promise-returning function so it also accepts a trailing
// node-style callback. Note: the last argument is always removed before
// invoking `fun`, whether or not it is a function. Returns the promise.
function callbackify(fun) {
  return function (...args) {
    const maybeCallback = args.pop();
    const promise = fun.apply(this, args);
    if (typeof maybeCallback === 'function') {
      promisedCallback(promise, maybeCallback);
    }
    return promise;
  };
}
7862
// Promise finally util similar to Q.finally: run finalPromiseFactory after
// `promise` settles, then propagate the original value or rejection.
async function fin(promise, finalPromiseFactory) {
  try {
    const value = await promise;
    await finalPromiseFactory();
    return value;
  } catch (reason) {
    await finalPromiseFactory();
    throw reason;
  }
}
7875
// Wrap promiseFactory so invocations are serialized through `queue`,
// preserving the caller's `this` and arguments.
function sequentialize(queue, promiseFactory) {
  return function (...args) {
    return queue.add(() => promiseFactory.apply(this, args));
  };
}
7885
// uniq an array of strings, order not guaranteed
// similar to underscore/lodash _.uniq
function uniq(arr) {
  // A Set de-duplicates while iterating in insertion order — the same
  // order the original Set+forEach copy produced, without the manual loop.
  return [...new Set(arr)];
}
7897
// Return the keys of a Map as an array, in insertion order.
function mapToKeysArray(map) {
  // Map exposes a keys() iterator; Array.from replaces the manual
  // forEach-with-index copy.
  return Array.from(map.keys());
}
7906
// Build the standard BuiltInError for a built-in reduce function that
// received non-numeric map values.
function createBuiltInError(name) {
  const message = `builtin ${name} function requires map values to be numbers or number arrays`;
  return new BuiltInError(message);
}
7913
// Built-in `_sum` reduce. Accepts numbers and arrays of numbers; a mix
// switches the accumulator to element-wise array summation (a plain number
// then adds into slot 0). Throws via createBuiltInError on anything else.
// NOTE: this function's source is injected into a VM via toString(), so it
// must stay self-contained apart from the `createBuiltInError` name.
function sum(values) {
  let total = 0;
  for (let i = 0; i < values.length; i++) {
    const value = values[i];
    if (typeof value === 'number') {
      if (typeof total === 'number') {
        total += value;
      } else {
        // number folded into an array accumulator goes into slot 0
        total[0] += value;
      }
    } else if (Array.isArray(value)) {
      // lists of numbers are also allowed, sum them separately
      if (typeof total === 'number') {
        total = [total];
      }
      for (let j = 0; j < value.length; j++) {
        const element = value[j];
        if (typeof element !== 'number') {
          throw createBuiltInError('_sum');
        } else if (typeof total[j] === 'undefined') {
          total.push(element);
        } else {
          total[j] += element;
        }
      }
    } else { // not array/number
      throw createBuiltInError('_sum');
    }
  }
  return total;
}
7943
// Inside of 'vm' for Node, we need a way to translate a pseudo-error
// back into a real error once it's out of the VM.
// NOTE: this function's source is injected into the VM via toString(),
// so it must remain fully self-contained.
function createBuiltInErrorInVm(name) {
  const marker = {
    builtInError: true,
    name: name
  };
  return marker;
}
7952
// Rebuild a real BuiltInError from the marker object produced inside the VM.
function convertToTrueError(err) {
  return createBuiltInError(err.name);
}
7956
// True when `obj` is a VM-produced pseudo-error marker. Falsy inputs are
// passed through unchanged (matching `obj && obj.builtInError`).
function isBuiltInError(obj) {
  if (!obj) {
    return obj;
  }
  return obj.builtInError;
}
7960
// All of this vm hullaballoo is to be able to run arbitrary code in a sandbox
// for security reasons.
// Builds a self-contained script per call: stringified helpers plus the
// user function applied to JSON-serialized arguments. Emits are collected
// inside the VM and replayed through the real `emit` afterwards.
function evalFunctionInVm(func, emit) {
  return function (arg1, arg2, arg3) {
    var code = '(function() {"use strict";' +
      'var createBuiltInError = ' + createBuiltInErrorInVm.toString() + ';' +
      'var sum = ' + sum.toString() + ';' +
      'var log = function () {};' +
      'var isArray = Array.isArray;' +
      'var toJSON = JSON.parse;' +
      'var __emitteds__ = [];' +
      'var emit = function (key, value) {__emitteds__.push([key, value]);};' +
      'var __result__ = (' +
      // strip a trailing semicolon so the function source parses as an expression
      func.replace(/;\s*$/, '') + ')' + '(' +
      JSON.stringify(arg1) + ',' +
      JSON.stringify(arg2) + ',' +
      JSON.stringify(arg3) + ');' +
      'return {result: __result__, emitteds: __emitteds__};' +
      '})()';

    var output = vm.runInNewContext(code);

    output.emitteds.forEach(function (emitted) {
      emit(emitted[0], emitted[1]);
    });
    // Pseudo-errors created inside the VM become real BuiltInErrors here.
    if (isBuiltInError(output.result)) {
      output.result = convertToTrueError(output.result);
    }
    return output.result;
  };
}
7992
// `log` and `toJSON` mirror the helpers exposed inside the VM sandbox,
// for the non-VM code path.
var log = guardedConsole.bind(null, 'log');
var toJSON = JSON.parse;

// The "stringify, then execute in a VM" strategy totally breaks Istanbul due
// to missing __coverage global objects. As a solution, export different
// code during coverage testing and during regular execution.
// Note that this doesn't get shipped to consumers because Rollup replaces it
// with rollup-plugin-replace, so false is replaced with `false`
var evalFunc;
/* istanbul ignore else */
{
  evalFunc = evalFunctionInVm;
}

var evalFunction = evalFunc;
8008
8009/*
8010 * Simple task queue to sequentialize actions. Assumes
8011 * callbacks will eventually fire (once).
8012 */
8013
8014class TaskQueue$1 {
8015 constructor() {
8016 this.promise = Promise.resolve();
8017 }
8018
8019 add(promiseFactory) {
8020 this.promise = this.promise
8021 // just recover
8022 .catch(() => { })
8023 .then(() => promiseFactory());
8024 return this.promise;
8025 }
8026
8027 finish() {
8028 return this.promise;
8029 }
8030}
8031
// Serialize a map/reduce definition for signature purposes. Any falsy
// input yields 'undefined' (backwards compat for empty reduce); functions
// and strings keep their source form; everything else (e.g. mango query
// objects) is JSON-stringified.
function stringify(input) {
  if (!input) {
    return 'undefined'; // backwards compat for empty reduce
  }
  const kind = typeof input;
  if (kind === 'function' || kind === 'string') {
    // mapreduce maps and built-in reduce names pass through as-is
    return input.toString();
  }
  return JSON.stringify(input);
}
8050
/* create a string signature for a view so we can cache it and uniq it */
function createViewSignature(mapFun, reduceFun) {
  // the "undefined" part is for backwards compatibility
  return `${stringify(mapFun)}${stringify(reduceFun)}undefined`;
}
8056
// Create (or fetch from cache) the dependent database that materializes
// the view for (mapFun, reduceFun). Also records the dependency in a
// _local doc on the source DB so view data can be cleaned up later
// (e.g. when the _design doc is deleted).
async function createView(sourceDB, viewName, mapFun, reduceFun, temporary, localDocName) {
  const viewSignature = createViewSignature(mapFun, reduceFun);

  let cachedViews;
  if (!temporary) {
    // cache this to ensure we don't try to update the same view twice
    cachedViews = sourceDB._cachedViews = sourceDB._cachedViews || {};
    if (cachedViews[viewSignature]) {
      return cachedViews[viewSignature];
    }
  }

  const promiseForView = sourceDB.info().then(async function (info) {
    // Temporary views share one db name; persistent ones get an md5 suffix.
    const depDbName = info.db_name + '-mrview-' +
      (temporary ? 'temp' : stringMd5(viewSignature));

    // save the view name in the source db so it can be cleaned up if necessary
    // (e.g. when the _design doc is deleted, remove all associated view data)
    function diffFunction(doc) {
      doc.views = doc.views || {};
      let fullViewName = viewName;
      if (fullViewName.indexOf('/') === -1) {
        fullViewName = viewName + '/' + viewName;
      }
      const depDbs = doc.views[fullViewName] = doc.views[fullViewName] || {};
      /* istanbul ignore if */
      if (depDbs[depDbName]) {
        return; // no update necessary
      }
      depDbs[depDbName] = true;
      return doc;
    }
    await upsert(sourceDB, '_local/' + localDocName, diffFunction);
    const res$$1 = await sourceDB.registerDependentDatabase(depDbName);
    const db = res$$1.db;
    db.auto_compaction = true;
    const view = {
      name: depDbName,
      db,
      sourceDB,
      adapter: sourceDB.adapter,
      mapFun,
      reduceFun
    };

    // Resume indexing from the last recorded sequence, if any.
    let lastSeqDoc;
    try {
      lastSeqDoc = await view.db.get('_local/lastSeq');
    } catch (err) {
      /* istanbul ignore if */
      if (err.status !== 404) {
        throw err;
      }
    }

    view.seq = lastSeqDoc ? lastSeqDoc.seq : 0;
    if (cachedViews) {
      // Drop the cache entry when the view db is destroyed.
      view.db.once('destroyed', function () {
        delete cachedViews[viewSignature];
      });
    }
    return view;
  });

  if (cachedViews) {
    cachedViews[viewSignature] = promiseForView;
  }
  return promiseForView;
}
8126
// one serialization queue per persistent view, lazily created in getQueue
const persistentQueues = {};
// single shared queue for all temporary views
const tempViewQueue = new TaskQueue$1();
// default number of changes pulled from the source db per indexing batch
const CHANGES_BATCH_SIZE$1 = 50;
8130
/**
 * Split a view reference into [ddocName, viewName].
 * 'ddoc/view' → ['ddoc', 'view']; a bare 'view' implies the design doc
 * shares the view's name.
 */
function parseViewName(name) {
  if (name.indexOf('/') === -1) {
    return [name, name];
  }
  return name.split('/');
}
8136
/**
 * True only when the doc has a single first-generation revision
 * (rev starts with '1-') and no other leafs.
 */
function isGenOne(changes) {
  if (changes.length !== 1) {
    return false;
  }
  return /^1-/.test(changes[0].rev);
}
8142
/**
 * Surface a map/reduce error as an 'error' event on the db. If emitting
 * itself throws (presumably because no 'error' listener is attached),
 * fall back to logging debugging advice instead of crashing.
 */
function emitError(db, e, data) {
  try {
    db.emit('error', e);
  } catch (err) {
    const advice =
      'The user\'s map/reduce function threw an uncaught error.\n' +
      'You can debug this error by doing:\n' +
      'myDatabase.on(\'error\', function (err) { debugger; });\n' +
      'Please double-check your map/reduce function.';
    guardedConsole('error', advice);
    guardedConsole('error', e, data);
  }
}
8155
8156/**
8157 * Returns an "abstract" mapreduce object of the form:
8158 *
8159 * {
8160 * query: queryFun,
8161 * viewCleanup: viewCleanupFun
8162 * }
8163 *
8164 * Arguments are:
8165 *
8166 * localDoc: string
8167 * This is for the local doc that gets saved in order to track the
8168 * "dependent" DBs and clean them up for viewCleanup. It should be
8169 * unique, so that indexer plugins don't collide with each other.
8170 * mapper: function (mapFunDef, emit)
8171 * Returns a map function based on the mapFunDef, which in the case of
8172 * normal map/reduce is just the de-stringified function, but may be
8173 * something else, such as an object in the case of pouchdb-find.
8174 * reducer: function (reduceFunDef)
8175 * Ditto, but for reducing. Modules don't have to support reducing
8176 * (e.g. pouchdb-find).
8177 * ddocValidator: function (ddoc, viewName)
8178 * Throws an error if the ddoc or viewName is not valid.
8179 * This could be a way to communicate to the user that the configuration for the
8180 * indexer is invalid.
8181 */
8182function createAbstractMapReduce(localDocName, mapper, reducer, ddocValidator) {
8183
8184 function tryMap(db, fun, doc) {
8185 // emit an event if there was an error thrown by a map function.
8186 // putting try/catches in a single function also avoids deoptimizations.
8187 try {
8188 fun(doc);
8189 } catch (e) {
8190 emitError(db, e, {fun, doc});
8191 }
8192 }
8193
8194 function tryReduce(db, fun, keys, values, rereduce) {
8195 // same as above, but returning the result or an error. there are two separate
8196 // functions to avoid extra memory allocations since the tryCode() case is used
8197 // for custom map functions (common) vs this function, which is only used for
8198 // custom reduce functions (rare)
8199 try {
8200 return {output : fun(keys, values, rereduce)};
8201 } catch (e) {
8202 emitError(db, e, {fun, keys, values, rereduce});
8203 return {error: e};
8204 }
8205 }
8206
8207 function sortByKeyThenValue(x, y) {
8208 const keyCompare = collate(x.key, y.key);
8209 return keyCompare !== 0 ? keyCompare : collate(x.value, y.value);
8210 }
8211
8212 function sliceResults(results, limit, skip) {
8213 skip = skip || 0;
8214 if (typeof limit === 'number') {
8215 return results.slice(skip, limit + skip);
8216 } else if (skip > 0) {
8217 return results.slice(skip);
8218 }
8219 return results;
8220 }
8221
8222 function rowToDocId(row) {
8223 const val = row.value;
8224 // Users can explicitly specify a joined doc _id, or it
8225 // defaults to the doc _id that emitted the key/value.
8226 const docId = (val && typeof val === 'object' && val._id) || row.id;
8227 return docId;
8228 }
8229
8230 function readAttachmentsAsBlobOrBuffer(res$$1) {
8231 for (const row of res$$1.rows) {
8232 const atts = row.doc && row.doc._attachments;
8233 if (!atts) {
8234 continue;
8235 }
8236 for (const filename of Object.keys(atts)) {
8237 const att = atts[filename];
8238 atts[filename].data = b64ToBluffer(att.data, att.content_type);
8239 }
8240 }
8241 }
8242
8243 function postprocessAttachments(opts) {
8244 return function (res$$1) {
8245 if (opts.include_docs && opts.attachments && opts.binary) {
8246 readAttachmentsAsBlobOrBuffer(res$$1);
8247 }
8248 return res$$1;
8249 };
8250 }
8251
8252 function addHttpParam(paramName, opts, params, asJson) {
8253 // add an http param from opts to params, optionally json-encoded
8254 let val = opts[paramName];
8255 if (typeof val !== 'undefined') {
8256 if (asJson) {
8257 val = encodeURIComponent(JSON.stringify(val));
8258 }
8259 params.push(paramName + '=' + val);
8260 }
8261 }
8262
8263 function coerceInteger(integerCandidate) {
8264 if (typeof integerCandidate !== 'undefined') {
8265 const asNumber = Number(integerCandidate);
8266 // prevents e.g. '1foo' or '1.1' being coerced to 1
8267 if (!isNaN(asNumber) && asNumber === parseInt(integerCandidate, 10)) {
8268 return asNumber;
8269 } else {
8270 return integerCandidate;
8271 }
8272 }
8273 }
8274
8275 function coerceOptions(opts) {
8276 opts.group_level = coerceInteger(opts.group_level);
8277 opts.limit = coerceInteger(opts.limit);
8278 opts.skip = coerceInteger(opts.skip);
8279 return opts;
8280 }
8281
8282 function checkPositiveInteger(number) {
8283 if (number) {
8284 if (typeof number !== 'number') {
8285 return new QueryParseError(`Invalid value for integer: "${number}"`);
8286 }
8287 if (number < 0) {
8288 return new QueryParseError(`Invalid value for positive integer: "${number}"`);
8289 }
8290 }
8291 }
8292
8293 function checkQueryParseError(options, fun) {
8294 const startkeyName = options.descending ? 'endkey' : 'startkey';
8295 const endkeyName = options.descending ? 'startkey' : 'endkey';
8296
8297 if (typeof options[startkeyName] !== 'undefined' &&
8298 typeof options[endkeyName] !== 'undefined' &&
8299 collate(options[startkeyName], options[endkeyName]) > 0) {
8300 throw new QueryParseError('No rows can match your key range, ' +
8301 'reverse your start_key and end_key or set {descending : true}');
8302 } else if (fun.reduce && options.reduce !== false) {
8303 if (options.include_docs) {
8304 throw new QueryParseError('{include_docs:true} is invalid for reduce');
8305 } else if (options.keys && options.keys.length > 1 &&
8306 !options.group && !options.group_level) {
8307 throw new QueryParseError('Multi-key fetches for reduce views must use ' +
8308 '{group: true}');
8309 }
8310 }
8311 for (const optionName of ['group_level', 'limit', 'skip']) {
8312 const error = checkPositiveInteger(options[optionName]);
8313 if (error) {
8314 throw error;
8315 }
8316 }
8317 }
8318
  // Run the query against a remote CouchDB-compatible server over HTTP:
  // a design-doc view when `fun` is a string, otherwise a temporary view
  // POSTed to _temp_view.
  async function httpQuery(db, fun, opts) {
    // List of parameters to add to the GET/POST request
    let params = [];
    let body;
    let method = 'GET';
    let ok;

    // If opts.reduce exists and is defined, then add it to the list
    // of parameters.
    // If reduce=false then the results are that of only the map function
    // not the final result of map and reduce.
    addHttpParam('reduce', opts, params);
    addHttpParam('include_docs', opts, params);
    addHttpParam('attachments', opts, params);
    addHttpParam('limit', opts, params);
    addHttpParam('descending', opts, params);
    addHttpParam('group', opts, params);
    addHttpParam('group_level', opts, params);
    addHttpParam('skip', opts, params);
    addHttpParam('stale', opts, params);
    addHttpParam('conflicts', opts, params);
    addHttpParam('startkey', opts, params, true);
    addHttpParam('start_key', opts, params, true);
    addHttpParam('endkey', opts, params, true);
    addHttpParam('end_key', opts, params, true);
    addHttpParam('inclusive_end', opts, params);
    addHttpParam('key', opts, params, true);
    addHttpParam('update_seq', opts, params);

    // Format the list of parameters into a valid URI query string
    params = params.join('&');
    params = params === '' ? '' : '?' + params;

    // If keys are supplied, issue a POST to circumvent GET query string limits
    // see http://wiki.apache.org/couchdb/HTTP_view_API#Querying_Options
    if (typeof opts.keys !== 'undefined') {
      const MAX_URL_LENGTH = 2000;
      // according to http://stackoverflow.com/a/417184/680742,
      // the de facto URL length limit is 2000 characters

      const keysAsString = `keys=${encodeURIComponent(JSON.stringify(opts.keys))}`;
      if (keysAsString.length + params.length + 1 <= MAX_URL_LENGTH) {
        // If the keys are short enough, do a GET. we do this to work around
        // Safari not understanding 304s on POSTs (see pouchdb/pouchdb#1239)
        params += (params[0] === '?' ? '&' : '?') + keysAsString;
      } else {
        method = 'POST';
        if (typeof fun === 'string') {
          body = {keys: opts.keys};
        } else { // fun is {map : mapfun}, so append to this
          fun.keys = opts.keys;
        }
      }
    }

    // We are referencing a query defined in the design doc
    if (typeof fun === 'string') {
      const parts = parseViewName(fun);

      const response = await db.fetch('_design/' + parts[0] + '/_view/' + parts[1] + params, {
        headers: new nodeFetch.Headers({'Content-Type': 'application/json'}),
        method,
        body: JSON.stringify(body)
      });
      ok = response.ok;
      // status = response.status;
      const result = await response.json();

      if (!ok) {
        result.status = response.status;
        throw generateErrorFromResponse(result);
      }

      // fail the entire request if the result contains an error
      for (const row of result.rows) {
        /* istanbul ignore if */
        if (row.value && row.value.error && row.value.error === "builtin_reduce_error") {
          throw new Error(row.reason);
        }
      }

      // convert attachments to Blob/Buffer when the caller asked for binary
      return new Promise(function (resolve) {
        resolve(result);
      }).then(postprocessAttachments(opts));
    }

    // We are using a temporary view, terrible for performance, good for testing
    body = body || {};
    for (const key of Object.keys(fun)) {
      if (Array.isArray(fun[key])) {
        body[key] = fun[key];
      } else {
        // functions must be serialized as source strings for the wire
        body[key] = fun[key].toString();
      }
    }

    const response = await db.fetch('_temp_view' + params, {
      headers: new nodeFetch.Headers({'Content-Type': 'application/json'}),
      method: 'POST',
      body: JSON.stringify(body)
    });

    ok = response.ok;
    // status = response.status;
    const result = await response.json();
    if (!ok) {
      result.status = response.status;
      throw generateErrorFromResponse(result);
    }

    return new Promise(function (resolve) {
      resolve(result);
    }).then(postprocessAttachments(opts));
  }
8433
8434 // custom adapters can define their own api._query
8435 // and override the default behavior
8436 /* istanbul ignore next */
8437 function customQuery(db, fun, opts) {
8438 return new Promise(function (resolve, reject) {
8439 db._query(fun, opts, function (err, res$$1) {
8440 if (err) {
8441 return reject(err);
8442 }
8443 resolve(res$$1);
8444 });
8445 });
8446 }
8447
8448 // custom adapters can define their own api._viewCleanup
8449 // and override the default behavior
8450 /* istanbul ignore next */
8451 function customViewCleanup(db) {
8452 return new Promise(function (resolve, reject) {
8453 db._viewCleanup(function (err, res$$1) {
8454 if (err) {
8455 return reject(err);
8456 }
8457 resolve(res$$1);
8458 });
8459 });
8460 }
8461
8462 function defaultsTo(value) {
8463 return function (reason) {
8464 /* istanbul ignore else */
8465 if (reason.status === 404) {
8466 return value;
8467 } else {
8468 throw reason;
8469 }
8470 };
8471 }
8472
  // returns a promise for a list of docs to update, based on the input docId.
  // the order doesn't matter, because post-3.2.0, bulkDocs
  // is an atomic operation in all three adapters.
  async function getDocsToPersist(docId, view, docIdsToChangesAndEmits) {
    const metaDocId = '_local/doc_' + docId;
    const defaultMetaDoc = {_id: metaDocId, keys: []};
    // docData = [indexableKeysToKeyValues (Map), changes (array)]
    const docData = docIdsToChangesAndEmits.get(docId);
    const indexableKeysToKeyValues = docData[0];
    const changes = docData[1];

    function getMetaDoc() {
      if (isGenOne(changes)) {
        // generation 1, so we can safely assume initial state
        // for performance reasons (avoids unnecessary GETs)
        return Promise.resolve(defaultMetaDoc);
      }
      return view.db.get(metaDocId).catch(defaultsTo(defaultMetaDoc));
    }

    function getKeyValueDocs(metaDoc) {
      if (!metaDoc.keys.length) {
        // no keys, no need for a lookup
        return Promise.resolve({rows: []});
      }
      return view.db.allDocs({
        keys: metaDoc.keys,
        include_docs: true
      });
    }

    // Diff previously-indexed key/value docs against the freshly emitted
    // ones: tombstone stale entries, update changed values, create docs
    // for new keys, and record the merged key list on the meta doc.
    function processKeyValueDocs(metaDoc, kvDocsRes) {
      const kvDocs = [];
      const oldKeys = new Set();

      for (const row of kvDocsRes.rows) {
        const doc = row.doc;
        if (!doc) { // deleted
          continue;
        }
        kvDocs.push(doc);
        oldKeys.add(doc._id);
        // a key no longer emitted for this doc gets deleted
        doc._deleted = !indexableKeysToKeyValues.has(doc._id);
        if (!doc._deleted) {
          const keyValue = indexableKeysToKeyValues.get(doc._id);
          if ('value' in keyValue) {
            doc.value = keyValue.value;
          }
        }
      }
      const newKeys = mapToKeysArray(indexableKeysToKeyValues);
      for (const key of newKeys) {
        if (!oldKeys.has(key)) {
          // new doc
          const kvDoc = {
            _id: key
          };
          const keyValue = indexableKeysToKeyValues.get(key);
          if ('value' in keyValue) {
            kvDoc.value = keyValue.value;
          }
          kvDocs.push(kvDoc);
        }
      }
      metaDoc.keys = uniq(newKeys.concat(metaDoc.keys));
      kvDocs.push(metaDoc);

      return kvDocs;
    }

    const metaDoc = await getMetaDoc();
    const keyValueDocs = await getKeyValueDocs(metaDoc);
    return processKeyValueDocs(metaDoc, keyValueDocs);
  }
8546
  // Copy the source db's current purgeSeq into the view db's
  // _local/purgeSeq doc, so getRecentPurges can skip already-seen purges.
  function updatePurgeSeq(view) {
    // with this approach, we just assume to have processed all missing purges and write the latest
    // purgeSeq into the _local/purgeSeq doc.
    return view.sourceDB.get('_local/purges').then(function (res$$1) {
      const purgeSeq = res$$1.purgeSeq;
      // look up the existing _rev (if any) so the put below replaces the
      // doc instead of conflicting
      return view.db.get('_local/purgeSeq').then(function (res$$1) {
        return res$$1._rev;
      })
      .catch(defaultsTo(undefined))
      .then(function (rev$$1) {
        return view.db.put({
          _id: '_local/purgeSeq',
          _rev: rev$$1,
          purgeSeq,
        });
      });
    }).catch(function (err) {
      // a 404 on _local/purges means nothing was ever purged: nothing to do
      if (err.status !== 404) {
        throw err;
      }
    });
  }
8569
8570 // updates all emitted key/value docs and metaDocs in the mrview database
8571 // for the given batch of documents from the source database
8572 function saveKeyValues(view, docIdsToChangesAndEmits, seq) {
8573 var seqDocId = '_local/lastSeq';
8574 return view.db.get(seqDocId)
8575 .catch(defaultsTo({_id: seqDocId, seq: 0}))
8576 .then(function (lastSeqDoc) {
8577 var docIds = mapToKeysArray(docIdsToChangesAndEmits);
8578 return Promise.all(docIds.map(function (docId) {
8579 return getDocsToPersist(docId, view, docIdsToChangesAndEmits);
8580 })).then(function (listOfDocsToPersist) {
8581 var docsToPersist = listOfDocsToPersist.flat();
8582 lastSeqDoc.seq = seq;
8583 docsToPersist.push(lastSeqDoc);
8584 // write all docs in a single operation, update the seq once
8585 return view.db.bulkDocs({docs : docsToPersist});
8586 })
8587 // TODO: this should be placed somewhere else, probably? we're querying both docs twice
8588 // (first time when getting the actual purges).
8589 .then(() => updatePurgeSeq(view));
8590 });
8591 }
8592
8593 function getQueue(view) {
8594 const viewName = typeof view === 'string' ? view : view.name;
8595 let queue = persistentQueues[viewName];
8596 if (!queue) {
8597 queue = persistentQueues[viewName] = new TaskQueue$1();
8598 }
8599 return queue;
8600 }
8601
8602 async function updateView(view, opts) {
8603 return sequentialize(getQueue(view), function () {
8604 return updateViewInQueue(view, opts);
8605 })();
8606 }
8607
  // Incrementally index the view: pull batches of changes (plus recent
  // purges) from the source db, run them through the map function, and
  // persist the emitted rows into the view db, advancing _local/lastSeq.
  async function updateViewInQueue(view, opts) {
    // bind the emit function once
    let mapResults;
    let doc;
    let taskId;

    // collects the output of the user map function for the doc currently
    // being processed (closes over mapResults/doc above)
    function emit(key, value) {
      const output = {id: doc._id, key: normalizeKey(key)};
      // Don't explicitly store the value unless it's defined and non-null.
      // This saves on storage space, because often people don't use it.
      if (typeof value !== 'undefined' && value !== null) {
        output.value = normalizeKey(value);
      }
      mapResults.push(output);
    }

    const mapFun = mapper(view.mapFun, emit);

    let currentSeq = view.seq || 0;

    // register a 'view_indexing' entry in the source db's active tasks
    function createTask() {
      return view.sourceDB.info().then(function (info) {
        taskId = view.sourceDB.activeTasks.add({
          name: 'view_indexing',
          total_items: info.update_seq - currentSeq,
        });
      });
    }

    // defers the actual write so it can be queued behind earlier batches
    function processChange(docIdsToChangesAndEmits, seq) {
      return function () {
        return saveKeyValues(view, docIdsToChangesAndEmits, seq);
      };
    }

    let indexed_docs = 0;
    const progress = {
      view: view.name,
      indexed_docs
    };
    view.sourceDB.emit('indexing', progress);

    const queue = new TaskQueue$1();

    // fetch the next batch of changes since currentSeq, plus any purges
    // not yet reflected in the view, and process them together
    async function processNextBatch() {
      const response = await view.sourceDB.changes({
        return_docs: true,
        conflicts: true,
        include_docs: true,
        style: 'all_docs',
        since: currentSeq,
        limit: opts.changes_batch_size
      });
      const purges = await getRecentPurges();
      return processBatch(response, purges);
    }

    // returns {docId, doc?} entries for docs purged since the purgeSeq
    // last recorded in the view db (-1 when never recorded)
    function getRecentPurges() {
      return view.db.get('_local/purgeSeq').then(function (res$$1) {
        return res$$1.purgeSeq;
      })
      .catch(defaultsTo(-1))
      .then(function (purgeSeq) {
        return view.sourceDB.get('_local/purges').then(function (res$$1) {
          const recentPurges = res$$1.purges.filter(function (purge, index) {
            return index > purgeSeq;
          }).map((purge) => purge.docId);

          const uniquePurges = recentPurges.filter(function (docId, index) {
            return recentPurges.indexOf(docId) === index;
          });

          return Promise.all(uniquePurges.map(function (docId) {
            return view.sourceDB.get(docId).then(function (doc) {
              return { docId, doc };
            })
            .catch(defaultsTo({ docId }));
          }));
        })
        .catch(defaultsTo([]));
      });
    }

    // fold purge entries into the changes batch, map the batch, queue the
    // resulting writes, and recurse until the feed is drained
    function processBatch(response, purges) {
      const results = response.results;
      if (!results.length && !purges.length) {
        return;
      }

      for (const purge of purges) {
        const index = results.findIndex(function (change) {
          return change.id === purge.docId;
        });
        if (index < 0) {
          // mimic a db.remove() on the changes feed
          const entry = {
            _id: purge.docId,
            doc: {
              _id: purge.docId,
              _deleted: 1,
            },
            changes: [],
          };

          if (purge.doc) {
            // update with new winning rev after purge
            entry.doc = purge.doc;
            entry.changes.push({ rev: purge.doc._rev });
          }

          results.push(entry);
        }
      }

      const docIdsToChangesAndEmits = createDocIdsToChangesAndEmits(results);

      queue.add(processChange(docIdsToChangesAndEmits, currentSeq));

      indexed_docs = indexed_docs + results.length;
      const progress = {
        view: view.name,
        last_seq: response.last_seq,
        results_count: results.length,
        indexed_docs
      };
      view.sourceDB.emit('indexing', progress);
      view.sourceDB.activeTasks.update(taskId, {completed_items: indexed_docs});

      // a short batch means the changes feed is exhausted
      if (results.length < opts.changes_batch_size) {
        return;
      }
      return processNextBatch();
    }

    // run the map function over each (non-design, non-local) changed doc,
    // collecting emitted rows per doc id; advances currentSeq as it goes
    function createDocIdsToChangesAndEmits(results) {
      const docIdsToChangesAndEmits = new Map();
      for (const change of results) {
        if (change.doc._id[0] !== '_') {
          mapResults = [];
          doc = change.doc;

          if (!doc._deleted) {
            tryMap(view.sourceDB, mapFun, doc);
          }
          mapResults.sort(sortByKeyThenValue);

          const indexableKeysToKeyValues = createIndexableKeysToKeyValues(mapResults);
          docIdsToChangesAndEmits.set(change.doc._id, [
            indexableKeysToKeyValues,
            change.changes
          ]);
        }
        currentSeq = change.seq;
      }
      return docIdsToChangesAndEmits;
    }

    // key each emitted row by an indexable [key, id(, i)] string; the
    // index is appended only to disambiguate duplicate key+id pairs
    function createIndexableKeysToKeyValues(mapResults) {
      const indexableKeysToKeyValues = new Map();
      let lastKey;
      for (let i = 0, len = mapResults.length; i < len; i++) {
        const emittedKeyValue = mapResults[i];
        const complexKey = [emittedKeyValue.key, emittedKeyValue.id];
        if (i > 0 && collate(emittedKeyValue.key, lastKey) === 0) {
          complexKey.push(i); // dup key+id, so make it unique
        }
        indexableKeysToKeyValues.set(toIndexableString(complexKey), emittedKeyValue);
        lastKey = emittedKeyValue.key;
      }
      return indexableKeysToKeyValues;
    }

    try {
      await createTask();
      await processNextBatch();
      await queue.finish();
      view.seq = currentSeq;
      view.sourceDB.activeTasks.remove(taskId);
    } catch (error) {
      // failure is reported via the active-tasks list rather than rethrown
      view.sourceDB.activeTasks.remove(taskId, error);
    }
  }
8790
  // Perform CouchDB-style grouping and reduction over sorted map rows.
  // Returns {rows: [...]} with no total_rows/offset, per CouchDB
  // semantics for reduced queries. Note: mutates options in place.
  function reduceView(view, results, options) {
    if (options.group_level === 0) {
      // group_level=0 is equivalent to no grouping at all
      delete options.group_level;
    }

    const shouldGroup = options.group || options.group_level;
    const reduceFun = reducer(view.reduceFun);
    const groups = [];
    // non-numeric group_level means group on the entire key
    const lvl = isNaN(options.group_level)
      ? Number.POSITIVE_INFINITY
      : options.group_level;

    // rows arrive sorted, so rows sharing a group key are adjacent
    for (const result of results) {
      const last = groups[groups.length - 1];
      let groupKey = shouldGroup ? result.key : null;

      // only set group_level for array keys
      if (shouldGroup && Array.isArray(groupKey)) {
        groupKey = groupKey.slice(0, lvl);
      }

      if (last && collate(last.groupKey, groupKey) === 0) {
        last.keys.push([result.key, result.id]);
        last.values.push(result.value);
        continue;
      }
      groups.push({
        keys: [[result.key, result.id]],
        values: [result.value],
        groupKey
      });
    }

    results = [];
    for (const group of groups) {
      const reduceTry = tryReduce(view.sourceDB, reduceFun, group.keys, group.values, false);
      if (reduceTry.error && reduceTry.error instanceof BuiltInError) {
        // CouchDB returns an error if a built-in errors out
        throw reduceTry.error;
      }
      results.push({
        // CouchDB just sets the value to null if a non-built-in errors out
        value: reduceTry.error ? null : reduceTry.output,
        key: group.groupKey
      });
    }
    // no total_rows/offset when reducing
    return { rows: sliceResults(results, options.limit, options.skip) };
  }
8840
8841 function queryView(view, opts) {
8842 return sequentialize(getQueue(view), function () {
8843 return queryViewInQueue(view, opts);
8844 })();
8845 }
8846
  // Run a (map, and optionally reduce) query against the view's index db.
  // Assumes the index is already up to date — callers go through
  // updateView first unless stale results were explicitly requested.
  async function queryViewInQueue(view, opts) {
    let totalRows;
    const shouldReduce = view.reduceFun && opts.reduce !== false;
    const skip = opts.skip || 0;
    if (typeof opts.keys !== 'undefined' && !opts.keys.length) {
      // equivalent query
      opts.limit = 0;
      delete opts.keys;
    }

    // read raw rows out of the index db and convert each back into a
    // {key, id, value} map result
    async function fetchFromView(viewOpts) {
      viewOpts.include_docs = true;
      const res$$1 = await view.db.allDocs(viewOpts);
      totalRows = res$$1.total_rows;

      return res$$1.rows.map(function (result) {
        // implicit migration - in older versions of PouchDB,
        // we explicitly stored the doc as {id: ..., key: ..., value: ...}
        // this is tested in a migration test
        /* istanbul ignore next */
        if ('value' in result.doc && typeof result.doc.value === 'object' &&
            result.doc.value !== null) {
          const keys = Object.keys(result.doc.value).sort();
          // this detection method is not perfect, but it's unlikely the user
          // emitted a value which was an object with these 3 exact keys
          const expectedKeys = ['id', 'key', 'value'];
          if (!(keys < expectedKeys || keys > expectedKeys)) {
            return result.doc.value;
          }
        }

        // modern format: key and emitting doc id are encoded in the _id
        const parsedKeyAndDocId = parseIndexableString(result.doc._id);
        return {
          key: parsedKeyAndDocId[0],
          id: parsedKeyAndDocId[1],
          value: ('value' in result.doc ? result.doc.value : null)
        };
      });
    }

    // shape raw map rows into the final response: reduce them, or attach
    // total_rows/offset, then optionally join the source docs
    async function onMapResultsReady(rows) {
      let finalResults;
      if (shouldReduce) {
        finalResults = reduceView(view, rows, opts);
      } else if (typeof opts.keys === 'undefined') {
        finalResults = {
          total_rows: totalRows,
          offset: skip,
          rows
        };
      } else {
        // support limit, skip for keys query
        finalResults = {
          total_rows: totalRows,
          offset: skip,
          rows: sliceResults(rows,opts.limit,opts.skip)
        };
      }
      /* istanbul ignore if */
      if (opts.update_seq) {
        finalResults.update_seq = view.seq;
      }
      if (opts.include_docs) {
        // join the emitting (or explicitly referenced) docs in one bulk read
        const docIds = uniq(rows.map(rowToDocId));

        const allDocsRes = await view.sourceDB.allDocs({
          keys: docIds,
          include_docs: true,
          conflicts: opts.conflicts,
          attachments: opts.attachments,
          binary: opts.binary
        });
        const docIdsToDocs = new Map();
        for (const row of allDocsRes.rows) {
          docIdsToDocs.set(row.id, row.doc);
        }
        for (const row of rows) {
          const docId = rowToDocId(row);
          const doc = docIdsToDocs.get(docId);
          if (doc) {
            row.doc = doc;
          }
        }
      }
      return finalResults;
    }

    if (typeof opts.keys !== 'undefined') {
      // one range fetch per requested key, concatenated in request order
      const keys = opts.keys;
      const fetchPromises = keys.map(function (key) {
        const viewOpts = {
          startkey : toIndexableString([key]),
          endkey : toIndexableString([key, {}])
        };
        /* istanbul ignore if */
        if (opts.update_seq) {
          viewOpts.update_seq = true;
        }
        return fetchFromView(viewOpts);
      });
      const result = await Promise.all(fetchPromises);
      const flattenedResult = result.flat();
      return onMapResultsReady(flattenedResult);
    } else { // normal query, no 'keys'
      const viewOpts = {
        descending : opts.descending
      };
      /* istanbul ignore if */
      if (opts.update_seq) {
        viewOpts.update_seq = true;
      }
      let startkey;
      let endkey;
      // startkey/endkey take precedence over the start_key/end_key aliases
      if ('start_key' in opts) {
        startkey = opts.start_key;
      }
      if ('startkey' in opts) {
        startkey = opts.startkey;
      }
      if ('end_key' in opts) {
        endkey = opts.end_key;
      }
      if ('endkey' in opts) {
        endkey = opts.endkey;
      }
      if (typeof startkey !== 'undefined') {
        viewOpts.startkey = opts.descending ?
          toIndexableString([startkey, {}]) :
          toIndexableString([startkey]);
      }
      if (typeof endkey !== 'undefined') {
        // inclusive_end flips meaning when the scan runs backwards
        let inclusiveEnd = opts.inclusive_end !== false;
        if (opts.descending) {
          inclusiveEnd = !inclusiveEnd;
        }

        viewOpts.endkey = toIndexableString(
          inclusiveEnd ? [endkey, {}] : [endkey]);
      }
      if (typeof opts.key !== 'undefined') {
        // exact-key query: closed range from [key] to [key, {}]
        const keyStart = toIndexableString([opts.key]);
        const keyEnd = toIndexableString([opts.key, {}]);
        if (viewOpts.descending) {
          viewOpts.endkey = keyStart;
          viewOpts.startkey = keyEnd;
        } else {
          viewOpts.startkey = keyStart;
          viewOpts.endkey = keyEnd;
        }
      }
      if (!shouldReduce) {
        // limit/skip are pushed down to the index scan only when not
        // reducing (reduce applies them after grouping instead)
        if (typeof opts.limit === 'number') {
          viewOpts.limit = opts.limit;
        }
        viewOpts.skip = skip;
      }

      const result = await fetchFromView(viewOpts);
      return onMapResultsReady(result);
    }
  }
9008
9009 async function httpViewCleanup(db) {
9010 const response = await db.fetch('_view_cleanup', {
9011 headers: new nodeFetch.Headers({'Content-Type': 'application/json'}),
9012 method: 'POST'
9013 });
9014 return response.json();
9015 }
9016
  // Destroy dependent view databases that are no longer referenced by any
  // design doc (because the ddoc was deleted or the view removed).
  // Resolves to {ok: true} even when there is nothing to clean up.
  async function localViewCleanup(db) {
    try {
      const metaDoc = await db.get('_local/' + localDocName);
      // design doc id -> Set of view names recorded in the meta doc
      const docsToViews = new Map();

      for (const fullViewName of Object.keys(metaDoc.views)) {
        const parts = parseViewName(fullViewName);
        const designDocName = '_design/' + parts[0];
        const viewName = parts[1];
        let views = docsToViews.get(designDocName);
        if (!views) {
          views = new Set();
          docsToViews.set(designDocName, views);
        }
        views.add(viewName);
      }
      const opts = {
        keys : mapToKeysArray(docsToViews),
        include_docs : true
      };

      const res$$1 = await db.allDocs(opts);
      // view db name -> truthy when some live design doc still defines it
      const viewsToStatus = {};
      for (const row of res$$1.rows) {
        const ddocName = row.key.substring(8); // cuts off '_design/'
        for (const viewName of docsToViews.get(row.key)) {
          let fullViewName = ddocName + '/' + viewName;
          /* istanbul ignore if */
          if (!metaDoc.views[fullViewName]) {
            // new format, without slashes, to support PouchDB 2.2.0
            // migration test in pouchdb's browser.migration.js verifies this
            fullViewName = viewName;
          }
          const viewDBNames = Object.keys(metaDoc.views[fullViewName]);
          // design doc deleted, or view function nonexistent
          const statusIsGood = row.doc && row.doc.views &&
            row.doc.views[viewName];
          for (const viewDBName of viewDBNames) {
            viewsToStatus[viewDBName] = viewsToStatus[viewDBName] || statusIsGood;
          }
        }
      }

      const dbsToDelete = Object.keys(viewsToStatus)
        .filter(function (viewDBName) { return !viewsToStatus[viewDBName]; });

      // destroy each orphaned view db through its queue so we never race
      // an in-flight index update on the same view
      const destroyPromises = dbsToDelete.map(function (viewDBName) {
        return sequentialize(getQueue(viewDBName), function () {
          return new db.constructor(viewDBName, db.__opts).destroy();
        })();
      });

      return Promise.all(destroyPromises).then(function () {
        return {ok: true};
      });
    } catch (err) {
      if (err.status === 404) {
        // no meta doc means no views were ever created here
        return {ok: true};
      } else {
        throw err;
      }
    }
  }
9080
  // Dispatch a query to the right implementation: a custom adapter
  // _query, the HTTP path for remote dbs, or the local map/reduce
  // machinery (temp view vs persistent design-doc view).
  async function queryPromised(db, fun, opts) {
    /* istanbul ignore next */
    if (typeof db._query === 'function') {
      return customQuery(db, fun, opts);
    }
    if (isRemote(db)) {
      return httpQuery(db, fun, opts);
    }

    const updateViewOpts = {
      changes_batch_size: db.__opts.view_update_changes_batch_size || CHANGES_BATCH_SIZE$1
    };

    if (typeof fun !== 'string') {
      // temp_view
      checkQueryParseError(opts, fun);

      // temp views are built, queried, then destroyed, all serialized
      // through the shared temp-view queue
      tempViewQueue.add(async function () {
        const view = await createView(
          /* sourceDB */ db,
          /* viewName */ 'temp_view/temp_view',
          /* mapFun */ fun.map,
          /* reduceFun */ fun.reduce,
          /* temporary */ true,
          /* localDocName */ localDocName);

        // the temp view db is destroyed whether the query succeeds or not
        return fin(updateView(view, updateViewOpts).then(
          function () { return queryView(view, opts); }),
          function () { return view.db.destroy(); }
        );
      });
      return tempViewQueue.finish();
    } else {
      // persistent view
      const fullViewName = fun;
      const parts = parseViewName(fullViewName);
      const designDocName = parts[0];
      const viewName = parts[1];

      const doc = await db.get('_design/' + designDocName);
      fun = doc.views && doc.views[viewName];

      if (!fun) {
        // basic validator; it's assumed that every subclass would want this
        throw new NotFoundError$1(`ddoc ${doc._id} has no view named ${viewName}`);
      }

      ddocValidator(doc, viewName);
      checkQueryParseError(opts, fun);

      const view = await createView(
        /* sourceDB */ db,
        /* viewName */ fullViewName,
        /* mapFun */ fun.map,
        /* reduceFun */ fun.reduce,
        /* temporary */ false,
        /* localDocName */ localDocName);

      if (opts.stale === 'ok' || opts.stale === 'update_after') {
        // serve possibly-stale results now; 'update_after' additionally
        // refreshes the index in the background
        if (opts.stale === 'update_after') {
          nextTick(function () {
            updateView(view, updateViewOpts);
          });
        }
        return queryView(view, opts);
      } else { // stale not ok
        await updateView(view, updateViewOpts);
        return queryView(view, opts);
      }
    }
  }
9152
9153 function abstractQuery(fun, opts, callback) {
9154 const db = this;
9155 if (typeof opts === 'function') {
9156 callback = opts;
9157 opts = {};
9158 }
9159 opts = opts ? coerceOptions(opts) : {};
9160
9161 if (typeof fun === 'function') {
9162 fun = {map : fun};
9163 }
9164
9165 const promise = Promise.resolve().then(function () {
9166 return queryPromised(db, fun, opts);
9167 });
9168 promisedCallback(promise, callback);
9169 return promise;
9170 }
9171
9172 const abstractViewCleanup = callbackify(function () {
9173 const db = this;
9174 /* istanbul ignore next */
9175 if (typeof db._viewCleanup === 'function') {
9176 return customViewCleanup(db);
9177 }
9178 if (isRemote(db)) {
9179 return httpViewCleanup(db);
9180 }
9181 return localViewCleanup(db);
9182 });
9183
9184 return {
9185 query: abstractQuery,
9186 viewCleanup: abstractViewCleanup
9187 };
9188}
9189
// Built-in reduce functions, matching CouchDB's _sum/_count/_stats.
var builtInReduce = {
  // _sum: sum of the emitted values (delegates to the shared sum() helper)
  _sum: function (keys, values) {
    return sum(values);
  },

  // _count: number of emitted values
  _count: function (keys, values) {
    return values.length;
  },

  // _stats: CouchDB-compatible numeric statistics.
  // no need to implement rereduce=true, because Pouch
  // will never call it
  _stats: function (keys, values) {
    // Single pass for min/max/sumsqr. Using Math.min/Math.max per element
    // (rather than Math.min.apply(null, values)) avoids a RangeError when
    // `values` exceeds the engine's argument-count limit, while preserving
    // the same results (including NaN propagation and Infinity/-Infinity
    // for an empty array).
    var min = Infinity;
    var max = -Infinity;
    var sumsqr = 0;
    for (var i = 0, len = values.length; i < len; i++) {
      var num = values[i];
      min = Math.min(min, num);
      max = Math.max(max, num);
      sumsqr += num * num;
    }
    return {
      sum : sum(values),
      min,
      max,
      count : values.length,
      sumsqr
    };
  }
};
9219
// Map a reduce-function string to a built-in implementation.
// Returns undefined for user-defined (non "_"-prefixed) reduce functions,
// and throws for unrecognized "_"-prefixed names.
function getBuiltIn(reduceFunString) {
  if (reduceFunString.startsWith('_sum')) {
    return builtInReduce._sum;
  }
  if (reduceFunString.startsWith('_count')) {
    return builtInReduce._count;
  }
  if (reduceFunString.startsWith('_stats')) {
    return builtInReduce._stats;
  }
  if (reduceFunString.startsWith('_')) {
    throw new Error(reduceFunString + ' is not a supported reduce function.');
  }
}
9231
// Build the map function used during view indexing. A two-argument
// function is treated as (doc, emit) directly — for temp_views one can use
// emit(doc, emit), see #38. Anything else is stringified and compiled with
// `emit` bound into its scope.
function mapper(mapFun, emit) {
  const takesEmit = typeof mapFun === "function" && mapFun.length === 2;
  if (!takesEmit) {
    return evalFunction(mapFun.toString(), emit);
  }
  const origMap = mapFun;
  return (doc) => origMap(doc, emit);
}
9243
// Build the reduce function for a view: a built-in (_sum/_count/_stats)
// when the string form names one, otherwise compile the user's function.
function reducer(reduceFun) {
  const reduceFunString = reduceFun.toString();
  return getBuiltIn(reduceFunString) || evalFunction(reduceFunString);
}
9253
// Validate that design doc `ddoc` defines view `viewName` with a string
// `map` function; throws NotFoundError otherwise.
//
// Fix: the original dereferenced `fun.map` without first checking that the
// view exists, so a missing view crashed with a TypeError instead of the
// intended NotFoundError.
function ddocValidator(ddoc, viewName) {
  var fun = ddoc.views && ddoc.views[viewName];
  if (!fun || typeof fun.map !== 'string') {
    throw new NotFoundError$1('ddoc ' + ddoc._id + ' has no string view named ' +
      viewName + ', instead found object of type: ' + typeof (fun && fun.map));
  }
}
9261
// Name of the local doc that tracks map/reduce view metadata
var localDocName = 'mrviews';
// Shared query/viewCleanup implementation, specialized with this adapter's
// mapper, reducer, and ddoc validator
var abstract = createAbstractMapReduce(localDocName, mapper, reducer, ddocValidator);
9264
// Public db.query(): thin delegate to the abstract map/reduce
// implementation, preserving `this` (the database instance).
function query(fun, opts, callback) {
  const db = this;
  return abstract.query.call(db, fun, opts, callback);
}
9268
// Public db.viewCleanup(): thin delegate to the abstract implementation,
// preserving `this` (the database instance).
function viewCleanup(callback) {
  const db = this;
  return abstract.viewCleanup.call(db, callback);
}
9272
// Plugin object mixed into the PouchDB prototype (query + viewCleanup)
var mapreduce = {
  query,
  viewCleanup
};
9277
// True when the local copy of attachment `filename` is missing or its
// digest differs from the remote attachment's digest (i.e. it must be
// re-fetched from the source).
function fileHasChanged(localDoc, remoteDoc, filename) {
  const localAtt = localDoc._attachments && localDoc._attachments[filename];
  if (!localAtt) {
    return true;
  }
  return localAtt.digest !== remoteDoc._attachments[filename].digest;
}
9283
// Fetch every attachment of `doc` (at its exact rev) from `db`, resolved
// in the order of Object.keys(doc._attachments).
function getDocAttachments(db, doc) {
  const fetchOne = (filename) =>
    db.getAttachment(doc._id, filename, {rev: doc._rev});
  return Promise.all(Object.keys(doc._attachments).map(fetchOne));
}
9290
// Fetch the attachments for `doc`, preferring copies already stored on a
// local target (to avoid re-downloading unchanged attachments from a
// remote source). Falls back to fetching everything from the source when
// the optimization doesn't apply or the target doesn't have the doc (404).
function getDocAttachmentsFromTargetOrSource(target, src, doc) {
  const canUseLocalCopies = isRemote(src) && !isRemote(target);
  const filenames = Object.keys(doc._attachments);

  if (!canUseLocalCopies) {
    return getDocAttachments(src, doc);
  }

  return target.get(doc._id).then(function (localDoc) {
    const fetches = filenames.map(function (filename) {
      return fileHasChanged(localDoc, doc, filename)
        ? src.getAttachment(doc._id, filename)
        : target.getAttachment(localDoc._id, filename);
    });
    return Promise.all(fetches);
  }).catch(function (error) {
    /* istanbul ignore if */
    if (error.status !== 404) {
      throw error;
    }
    // target has no local copy of the doc at all
    return getDocAttachments(src, doc);
  });
}
9316
// Convert a revsDiff response ({id: {missing: [revs]}}) into the options
// object expected by bulkGet: one {id, rev} request per missing rev.
function createBulkGetOpts(diffs) {
  const requests = [];
  for (const id of Object.keys(diffs)) {
    for (const rev of diffs[id].missing) {
      requests.push({id, rev});
    }
  }
  return {
    docs: requests,
    revs: true,
    latest: true
  };
}
9335
9336//
9337// Fetch all the documents from the src as described in the "diffs",
9338// which is a mapping of docs IDs to revisions. If the state ever
9339// changes to "cancelled", then the returned promise will be rejected.
9340// Else it will be resolved with a list of fetched documents.
9341//
function getDocs(src, target, diffs, state) {
  diffs = clone(diffs); // we do not need to modify this

  // `ok` is flipped to false by any per-doc error below; resultDocs
  // accumulates the successfully-fetched docs
  var resultDocs = [],
      ok = true;

  function getAllDocs() {

    var bulkGetOpts = createBulkGetOpts(diffs);

    if (!bulkGetOpts.docs.length) { // optimization: skip empty requests
      return;
    }

    return src.bulkGet(bulkGetOpts).then(function (bulkGetResponse) {
      /* istanbul ignore if */
      if (state.cancelled) {
        throw new Error('cancelled');
      }
      // each result can contain multiple doc revisions; process them all
      return Promise.all(bulkGetResponse.results.map(function (bulkGetInfo) {
        return Promise.all(bulkGetInfo.docs.map(function (doc) {
          var remoteDoc = doc.ok;

          if (doc.error) {
            // when AUTO_COMPACTION is set, docs can be returned which look
            // like this: {"missing":"1-7c3ac256b693c462af8442f992b83696"}
            ok = false;
          }

          if (!remoteDoc || !remoteDoc._attachments) {
            return remoteDoc;
          }

          // inline attachment data into the doc, replacing stubs
          return getDocAttachmentsFromTargetOrSource(target, src, remoteDoc).then((attachments) => {
            var filenames = Object.keys(remoteDoc._attachments);
            attachments.forEach(function (attachment, i) {
              var att = remoteDoc._attachments[filenames[i]];
              delete att.stub;
              delete att.length;
              att.data = attachment;
            });

            return remoteDoc;
          });
        }));
      }))

      .then(function (results) {
        // flatten and drop the undefined entries left by errored docs
        resultDocs = resultDocs.concat(results.flat().filter(Boolean));
      });
    });
  }

  function returnResult() {
    return { ok, docs:resultDocs };
  }

  return Promise.resolve()
    .then(getAllDocs)
    .then(returnResult);
}
9403
// Version stamped into checkpoint docs (see `comparisons` below)
var CHECKPOINT_VERSION = 1;
// Identifies this implementation in checkpoint docs
var REPLICATOR = "pouchdb";
// This is an arbitrary number to limit the
// amount of replication history we save in the checkpoint.
// If we save too much, the checkpoint docs will become very big,
// if we save fewer, we'll run a greater risk of having to
// read all the changes from 0 when checkpoint PUTs fail
// CouchDB 2.0 has a more involved history pruning,
// but let's go for the simple version for now.
var CHECKPOINT_HISTORY_SIZE = 5;
// Sequence number meaning "start from the very beginning"
var LOWEST_SEQ = 0;
9415
// Read checkpoint doc `id` from `db` and update it to record `checkpoint`
// (a sequence number) for this replication `session`. Creates a fresh
// checkpoint doc when none exists (404), prunes history to
// CHECKPOINT_HISTORY_SIZE entries, is a no-op when cancelled or when the
// checkpoint is unchanged, and retries on 409 conflicts (another
// replicator writing the same doc).
//
// Fix: removed a leftover empty statement
// (`if (db.adapter === 'http' || db.adapter === 'https') ;`) that had no
// effect on behavior.
function updateCheckpoint(db, id, checkpoint, session, returnValue) {
  return db.get(id).catch(function (err) {
    if (err.status === 404) {
      // no checkpoint doc yet; start a fresh one
      return {
        session_id: session,
        _id: id,
        history: [],
        replicator: REPLICATOR,
        version: CHECKPOINT_VERSION
      };
    }
    throw err;
  }).then(function (doc) {
    if (returnValue.cancelled) {
      return;
    }

    // if the checkpoint has not changed, do not update
    if (doc.last_seq === checkpoint) {
      return;
    }

    // Filter out current entry for this replication
    doc.history = (doc.history || []).filter(function (item) {
      return item.session_id !== session;
    });

    // Add the latest checkpoint to history
    doc.history.unshift({
      last_seq: checkpoint,
      session_id: session
    });

    // Just take the last pieces in history, to
    // avoid really big checkpoint docs.
    // see comment on history size above
    doc.history = doc.history.slice(0, CHECKPOINT_HISTORY_SIZE);

    doc.version = CHECKPOINT_VERSION;
    doc.replicator = REPLICATOR;

    doc.session_id = session;
    doc.last_seq = checkpoint;

    return db.put(doc).catch(function (err) {
      if (err.status === 409) {
        // retry; someone is trying to write a checkpoint simultaneously
        return updateCheckpoint(db, id, checkpoint, session, returnValue);
      }
      throw err;
    });
  });
}
9470
// Manages replication checkpoint docs on the source and/or target
// databases. Which side(s) get written is controlled by
// opts.writeSourceCheckpoint / opts.writeTargetCheckpoint; a forbidden
// (4xx) write to the source permanently disables source checkpointing.
class CheckpointerInternal {
  constructor(src, target, id, returnValue, opts = {
    writeSourceCheckpoint: true,
    writeTargetCheckpoint: true,
  }) {
    this.src = src;
    this.target = target;
    this.id = id;
    this.returnValue = returnValue;
    this.opts = opts;

    // a caller-supplied opts object may omit either flag; default both on
    if (typeof opts.writeSourceCheckpoint === "undefined") {
      opts.writeSourceCheckpoint = true;
    }

    if (typeof opts.writeTargetCheckpoint === "undefined") {
      opts.writeTargetCheckpoint = true;
    }
  }

  // Write `checkpoint` for `session` to the target first, then the source
  writeCheckpoint(checkpoint, session) {
    var self = this;
    return this.updateTarget(checkpoint, session).then(function () {
      return self.updateSource(checkpoint, session);
    });
  }

  // Write the checkpoint to the target (or no-op when disabled)
  updateTarget(checkpoint, session) {
    if (this.opts.writeTargetCheckpoint) {
      return updateCheckpoint(this.target, this.id, checkpoint,
        session, this.returnValue);
    } else {
      return Promise.resolve(true);
    }
  }

  // Write the checkpoint to the source (or no-op when disabled); a
  // forbidden error disables future source writes instead of failing
  updateSource(checkpoint, session) {
    if (this.opts.writeSourceCheckpoint) {
      var self = this;
      return updateCheckpoint(this.src, this.id, checkpoint,
        session, this.returnValue)
        .catch(function (err) {
          if (isForbiddenError(err)) {
            self.opts.writeSourceCheckpoint = false;
            return true;
          }
          throw err;
        });
    } else {
      return Promise.resolve(true);
    }
  }

  // Determine the sequence number to resume replication from, based on
  // whichever checkpoint doc(s) are configured. Falls back to LOWEST_SEQ
  // (replicate from scratch) whenever the docs are missing or disagree.
  getCheckpoint() {
    var self = this;

    // no checkpointing at all: always start from the beginning
    if (!self.opts.writeSourceCheckpoint && !self.opts.writeTargetCheckpoint) {
      return Promise.resolve(LOWEST_SEQ);
    }

    // source-only checkpointing: read the source doc directly
    if (self.opts && self.opts.writeSourceCheckpoint && !self.opts.writeTargetCheckpoint) {
      return self.src.get(self.id).then(function (sourceDoc) {
        return sourceDoc.last_seq || LOWEST_SEQ;
      }).catch(function (err) {
        /* istanbul ignore if */
        if (err.status !== 404) {
          throw err;
        }
        return LOWEST_SEQ;
      });
    }

    return self.target.get(self.id).then(function (targetDoc) {
      // target-only checkpointing: the target doc is authoritative
      if (self.opts && self.opts.writeTargetCheckpoint && !self.opts.writeSourceCheckpoint) {
        return targetDoc.last_seq || LOWEST_SEQ;
      }

      // both sides checkpointed: reconcile source and target docs
      return self.src.get(self.id).then(function (sourceDoc) {
        // Since we can't migrate an old version doc to a new one
        // (no session id), we just go with the lowest seq in this case
        /* istanbul ignore if */
        if (targetDoc.version !== sourceDoc.version) {
          return LOWEST_SEQ;
        }

        var version;
        if (targetDoc.version) {
          version = targetDoc.version.toString();
        } else {
          version = "undefined";
        }

        // pick the comparison strategy matching the doc version
        if (version in comparisons) {
          return comparisons[version](targetDoc, sourceDoc);
        }
        /* istanbul ignore next */
        return LOWEST_SEQ;
      }, function (err) {
        // source doc missing but target has progress: seed the source doc
        // and start over (unless the source forbids writes)
        if (err.status === 404 && targetDoc.last_seq) {
          return self.src.put({
            _id: self.id,
            last_seq: LOWEST_SEQ
          }).then(function () {
            return LOWEST_SEQ;
          }, function (err) {
            if (isForbiddenError(err)) {
              self.opts.writeSourceCheckpoint = false;
              return targetDoc.last_seq;
            }
            /* istanbul ignore next */
            return LOWEST_SEQ;
          });
        }
        throw err;
      });
    }).catch(function (err) {
      if (err.status !== 404) {
        throw err;
      }
      return LOWEST_SEQ;
    });
  }
}
9594
// Checkpoint-reconciliation strategies, keyed by the checkpoint doc's
// version field (stringified; "undefined" for legacy docs with none)
var comparisons = {
  "undefined": function (targetDoc, sourceDoc) {
    // This is the previous comparison function
    if (collate(targetDoc.last_seq, sourceDoc.last_seq) === 0) {
      return sourceDoc.last_seq;
    }
    /* istanbul ignore next */
    return 0;
  },
  "1": function (targetDoc, sourceDoc) {
    // This is the comparison function ported from CouchDB
    return compareReplicationLogs(sourceDoc, targetDoc).last_seq;
  }
};
9609
9610// This checkpoint comparison is ported from CouchDBs source
9611// they come from here:
9612// https://github.com/apache/couchdb-couch-replicator/blob/master/src/couch_replicator.erl#L863-L906
9613
// Compare two replication-log docs (ported from CouchDB). When both sides
// recorded the same session, the source's last_seq wins outright;
// otherwise walk both histories looking for a common session.
function compareReplicationLogs(srcDoc, tgtDoc) {
  if (srcDoc.session_id !== tgtDoc.session_id) {
    return compareReplicationHistory(srcDoc.history, tgtDoc.history);
  }
  return {
    last_seq: srcDoc.last_seq,
    history: srcDoc.history
  };
}
9624
// Walk both replication histories in lockstep looking for the most recent
// session the two sides have in common; fall back to LOWEST_SEQ when the
// histories share nothing. (Iterative rendition of the Erlang recursion
// ported from CouchDB.)
function compareReplicationHistory(sourceHistory, targetHistory) {
  let srcHist = sourceHistory;
  let tgtHist = targetHistory;

  while (true) {
    const srcHead = srcHist[0];
    const tgtHead = tgtHist[0];

    // ran out of history on either side: no common session
    if (!srcHead || tgtHist.length === 0) {
      return {
        last_seq: LOWEST_SEQ,
        history: []
      };
    }

    /* istanbul ignore if */
    if (hasSessionId(srcHead.session_id, tgtHist)) {
      return {
        last_seq: srcHead.last_seq,
        history: srcHist
      };
    }

    const tgtRest = tgtHist.slice(1);
    if (hasSessionId(tgtHead.session_id, srcHist.slice(1))) {
      return {
        last_seq: tgtHead.last_seq,
        history: tgtRest
      };
    }

    // drop the heads and keep looking
    srcHist = srcHist.slice(1);
    tgtHist = tgtRest;
  }
}
9659
// True when `history` contains an entry whose session_id equals
// `sessionId`. A falsy sessionId never matches (the recursive original
// bailed out before comparing).
function hasSessionId(sessionId, history) {
  if (!sessionId) {
    return false;
  }
  for (const entry of history) {
    if (entry.session_id === sessionId) {
      return true;
    }
  }
  return false;
}
9674
// True for any numeric 4xx status. Note this deliberately matches the
// whole 4xx range, not just HTTP 403.
function isForbiddenError(err) {
  const status = err.status;
  return typeof status === 'number' && status >= 400 && status < 500;
}
9678
// Factory wrapper around CheckpointerInternal, callable with or without
// `new`; either way it returns a CheckpointerInternal instance.
function Checkpointer(src, target, id, returnValue, opts) {
  if (!(this instanceof CheckpointerInternal)) {
    return new CheckpointerInternal(src, target, id, returnValue, opts);
  }
  // NOTE(review): this branch appears unreachable in normal use — `this`
  // is never a CheckpointerInternal when Checkpointer is invoked directly
  // or with `new` — and returning the function itself looks like a quirk;
  // confirm before relying on it.
  return Checkpointer;
}
9685
// Initial backoff delay (ms) fed into opts.back_off_function
var STARTING_BACK_OFF = 0;
9687
// Handle a replication failure: either give up (opts.retry === false) or
// emit pause/backoff events and schedule `callback` after a delay computed
// by opts.back_off_function. The stored delay resets to STARTING_BACK_OFF
// once the replication goes 'active' again.
function backOff(opts, returnValue, error, callback) {
  if (opts.retry === false) {
    // retries disabled: surface the error and tear everything down
    returnValue.emit('error', error);
    returnValue.removeAllListeners();
    return;
  }
  /* istanbul ignore if */
  if (typeof opts.back_off_function !== 'function') {
    opts.back_off_function = defaultBackOff;
  }
  returnValue.emit('requestError', error);

  const wasRunning = returnValue.state === 'active' || returnValue.state === 'pending';
  if (wasRunning) {
    returnValue.emit('paused', error);
    returnValue.state = 'stopped';
    const resetBackOff = function backoffTimeSet() {
      opts.current_back_off = STARTING_BACK_OFF;
    };
    const stopResetting = function removeBackOffTimeSet() {
      returnValue.removeListener('active', resetBackOff);
    };
    returnValue.once('paused', stopResetting);
    returnValue.once('active', resetBackOff);
  }

  opts.current_back_off = opts.current_back_off || STARTING_BACK_OFF;
  opts.current_back_off = opts.back_off_function(opts.current_back_off);
  setTimeout(callback, opts.current_back_off);
}
9716
// Return a copy of queryParams whose keys are inserted in collation
// order, so JSON.stringify yields a stable string for replication ids.
function sortObjectPropertiesByKey(queryParams) {
  const sorted = {};
  for (const key of Object.keys(queryParams).sort(collate)) {
    sorted[key] = queryParams[key];
  }
  return sorted;
}
9723
// Generate a unique id particular to this replication.
// Not guaranteed to align perfectly with CouchDB's rep ids.
// The id is an md5 of both db ids plus every option that changes which
// docs get replicated (filter, view, query_params, doc_ids, selector).
function generateReplicationId(src, target, opts) {
  // NOTE(review): .sort() mutates opts.doc_ids in place — confirm callers
  // don't rely on the original order
  var docIds = opts.doc_ids ? opts.doc_ids.sort(collate) : '';
  var filterFun = opts.filter ? opts.filter.toString() : '';
  var queryParams = '';
  var filterViewName = '';
  var selector = '';

  // possibility for checkpoints to be lost here as behaviour of
  // JSON.stringify is not stable (see #6226)
  /* istanbul ignore if */
  if (opts.selector) {
    selector = JSON.stringify(opts.selector);
  }

  if (opts.filter && opts.query_params) {
    queryParams = JSON.stringify(sortObjectPropertiesByKey(opts.query_params));
  }

  if (opts.filter && opts.filter === '_view') {
    filterViewName = opts.view.toString();
  }

  return Promise.all([src.id(), target.id()]).then(function (res) {
    var queryData = res[0] + res[1] + filterFun + filterViewName +
      queryParams + docIds + selector;
    return new Promise(function (resolve) {
      binaryMd5(queryData, resolve);
    });
  }).then(function (md5sum) {
    // can't use straight-up md5 alphabet, because
    // the char '/' is interpreted as being for attachments,
    // and + is also not url-safe
    md5sum = md5sum.replace(/\//g, '.').replace(/\+/g, '_');
    return '_local/' + md5sum;
  });
}
9762
9763function replicate(src, target, opts, returnValue, result) {
9764 var batches = []; // list of batches to be processed
9765 var currentBatch; // the batch currently being processed
9766 var pendingBatch = {
9767 seq: 0,
9768 changes: [],
9769 docs: []
9770 }; // next batch, not yet ready to be processed
9771 var writingCheckpoint = false; // true while checkpoint is being written
9772 var changesCompleted = false; // true when all changes received
9773 var replicationCompleted = false; // true when replication has completed
9774 // initial_last_seq is the state of the source db before
9775 // replication started, and it is _not_ updated during
9776 // replication or used anywhere else, as opposed to last_seq
9777 var initial_last_seq = 0;
9778 var last_seq = 0;
9779 var continuous = opts.continuous || opts.live || false;
9780 var batch_size = opts.batch_size || 100;
9781 var batches_limit = opts.batches_limit || 10;
9782 var style = opts.style || 'all_docs';
9783 var changesPending = false; // true while src.changes is running
9784 var doc_ids = opts.doc_ids;
9785 var selector = opts.selector;
9786 var repId;
9787 var checkpointer;
9788 var changedDocs = [];
9789 // Like couchdb, every replication gets a unique session id
9790 var session = uuid$1();
9791 var taskId;
9792
9793 result = result || {
9794 ok: true,
9795 start_time: new Date().toISOString(),
9796 docs_read: 0,
9797 docs_written: 0,
9798 doc_write_failures: 0,
9799 errors: []
9800 };
9801
9802 var changesOpts = {};
9803 returnValue.ready(src, target);
9804
  // Lazily create the checkpointer for this replication, deriving the
  // replication id first; repeated calls are no-ops once it exists.
  function initCheckpointer() {
    if (checkpointer) {
      return Promise.resolve();
    }
    return generateReplicationId(src, target, opts).then(function (res$$1) {
      repId = res$$1;

      // opts.checkpoint selects which side(s) store checkpoint docs
      var checkpointOpts = {};
      if (opts.checkpoint === false) {
        checkpointOpts = { writeSourceCheckpoint: false, writeTargetCheckpoint: false };
      } else if (opts.checkpoint === 'source') {
        checkpointOpts = { writeSourceCheckpoint: true, writeTargetCheckpoint: false };
      } else if (opts.checkpoint === 'target') {
        checkpointOpts = { writeSourceCheckpoint: false, writeTargetCheckpoint: true };
      } else {
        checkpointOpts = { writeSourceCheckpoint: true, writeTargetCheckpoint: true };
      }

      checkpointer = new Checkpointer(src, target, repId, returnValue, checkpointOpts);
    });
  }
9826
  // Write the current batch's docs to the target with new_edits:false
  // (replication semantics), updating counters and collecting the docs
  // that actually changed into `changedDocs` for the 'change' event.
  function writeDocs() {
    changedDocs = [];

    if (currentBatch.docs.length === 0) {
      return;
    }
    var docs = currentBatch.docs;
    var bulkOpts = {timeout: opts.timeout};
    return target.bulkDocs({docs, new_edits: false}, bulkOpts).then(function (res$$1) {
      /* istanbul ignore if */
      if (returnValue.cancelled) {
        completeReplication();
        throw new Error('cancelled');
      }

      // `res` doesn't include full documents (which live in `docs`), so we create a map of
      // (id -> error), and check for errors while iterating over `docs`
      var errorsById = Object.create(null);
      res$$1.forEach(function (res$$1) {
        if (res$$1.error) {
          errorsById[res$$1.id] = res$$1;
        }
      });

      var errorsNo = Object.keys(errorsById).length;
      result.doc_write_failures += errorsNo;
      result.docs_written += docs.length - errorsNo;

      docs.forEach(function (doc) {
        var error = errorsById[doc._id];
        if (error) {
          result.errors.push(error);
          // Normalize error name. i.e. 'Unauthorized' -> 'unauthorized' (eg Sync Gateway)
          var errorName = (error.name || '').toLowerCase();
          if (errorName === 'unauthorized' || errorName === 'forbidden') {
            // auth failures are reported via 'denied' but don't abort
            returnValue.emit('denied', clone(error));
          } else {
            // any other write error aborts the batch
            throw error;
          }
        } else {
          changedDocs.push(doc);
        }
      });

    }, function (err) {
      // the whole bulkDocs call failed: count every doc as a failure
      result.doc_write_failures += docs.length;
      throw err;
    });
  }
9876
  // Finalize the current batch: emit 'change', update the active-task
  // progress, write the checkpoint, then move on to the next batch.
  function finishBatch() {
    if (currentBatch.error) {
      throw new Error('There was a problem getting docs.');
    }
    result.last_seq = last_seq = currentBatch.seq;
    var outResult = clone(result);
    if (changedDocs.length) {
      outResult.docs = changedDocs;
      // Attach 'pending' property if server supports it (CouchDB 2.0+)
      /* istanbul ignore if */
      if (typeof currentBatch.pending === 'number') {
        outResult.pending = currentBatch.pending;
        delete currentBatch.pending;
      }
      returnValue.emit('change', outResult);
    }
    writingCheckpoint = true;

    // fire-and-forget task-progress update
    // NOTE(review): this chain has no .catch — an info() rejection would
    // surface as an unhandled rejection; confirm upstream guarantees.
    src.info().then(function (info) {
      var task = src.activeTasks.get(taskId);
      if (!currentBatch || !task) {
        return;
      }

      var completed = task.completed_items || 0;
      var total_items = parseInt(info.update_seq, 10) - parseInt(initial_last_seq, 10);
      src.activeTasks.update(taskId, {
        completed_items: completed + currentBatch.changes.length,
        total_items
      });
    });

    return checkpointer.writeCheckpoint(currentBatch.seq,
      session).then(function () {
      returnValue.emit('checkpoint', { 'checkpoint': currentBatch.seq });
      writingCheckpoint = false;
      /* istanbul ignore if */
      if (returnValue.cancelled) {
        completeReplication();
        throw new Error('cancelled');
      }
      currentBatch = undefined;
      getChanges();
    }).catch(function (err) {
      onCheckpointError(err);
      throw err;
    });
  }
9925
  // Ask the target which of the current batch's revisions it is missing
  // (revsDiff) and stash the answer on currentBatch.diffs.
  function getDiffs() {
    var diff = {};
    currentBatch.changes.forEach(function (change) {
      returnValue.emit('checkpoint', { 'revs_diff': change });
      // Couchbase Sync Gateway emits these, but we can ignore them
      /* istanbul ignore if */
      if (change.id === "_user/") {
        return;
      }
      diff[change.id] = change.changes.map(function (x) {
        return x.rev;
      });
    });
    return target.revsDiff(diff).then(function (diffs) {
      /* istanbul ignore if */
      if (returnValue.cancelled) {
        completeReplication();
        throw new Error('cancelled');
      }
      // currentBatch.diffs elements are deleted as the documents are written
      currentBatch.diffs = diffs;
    });
  }
9949
9950 function getBatchDocs() {
9951 return getDocs(src, target, currentBatch.diffs, returnValue).then(function (got) {
9952 currentBatch.error = !got.ok;
9953 got.docs.forEach(function (doc) {
9954 delete currentBatch.diffs[doc._id];
9955 result.docs_read++;
9956 currentBatch.docs.push(doc);
9957 });
9958 });
9959 }
9960
  // Pull the next queued batch and run it through the pipeline:
  // diffs -> fetch docs -> write docs -> checkpoint -> recurse.
  function startNextBatch() {
    // a batch is already in flight, or we've been cancelled
    if (returnValue.cancelled || currentBatch) {
      return;
    }
    if (batches.length === 0) {
      // nothing queued: flush whatever is accumulating in pendingBatch
      processPendingBatch(true);
      return;
    }
    currentBatch = batches.shift();
    returnValue.emit('checkpoint', { 'start_next_batch': currentBatch.seq });
    getDiffs()
      .then(getBatchDocs)
      .then(writeDocs)
      .then(finishBatch)
      .then(startNextBatch)
      .catch(function (err) {
        abortReplication('batch processing terminated with error', err);
      });
  }
9980
9981
  // Promote pendingBatch to the batches queue once it is full (batch_size)
  // or when forced (`immediate`, or the changes feed finished). With
  // nothing pending, emits 'paused' / completes as appropriate.
  function processPendingBatch(immediate) {
    if (pendingBatch.changes.length === 0) {
      if (batches.length === 0 && !currentBatch) {
        // fully drained: either idle (live feed) or done
        if ((continuous && changesOpts.live) || changesCompleted) {
          returnValue.state = 'pending';
          returnValue.emit('paused');
        }
        if (changesCompleted) {
          completeReplication();
        }
      }
      return;
    }
    if (
      immediate ||
      changesCompleted ||
      pendingBatch.changes.length >= batch_size
    ) {
      batches.push(pendingBatch);
      pendingBatch = {
        seq: 0,
        changes: [],
        docs: []
      };
      // we have work: transition back to 'active' if we were idle
      if (returnValue.state === 'pending' || returnValue.state === 'stopped') {
        returnValue.state = 'active';
        returnValue.emit('active');
      }
      startNextBatch();
    }
  }
10013
10014
  // Abort the replication with an error: discard all queued work and hand
  // off to completeReplication with the (annotated) error.
  function abortReplication(reason, err) {
    if (replicationCompleted) {
      return;
    }
    // ensure the error carries a human-readable message
    if (!err.message) {
      err.message = reason;
    }
    result.ok = false;
    result.status = 'aborting';
    // drop everything queued or accumulating
    batches = [];
    pendingBatch = {
      seq: 0,
      changes: [],
      docs: []
    };
    completeReplication(err);
  }
10032
10033
  // Finish the replication: finalize `result`, remove the active task, and
  // either emit 'complete', emit 'error' (auth failures), or schedule a
  // retry via backOff, depending on `fatalError`.
  function completeReplication(fatalError) {
    if (replicationCompleted) {
      return;
    }
    /* istanbul ignore if */
    if (returnValue.cancelled) {
      result.status = 'cancelled';
      // defer completion until the in-flight checkpoint write finishes
      if (writingCheckpoint) {
        return;
      }
    }
    result.status = result.status || 'complete';
    result.end_time = new Date().toISOString();
    result.last_seq = last_seq;
    replicationCompleted = true;

    src.activeTasks.remove(taskId, fatalError);

    if (fatalError) {
      // need to extend the error because Firefox considers ".result" read-only
      fatalError = createError(fatalError);
      fatalError.result = result;

      // Normalize error name. i.e. 'Unauthorized' -> 'unauthorized' (eg Sync Gateway)
      var errorName = (fatalError.name || '').toLowerCase();
      if (errorName === 'unauthorized' || errorName === 'forbidden') {
        // auth failures are not retryable
        returnValue.emit('error', fatalError);
        returnValue.removeAllListeners();
      } else {
        // anything else goes through the retry/backoff machinery
        backOff(opts, returnValue, fatalError, function () {
          replicate(src, target, opts, returnValue);
        });
      }
    } else {
      returnValue.emit('complete', result);
      returnValue.removeAllListeners();
    }
  }
10072
  // Handle one row from the changes feed: apply the replication filter,
  // track progress for filtered-out rows, and queue matching changes onto
  // pendingBatch.
  function onChange(change, pending, lastSeq) {
    /* istanbul ignore if */
    if (returnValue.cancelled) {
      return completeReplication();
    }
    // Attach 'pending' property if server supports it (CouchDB 2.0+)
    /* istanbul ignore if */
    if (typeof pending === 'number') {
      pendingBatch.pending = pending;
    }

    var filter = filterChange(opts)(change);
    if (!filter) {
      // update processed items count by 1
      var task = src.activeTasks.get(taskId);
      if (task) {
        // we can assume that task exists here? shouldn't be deleted by here.
        var completed = task.completed_items || 0;
        src.activeTasks.update(taskId, {completed_items: ++completed});
      }
      return;
    }
    pendingBatch.seq = change.seq || lastSeq;
    pendingBatch.changes.push(change);
    returnValue.emit('checkpoint', { 'pending_batch': pendingBatch.seq });
    // flush eagerly when idle on a live feed; otherwise wait for batch_size
    nextTick(function () {
      processPendingBatch(batches.length === 0 && changesOpts.live);
    });
  }
10102
10103
  // A changes() request finished: either page through more results, or —
  // when the feed is drained — switch to live mode (continuous) / mark the
  // feed completed, checkpointing the final seq when nothing was queued.
  function onChangesComplete(changes) {
    changesPending = false;
    /* istanbul ignore if */
    if (returnValue.cancelled) {
      return completeReplication();
    }

    // if no results were returned then we're done,
    // else fetch more
    if (changes.results.length > 0) {
      changesOpts.since = changes.results[changes.results.length - 1].seq;
      getChanges();
      processPendingBatch(true);
    } else {

      var complete = function () {
        if (continuous) {
          // switch the feed to live mode and keep listening
          changesOpts.live = true;
          getChanges();
        } else {
          changesCompleted = true;
        }
        processPendingBatch(true);
      };

      // update the checkpoint so we start from the right seq next time
      if (!currentBatch && changes.results.length === 0) {
        writingCheckpoint = true;
        checkpointer.writeCheckpoint(changes.last_seq,
          session).then(function () {
          writingCheckpoint = false;
          result.last_seq = last_seq = changes.last_seq;
          if (returnValue.cancelled) {
            completeReplication();
            throw new Error('cancelled');
          } else {
            complete();
          }
        })
        .catch(onCheckpointError);
      } else {
        complete();
      }
    }
  }
10149
10150
  // The changes feed rejected: clear the pending flag and abort (or just
  // finish quietly if the replication was cancelled in the meantime).
  function onChangesError(err) {
    changesPending = false;
    /* istanbul ignore if */
    if (returnValue.cancelled) {
      return completeReplication();
    }
    abortReplication('changes rejected', err);
  }
10159
10160
  // Start (or restart) a changes() request against the source, wiring up
  // cancellation. Skipped while a request is pending, after completion, or
  // when the batch queue is full (backpressure via batches_limit).
  function getChanges() {
    if (!(
      !changesPending &&
      !changesCompleted &&
      batches.length < batches_limit
    )) {
      return;
    }
    changesPending = true;
    function abortChanges() {
      changes.cancel();
    }
    function removeListener() {
      returnValue.removeListener('cancel', abortChanges);
    }

    if (returnValue._changes) { // remove old changes() and listeners
      returnValue.removeListener('cancel', returnValue._abortChanges);
      returnValue._changes.cancel();
    }
    returnValue.once('cancel', abortChanges);

    var changes = src.changes(changesOpts)
      .on('change', onChange);
    // detach the cancel listener however the request settles
    changes.then(removeListener, removeListener);
    changes.then(onChangesComplete)
      .catch(onChangesError);

    if (opts.retry) {
      // save for later so we can cancel if necessary
      returnValue._changes = changes;
      returnValue._abortChanges = abortChanges;
    }
  }
10195
10196 function createTask(checkpoint) {
10197 return src.info().then(function (info) {
10198 var total_items = typeof opts.since === 'undefined' ?
10199 parseInt(info.update_seq, 10) - parseInt(checkpoint, 10) :
10200 parseInt(info.update_seq, 10);
10201
10202 taskId = src.activeTasks.add({
10203 name: `${continuous ? 'continuous ' : ''}replication from ${info.db_name}` ,
10204 total_items,
10205 });
10206
10207 return checkpoint;
10208 });
10209 }
10210
  // Kick off the changes feed: load (or create) the checkpoint, register the
  // active task, assemble the changes options from opts, then fetch the
  // first page via getChanges().
  function startChanges() {
    initCheckpointer().then(function () {
      /* istanbul ignore if */
      if (returnValue.cancelled) {
        completeReplication();
        return;
      }
      return checkpointer.getCheckpoint().then(createTask).then(function (checkpoint) {
        last_seq = checkpoint;
        initial_last_seq = checkpoint;
        changesOpts = {
          since: last_seq,
          limit: batch_size,
          batch_size,
          style,
          doc_ids,
          selector,
          return_docs: true // required so we know when we're done
        };
        if (opts.filter) {
          if (typeof opts.filter !== 'string') {
            // required for the client-side filter in onChange
            changesOpts.include_docs = true;
          } else { // ddoc filter
            changesOpts.filter = opts.filter;
          }
        }
        // Pass through optional feed tuning parameters only when supplied.
        if ('heartbeat' in opts) {
          changesOpts.heartbeat = opts.heartbeat;
        }
        if ('timeout' in opts) {
          changesOpts.timeout = opts.timeout;
        }
        if (opts.query_params) {
          changesOpts.query_params = opts.query_params;
        }
        if (opts.view) {
          changesOpts.view = opts.view;
        }
        getChanges();
      });
    }).catch(function (err) {
      abortReplication('getCheckpoint rejected with ', err);
    });
  }
10256
10257 /* istanbul ignore next */
10258 function onCheckpointError(err) {
10259 writingCheckpoint = false;
10260 abortReplication('writeCheckpoint completed with error', err);
10261 }
10262
  /* istanbul ignore if */
  if (returnValue.cancelled) { // cancelled immediately
    completeReplication();
    return;
  }

  // Attach lifecycle listeners exactly once per returnValue, even if this
  // replication is retried.
  if (!returnValue._addedListeners) {
    returnValue.once('cancel', completeReplication);

    // Bridge the events to the optional node-style completion callback.
    if (typeof opts.complete === 'function') {
      returnValue.once('error', opts.complete);
      returnValue.once('complete', function (result) {
        opts.complete(null, result);
      });
    }
    returnValue._addedListeners = true;
  }

  // With an explicit opts.since, persist it as the checkpoint before
  // starting; otherwise resume from the stored checkpoint.
  if (typeof opts.since === 'undefined') {
    startChanges();
  } else {
    initCheckpointer().then(function () {
      writingCheckpoint = true;
      return checkpointer.writeCheckpoint(opts.since, session);
    }).then(function () {
      writingCheckpoint = false;
      /* istanbul ignore if */
      if (returnValue.cancelled) {
        completeReplication();
        return;
      }
      last_seq = opts.since;
      startChanges();
    }).catch(onCheckpointError);
  }
}
10299
// A cancellable, promise-like EventEmitter handed back to the caller before
// the replication machinery has actually started, so it can be cancelled (or
// awaited) immediately.
class Replication extends EE {
  constructor() {
    super();
    this.cancelled = false;
    this.state = 'pending';
    // Bridge the event interface onto a promise: 'complete' resolves,
    // 'error' rejects, and then/catch delegate to it.
    const settled = new Promise((resolve, reject) => {
      this.once('complete', resolve);
      this.once('error', reject);
    });
    this.then = (onFulfilled, onRejected) => settled.then(onFulfilled, onRejected);
    this.catch = (onRejected) => settled.catch(onRejected);
    // Errors may be handled via the 'error' event instead of the promise, so
    // install a no-op rejection handler to suppress unhandled-rejection noise.
    this.catch(() => {});
  }

  // Flag the replication as cancelled and notify listeners.
  cancel() {
    this.cancelled = true;
    this.state = 'cancelled';
    this.emit('cancel');
  }

  // Tie this replication's lifetime to the source/target databases: if either
  // is destroyed, cancel. Idempotent — only the first call wires anything up.
  ready(src, target) {
    if (this._readyCalled) {
      return;
    }
    this._readyCalled = true;

    const onDestroy = () => this.cancel();
    src.once('destroyed', onDestroy);
    target.once('destroyed', onDestroy);
    const cleanup = () => {
      src.removeListener('destroyed', onDestroy);
      target.removeListener('destroyed', onDestroy);
    };
    this.once('complete', cleanup);
    this.once('error', cleanup);
  }
}
10347
// Accept either a database name/URL (string) or an existing database object;
// strings are instantiated via the configured PouchConstructor, anything
// else is passed through untouched.
function toPouch(db, opts) {
  const PouchCtor = opts.PouchConstructor;
  return typeof db === 'string' ? new PouchCtor(db, opts) : db;
}
10356
// Public replicate() entry point: normalizes the (opts, callback) overloads,
// validates filter parameters, clones the options, and hands the actual work
// to replicate() while returning a cancellable Replication handle.
function replicateWrapper(src, target, opts, callback) {

  // Support replicate(src, target, cb) and replicate(src, target).
  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }
  if (typeof opts === 'undefined') {
    opts = {};
  }

  if (opts.doc_ids && !Array.isArray(opts.doc_ids)) {
    throw createError(BAD_REQUEST,
      "`doc_ids` filter parameter is not a list.");
  }

  opts.complete = callback;
  opts = clone(opts);
  opts.continuous = opts.continuous || opts.live;
  opts.retry = 'retry' in opts ? opts.retry : false;
  opts.PouchConstructor = opts.PouchConstructor || this;
  const ret = new Replication(opts);
  replicate(toPouch(src, opts), toPouch(target, opts), opts, ret);
  return ret;
}
10383
// Public sync() entry point: normalizes the (opts, callback) overloads and
// constructs a bidirectional Sync over the two databases.
function sync(src, target, opts, callback) {
  // Support sync(src, target, cb) and sync(src, target).
  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }
  if (typeof opts === 'undefined') {
    opts = {};
  }
  opts = clone(opts);
  opts.PouchConstructor = opts.PouchConstructor || this;
  return new Sync(toPouch(src, opts), toPouch(target, opts), opts, callback);
}
10398
// Bidirectional replication: runs a push (src -> target) and a pull
// (target -> src) replication in parallel and multiplexes their events
// ('change', 'denied', 'paused', 'active') onto this single emitter,
// tagging each with its direction.
class Sync extends EE {
  constructor(src, target, opts, callback) {
    super();
    this.canceled = false;

    // Per-direction overrides: opts.push / opts.pull are shallow-merged over
    // the shared options.
    const optsPush = opts.push ? Object.assign({}, opts, opts.push) : opts;
    const optsPull = opts.pull ? Object.assign({}, opts, opts.pull) : opts;

    this.push = replicateWrapper(src, target, optsPush);
    this.pull = replicateWrapper(target, src, optsPull);

    this.pushPaused = true;
    this.pullPaused = true;

    // Forwarders that re-emit each direction's events on this Sync instance.
    const pullChange = (change) => {
      this.emit('change', {
        direction: 'pull',
        change
      });
    };
    const pushChange = (change) => {
      this.emit('change', {
        direction: 'push',
        change
      });
    };
    const pushDenied = (doc) => {
      this.emit('denied', {
        direction: 'push',
        doc
      });
    };
    const pullDenied = (doc) => {
      this.emit('denied', {
        direction: 'pull',
        doc
      });
    };
    // 'paused' is emitted only once BOTH directions are paused; 'active' is
    // emitted when one direction wakes while the other is still paused.
    const pushPaused = () => {
      this.pushPaused = true;
      /* istanbul ignore if */
      if (this.pullPaused) {
        this.emit('paused');
      }
    };
    const pullPaused = () => {
      this.pullPaused = true;
      /* istanbul ignore if */
      if (this.pushPaused) {
        this.emit('paused');
      }
    };
    const pushActive = () => {
      this.pushPaused = false;
      /* istanbul ignore if */
      if (this.pullPaused) {
        this.emit('active', {
          direction: 'push'
        });
      }
    };
    const pullActive = () => {
      this.pullPaused = false;
      /* istanbul ignore if */
      if (this.pushPaused) {
        this.emit('active', {
          direction: 'pull'
        });
      }
    };

    // Tracks which forwarders each side has asked to remove, so listeners on
    // this emitter are only dropped once both push and pull agree.
    let removed = {};

    const removeAll = (type) => { // type is 'push' or 'pull'
      return (event, func) => {
        const isChange = event === 'change' &&
          (func === pullChange || func === pushChange);
        const isDenied = event === 'denied' &&
          (func === pullDenied || func === pushDenied);
        const isPaused = event === 'paused' &&
          (func === pullPaused || func === pushPaused);
        const isActive = event === 'active' &&
          (func === pullActive || func === pushActive);

        if (isChange || isDenied || isPaused || isActive) {
          if (!(event in removed)) {
            removed[event] = {};
          }
          removed[event][type] = true;
          if (Object.keys(removed[event]).length === 2) {
            // both push and pull have asked to be removed
            this.removeAllListeners(event);
          }
        }
      };
    };

    // In live mode, one direction completing cancels the other.
    if (opts.live) {
      this.push.on('complete', this.pull.cancel.bind(this.pull));
      this.pull.on('complete', this.push.cancel.bind(this.push));
    }

    // Attach listener only if it is not already registered on the emitter.
    function addOneListener(ee, event, listener) {
      if (ee.listeners(event).indexOf(listener) == -1) {
        ee.on(event, listener);
      }
    }

    // Lazily attach the forwarders to the underlying replications the first
    // time someone subscribes to the corresponding event on this Sync.
    // NOTE: regular functions so `this` is the emitter that fired the event.
    this.on('newListener', function (event) {
      if (event === 'change') {
        addOneListener(this.pull, 'change', pullChange);
        addOneListener(this.push, 'change', pushChange);
      } else if (event === 'denied') {
        addOneListener(this.pull, 'denied', pullDenied);
        addOneListener(this.push, 'denied', pushDenied);
      } else if (event === 'active') {
        addOneListener(this.pull, 'active', pullActive);
        addOneListener(this.push, 'active', pushActive);
      } else if (event === 'paused') {
        addOneListener(this.pull, 'paused', pullPaused);
        addOneListener(this.push, 'paused', pushPaused);
      }
    });

    // Mirror: detach the forwarders when the last subscriber goes away.
    this.on('removeListener', function (event) {
      if (event === 'change') {
        this.pull.removeListener('change', pullChange);
        this.push.removeListener('change', pushChange);
      } else if (event === 'denied') {
        this.pull.removeListener('denied', pullDenied);
        this.push.removeListener('denied', pushDenied);
      } else if (event === 'active') {
        this.pull.removeListener('active', pullActive);
        this.push.removeListener('active', pushActive);
      } else if (event === 'paused') {
        this.pull.removeListener('paused', pullPaused);
        this.push.removeListener('paused', pushPaused);
      }
    });

    this.pull.on('removeListener', removeAll('pull'));
    this.push.on('removeListener', removeAll('push'));

    // Settle once both directions settle; mirror the result (or error) to the
    // optional node-style callback and to the 'complete'/'error' events.
    const promise = Promise.all([
      this.push,
      this.pull
    ]).then((resp) => {
      const out = {
        push: resp[0],
        pull: resp[1]
      };
      this.emit('complete', out);
      if (callback) {
        callback(null, out);
      }
      this.removeAllListeners();
      return out;
    }, (err) => {
      this.cancel();
      if (callback) {
        // if there's a callback, then the callback can receive
        // the error event
        callback(err);
      } else {
        // if there's no callback, then we're safe to emit an error
        // event, which would otherwise throw an unhandled error
        // due to 'error' being a special event in EventEmitters
        this.emit('error', err);
      }
      this.removeAllListeners();
      if (callback) {
        // no sense throwing if we're already emitting an 'error' event
        throw err;
      }
    });

    this.then = function (success, err) {
      return promise.then(success, err);
    };

    this.catch = function (err) {
      return promise.catch(err);
    };
  }

  // Cancel both directions. Idempotent.
  cancel() {
    if (!this.canceled) {
      this.canceled = true;
      this.push.cancel();
      this.pull.cancel();
    }
  }
}
10592
// PouchDB plugin that installs the replication API: static
// PouchDB.replicate / PouchDB.sync, a lazy per-instance db.replicate
// ({ from, to }) property, and the db.sync() convenience method.
function replication(PouchDB) {
  PouchDB.replicate = replicateWrapper;
  PouchDB.sync = sync;

  Object.defineProperty(PouchDB.prototype, 'replicate', {
    get() {
      // Build the { from, to } pair once per instance and cache it.
      if (typeof this.replicateMethods === 'undefined') {
        this.replicateMethods = {
          from: (other, opts, callback) =>
            this.constructor.replicate(other, this, opts, callback),
          to: (other, opts, callback) =>
            this.constructor.replicate(this, other, opts, callback)
        };
      }
      return this.replicateMethods;
    }
  });

  PouchDB.prototype.sync = function (dbName, opts, callback) {
    return this.constructor.sync(this, dbName, opts, callback);
  };
}
10618
// Assemble the full build: LevelDB storage adapter, HTTP adapter,
// map/reduce query support, and the replication/sync plugin defined above.
PouchDB.plugin(LevelPouch$1)
  .plugin(HttpPouch$1)
  .plugin(mapreduce)
  .plugin(replication);

// Pull from src because pouchdb-node/pouchdb-browser themselves
// NOTE(review): the comment above appears truncated by the bundler.

module.exports = PouchDB;
10627
\No newline at end of file