// PouchDB bundle (retrieved via UNPKG) — viewer header removed so the file parses; source begins with the imports below.
1import crypto from 'crypto';
2import nodeFetch, { Headers } from 'node-fetch';
3import fetchCookie from 'fetch-cookie';
4import { v4 } from 'uuid';
5import levelup from 'levelup';
6import ltgt from 'ltgt';
7import Codec from 'level-codec';
8import ReadableStreamCore from 'readable-stream';
9import Deque from 'double-ended-queue';
10import vuvuzela from 'vuvuzela';
11import fs from 'fs';
12import path from 'path';
13import level from 'level';
14import { obj } from 'through2';
15import LevelWriteStream from 'level-write-stream';
16import vm from 'vm';
17import EE from 'events';
18
// In the Node build, a "binary object" is simply a Buffer instance.
const isBinaryObject = (object) => object instanceof Buffer;
22
23var cloneBinaryObject = (buffer) => Buffer.from(buffer);
24
// most of this is borrowed from lodash.isPlainObject:
// https://github.com/fis-components/lodash.isplainobject/
// blob/29c358140a74f252aeb08c9eb28bef86f2217d4a/index.js

const funcToString = Function.prototype.toString;
const objectCtorString = funcToString.call(Object);

// True when `value` was created by the Object constructor (or has a
// null prototype); false for class instances, arrays, Dates, etc.
function isPlainObject(value) {
  const proto = Object.getPrototypeOf(value);
  /* istanbul ignore if */
  if (proto === null) { // e.g. Object.create(null)
    return true;
  }
  const Ctor = proto.constructor;
  if (typeof Ctor !== 'function' || !(Ctor instanceof Ctor)) {
    return false;
  }
  return funcToString.call(Ctor) === objectCtorString;
}
42
// Deep-clone JSON-ish values. Special cases:
// - Dates become ISO strings (keeps IndexedDB and other backends
//   consistent),
// - Buffers are copied byte-for-byte,
// - non-plain objects (e.g. Workers) are returned by reference,
// - properties whose cloned value is `undefined` are dropped.
function clone(object) {
  if (!object || typeof object !== 'object') {
    return object; // primitives (and null) pass straight through
  }

  if (Array.isArray(object)) {
    const copy = [];
    object.forEach(function (element, index) {
      copy[index] = clone(element);
    });
    return copy;
  }

  if (object instanceof Date && isFinite(object)) {
    return object.toISOString();
  }

  if (isBinaryObject(object)) {
    return cloneBinaryObject(object);
  }

  if (!isPlainObject(object)) {
    return object; // don't clone objects like Workers
  }

  const result = {};
  for (const key in object) {
    /* istanbul ignore else */
    if (Object.prototype.hasOwnProperty.call(object, key)) {
      const cloned = clone(object[key]);
      if (typeof cloned !== 'undefined') {
        result[key] = cloned;
      }
    }
  }
  return result;
}
86
// Guard a callback so it can only fire a single time; a second call is
// a programming error and throws (smoke test, should never happen).
function once(fun) {
  let called = false;
  return function (...args) {
    /* istanbul ignore if */
    if (called) {
      throw new Error('once called more than once');
    }
    called = true;
    fun.apply(this, args);
  };
}
100
// Wrap a callback-style function so it returns a Promise, while still
// honouring an optional trailing Node-style callback. Arguments are
// deep-cloned before invocation so the callee can't mutate the
// caller's objects.
function toPromise(func) {
  //create the function we will be returning
  return function (...args) {
    // Clone arguments
    args = clone(args);
    var self = this;
    // if the last argument is a function, assume its a callback
    var usedCB = (typeof args[args.length - 1] === 'function') ? args.pop() : false;
    var promise = new Promise(function (fulfill, reject) {
      var resp;
      try {
        // `once` guards against the callback firing twice
        var callback = once(function (err, mesg) {
          if (err) {
            reject(err);
          } else {
            fulfill(mesg);
          }
        });
        // create a callback for this invocation
        // apply the function in the orig context
        args.push(callback);
        resp = func.apply(self, args);
        // if func already returned a thenable, adopt it directly
        if (resp && typeof resp.then === 'function') {
          fulfill(resp);
        }
      } catch (e) {
        reject(e);
      }
    });
    // if there is a callback, call it back
    if (usedCB) {
      promise.then(function (result) {
        usedCB(null, result);
      }, usedCB);
    }
    return promise;
  };
}
139
// If anyone is listening for 'debug' events on the PouchDB constructor,
// emit the API call itself and — by wrapping the trailing callback —
// its eventual success/error outcome.
function logApiCall(self, name, args) {
  /* istanbul ignore if */
  if (self.constructor.listeners('debug').length) {
    var logArgs = ['api', self.name, name];
    // every argument except the trailing callback
    for (var i = 0; i < args.length - 1; i++) {
      logArgs.push(args[i]);
    }
    self.constructor.emit('debug', logArgs);

    // override the callback itself to log the response
    var origCallback = args[args.length - 1];
    args[args.length - 1] = function (err, res) {
      var responseArgs = ['api', self.name, name];
      responseArgs = responseArgs.concat(
        err ? ['error', err] : ['success', res]
      );
      self.constructor.emit('debug', responseArgs);
      origCallback(err, res);
    };
  }
}
161
// Wrap a public adapter method: rejects when the database is closed or
// destroyed, logs debug info, and defers the call until the adapter's
// task queue signals readiness.
function adapterFun(name, callback) {
  return toPromise(function (...args) {
    if (this._closed) {
      return Promise.reject(new Error('database is closed'));
    }
    if (this._destroyed) {
      return Promise.reject(new Error('database is destroyed'));
    }
    var self = this;
    logApiCall(self, name, args);
    if (!this.taskqueue.isReady) {
      // queue the call; it re-dispatches through self[name] once ready
      return new Promise(function (fulfill, reject) {
        self.taskqueue.addTask(function (failed) {
          if (failed) {
            reject(failed);
          } else {
            fulfill(self[name].apply(self, args));
          }
        });
      });
    }
    return callback.apply(this, args);
  });
}
186
// like underscore/lodash _.pick(): copy only the listed properties.
// Uses `in`, so inherited properties are picked up as well.
function pick(obj$$1, arr) {
  const res = {};
  for (const prop of arr) {
    if (prop in obj$$1) {
      res[prop] = obj$$1[prop];
    }
  }
  return res;
}
198
// Most browsers throttle concurrent requests at 6, so it's silly
// to shim _bulk_get by trying to launch potentially hundreds of requests
// and then letting the majority time out. We can handle this ourselves.
// (Used by bulkGet() below to cap in-flight db.get() calls.)
var MAX_NUM_CONCURRENT_REQUESTS = 6;
203
// Identity — used both as a no-op result formatter and as a
// truthiness predicate for Array#filter.
const identityFunction = (x) => x;
207
// Wrap a winning-rev `get` result so it matches the shape of an
// open_revs response: a one-element list of {ok: doc}.
function formatResultForOpenRevsGet(result) {
  return [{ ok: result }];
}
213
// shim for P/CouchDB adapters that don't directly implement _bulk_get
//
// Fetches each unique doc id via db.get(), with at most
// MAX_NUM_CONCURRENT_REQUESTS in flight at once, then reassembles the
// responses into the _bulk_get shape:
//   {results: [{id, docs: [{ok: ...} | {error: ...}]}, ...]}
function bulkGet(db, opts, callback) {
  var requests = opts.docs;

  // consolidate into one request per doc if possible
  var requestsById = new Map();
  requests.forEach(function (request) {
    if (requestsById.has(request.id)) {
      requestsById.get(request.id).push(request);
    } else {
      requestsById.set(request.id, [request]);
    }
  });

  var numDocs = requestsById.size;
  var numDone = 0;
  var perDocResults = new Array(numDocs);

  // flatten the per-doc results back into one entry per revision
  function collapseResultsAndFinish() {
    var results = [];
    perDocResults.forEach(function (res) {
      res.docs.forEach(function (info) {
        results.push({
          id: res.id,
          docs: [info]
        });
      });
    });
    callback(null, {results});
  }

  function checkDone() {
    if (++numDone === numDocs) {
      collapseResultsAndFinish();
    }
  }

  // record the outcome for one doc id, preserving the original order
  function gotResult(docIndex, id, docs) {
    perDocResults[docIndex] = {id, docs};
    checkDone();
  }

  var allRequests = [];
  requestsById.forEach(function (value, key) {
    allRequests.push(key);
  });

  // index of the next request to launch
  var i = 0;

  function nextBatch() {

    if (i >= allRequests.length) {
      return;
    }

    var upTo = Math.min(i + MAX_NUM_CONCURRENT_REQUESTS, allRequests.length);
    var batch = allRequests.slice(i, upTo);
    processBatch(batch, i);
    i += batch.length;
  }

  function processBatch(batch, offset) {
    batch.forEach(function (docId, j) {
      var docIdx = offset + j;
      var docRequests = requestsById.get(docId);

      // just use the first request as the "template"
      // TODO: The _bulk_get API allows for more subtle use cases than this,
      // but for now it is unlikely that there will be a mix of different
      // "atts_since" or "attachments" in the same request, since it's just
      // replicate.js that is using this for the moment.
      // Also, atts_since is aspirational, since we don't support it yet.
      var docOpts = pick(docRequests[0], ['atts_since', 'attachments']);
      docOpts.open_revs = docRequests.map(function (request) {
        // rev is optional, open_revs disallowed
        return request.rev;
      });

      // remove falsey / undefined revisions
      docOpts.open_revs = docOpts.open_revs.filter(identityFunction);

      var formatResult = identityFunction;

      if (docOpts.open_revs.length === 0) {
        delete docOpts.open_revs;

        // when fetching only the "winning" leaf,
        // transform the result so it looks like an open_revs
        // request
        formatResult = formatResultForOpenRevsGet;
      }

      // globally-supplied options
      ['revs', 'attachments', 'binary', 'ajax', 'latest'].forEach(function (param) {
        if (param in opts) {
          docOpts[param] = opts[param];
        }
      });
      db.get(docId, docOpts, function (err, res) {
        var result;
        /* istanbul ignore if */
        if (err) {
          result = [{error: err}];
        } else {
          result = formatResult(res);
        }
        gotResult(docIdx, docId, result);
        // a completed request frees a slot for the next batch
        nextBatch();
      });
    });
  }

  nextBatch();

}
329
// in Node of course this is false
// (the browser build presumably feature-detects localStorage here;
// this stub keeps the Changes class below working without it)
function hasLocalStorage() {
  return false;
}
334
// Microtask scheduler: prefer the standard queueMicrotask, falling
// back to a resolved-Promise hop on older runtimes.
const nextTick = typeof queueMicrotask === "function"
  ? queueMicrotask
  : function nextTick(fn) {
    Promise.resolve().then(fn);
  };
340
// Cross-"window" change notifier. In browsers it also relays
// notifications via localStorage 'storage' events; in Node,
// hasLocalStorage() is always false, so this is purely an in-process
// EventEmitter keyed by database name.
class Changes extends EE {
  constructor() {
    super();

    // id -> listener function registered through addListener()
    this._listeners = {};

    if (hasLocalStorage()) {
      addEventListener("storage", (e) => {
        this.emit(e.key);
      });
    }
  }

  // Subscribe opts.onChange to changes on `db`, deduplicated by `id`.
  // Each notification replays db.changes() from opts.since; a notify
  // that arrives mid-replay is coalesced into one follow-up run
  // (the 'waiting' state).
  addListener(dbName, id, db, opts) {
    if (this._listeners[id]) {
      return;
    }
    var inprogress = false;
    var self = this;
    function eventFunction() {
      // the listener may have been removed since the event was emitted
      if (!self._listeners[id]) {
        return;
      }
      if (inprogress) {
        inprogress = 'waiting';
        return;
      }
      inprogress = true;
      var changesOpts = pick(opts, [
        'style', 'include_docs', 'attachments', 'conflicts', 'filter',
        'doc_ids', 'view', 'since', 'query_params', 'binary', 'return_docs'
      ]);

      function onError() {
        inprogress = false;
      }

      db.changes(changesOpts).on('change', function (c) {
        // only forward changes newer than the last seen seq
        if (c.seq > opts.since && !opts.cancelled) {
          opts.since = c.seq;
          opts.onChange(c);
        }
      }).on('complete', function () {
        if (inprogress === 'waiting') {
          // a notify() arrived mid-run; run once more to catch up
          nextTick(eventFunction);
        }
        inprogress = false;
      }).on('error', onError);
    }
    this._listeners[id] = eventFunction;
    this.on(dbName, eventFunction);
  }

  removeListener(dbName, id) {
    if (!(id in this._listeners)) {
      return;
    }
    super.removeListener(dbName, this._listeners[id]);
    delete this._listeners[id];
  }

  // Poke other browser windows by toggling a localStorage key
  // (no-op in Node).
  notifyLocalWindows(dbName) {
    //do a useless change on a storage thing
    //in order to get other windows's listeners to activate
    if (hasLocalStorage()) {
      localStorage[dbName] = (localStorage[dbName] === "a") ? "b" : "a";
    }
  }

  notify(dbName) {
    this.emit(dbName);
    this.notifyLocalWindows(dbName);
  }
}
415
/**
 * Invoke `console[method](...args)` only when a console with that
 * method actually exists (guards against stripped or absent consoles
 * in exotic host environments).
 *
 * Modernized: rest parameters replace the legacy `arguments` +
 * Array.prototype.slice idiom; callers are unaffected.
 *
 * @param {string} method - console method name, e.g. 'log' or 'warn'.
 * @param {...*} args - forwarded verbatim to the console method.
 */
function guardedConsole(method, ...args) {
  /* istanbul ignore else */
  if (typeof console !== 'undefined' && typeof console[method] === 'function') {
    console[method](...args);
  }
}
423
// Pick a pseudo-random integer in [min, max] after normalising the
// inputs: a missing/invalid max doubles min (or yields [0, 2)), and
// the result is capped so it never exceeds ten minutes (in ms).
function randomNumber(min, max) {
  const maxTimeout = 600000; // Hard-coded default of 10 minutes
  let lo = parseInt(min, 10) || 0;
  let hi = parseInt(max, 10);
  if (Number.isNaN(hi) || hi <= lo) {
    hi = (lo || 1) << 1; // doubling
  } else {
    hi += 1;
  }
  // In order to not exceed maxTimeout, pick a random value between
  // half of maxTimeout and maxTimeout
  if (hi > maxTimeout) {
    lo = maxTimeout >> 1; // divide by two
    hi = maxTimeout;
  }
  return ~~(Math.random() * (hi - lo) + lo); // ~~ coerces to an int, but fast
}
443
// Default replication back-off: the first retry (min falsy) lands
// somewhere in [0, 2000]ms; later retries let randomNumber() double
// the previous delay.
function defaultBackOff(min) {
  const max = min ? 0 : 2000;
  return randomNumber(min, max);
}
451
// We assume Node users don't need to see this warning
// (no-op stub; presumably the browser build emits a warning here —
// TODO confirm against the browser bundle)
var res = function () {};
454
// Base class for PouchDB errors, carrying a CouchDB-style
// status / name / message triple plus an `error: true` marker.
class PouchError extends Error {
  constructor(status, error, reason) {
    super();
    this.status = status;
    this.name = error;
    this.message = reason;
    this.error = true;
  }

  toString() {
    // Serialises `this.reason` too — only present on instances that
    // had it copied on (see createError elsewhere in this file).
    const { status, name, message, reason } = this;
    return JSON.stringify({ status, name, message, reason });
  }
}
473
// CouchDB-compatible error templates. These singletons are cloned via
// createError() below before being thrown, so they (normally) stay
// unmutated.
var UNAUTHORIZED = new PouchError(401, 'unauthorized', "Name or password is incorrect.");
var MISSING_BULK_DOCS = new PouchError(400, 'bad_request', "Missing JSON list of 'docs'");
var MISSING_DOC = new PouchError(404, 'not_found', 'missing');
var REV_CONFLICT = new PouchError(409, 'conflict', 'Document update conflict');
var INVALID_ID = new PouchError(400, 'bad_request', '_id field must contain a string');
var MISSING_ID = new PouchError(412, 'missing_id', '_id is required for puts');
var RESERVED_ID = new PouchError(400, 'bad_request', 'Only reserved document ids may start with underscore.');
var NOT_OPEN = new PouchError(412, 'precondition_failed', 'Database not open');
var UNKNOWN_ERROR = new PouchError(500, 'unknown_error', 'Database encountered an unknown error');
var BAD_ARG = new PouchError(500, 'badarg', 'Some query argument is invalid');
var INVALID_REQUEST = new PouchError(400, 'invalid_request', 'Request was invalid');
var QUERY_PARSE_ERROR = new PouchError(400, 'query_parse_error', 'Some query parameter is invalid');
var DOC_VALIDATION = new PouchError(500, 'doc_validation', 'Bad special document member');
var BAD_REQUEST = new PouchError(400, 'bad_request', 'Something wrong with the request');
var NOT_AN_OBJECT = new PouchError(400, 'bad_request', 'Document must be a JSON object');
var DB_MISSING = new PouchError(404, 'not_found', 'Database not found');
var IDB_ERROR = new PouchError(500, 'indexed_db_went_bad', 'unknown');
var WSQ_ERROR = new PouchError(500, 'web_sql_went_bad', 'unknown');
// NOTE(review): the doubled "went_went" matches upstream PouchDB;
// kept byte-for-byte for compatibility with anything matching on it.
var LDB_ERROR = new PouchError(500, 'levelDB_went_went_bad', 'unknown');
var FORBIDDEN = new PouchError(403, 'forbidden', 'Forbidden by design doc validate_doc_update function');
var INVALID_REV = new PouchError(400, 'bad_request', 'Invalid rev format');
var FILE_EXISTS = new PouchError(412, 'file_exists', 'The database could not be created, the file already exists.');
var MISSING_STUB = new PouchError(412, 'missing_stub', 'A pre-existing attachment stub wasn\'t found');
var INVALID_URL = new PouchError(413, 'invalid_url', 'Provided URL is invalid');
498
// Build a throwable copy of one of the PouchError templates above,
// optionally overriding its `reason`. Own properties are copied
// manually (rather than subclassing) so instances JSON-serialise
// cleanly, while sharing PouchError.prototype keeps `instanceof`
// and toString() working.
function createError(error, reason) {
  function CustomPouchError(reason) {
    // inherit error properties from our parent error manually
    // so as to allow proper JSON parsing.
    var names = Object.getOwnPropertyNames(error);
    for (var i = 0, len = names.length; i < len; i++) {
      if (typeof error[names[i]] !== 'function') {
        this[names[i]] = error[names[i]];
      }
    }

    // capture a stack trace when the template didn't carry one
    if (this.stack === undefined) {
      this.stack = (new Error()).stack;
    }

    if (reason !== undefined) {
      this.reason = reason;
    }
  }
  CustomPouchError.prototype = PouchError.prototype;
  return new CustomPouchError(reason);
}
521
// Normalise an error-ish value from an HTTP response into an object
// with name/status/message/stack.
// NOTE(review): mutates `err` in place — and when a non-object is
// passed, it mutates the shared UNKNOWN_ERROR singleton above.
function generateErrorFromResponse(err) {

  if (typeof err !== 'object') {
    var data = err;
    err = UNKNOWN_ERROR;
    err.data = data;
  }

  // CouchDB reports update conflicts via the `error` field
  if ('error' in err && err.error === 'conflict') {
    err.name = 'conflict';
    err.status = 409;
  }

  if (!('name' in err)) {
    err.name = err.error || 'unknown';
  }

  if (!('status' in err)) {
    err.status = 500;
  }

  if (!('message' in err)) {
    err.message = err.message || err.reason;
  }

  if (!('stack' in err)) {
    err.stack = (new Error()).stack;
  }

  return err;
}
553
// Run a changes filter defensively. Returns `true` when the doc is
// REJECTED by the filter (note the negation), `false` when accepted,
// or a BAD_REQUEST error object when the filter itself throws.
function tryFilter(filter, doc, req) {
  try {
    return !filter(doc, req);
  } catch (err) {
    const msg = 'Filter function threw: ' + err.toString();
    return createError(BAD_REQUEST, msg);
  }
}
562
// Build a predicate for the changes feed. The returned function
// mutates `change` (strips the doc or stubs attachment bodies based
// on opts) and returns true to keep the change, false to drop it, or
// an error object when the user-supplied filter threw.
function filterChange(opts) {
  var req = {};
  var hasFilter = opts.filter && typeof opts.filter === 'function';
  req.query = opts.query_params;

  return function filter(change) {
    if (!change.doc) {
      // CSG sends events on the changes feed that don't have documents,
      // this hack makes a whole lot of existing code robust.
      change.doc = {};
    }

    // tryFilter returns true when REJECTED, or an error object on throw
    var filterReturn = hasFilter && tryFilter(opts.filter, change.doc, req);

    if (typeof filterReturn === 'object') {
      return filterReturn;
    }

    if (filterReturn) {
      return false;
    }

    if (!opts.include_docs) {
      delete change.doc;
    } else if (!opts.attachments) {
      // keep attachment metadata but mark bodies as stubs
      for (var att in change.doc._attachments) {
        /* istanbul ignore else */
        if (Object.prototype.hasOwnProperty.call(change.doc._attachments, att)) {
          change.doc._attachments[att].stub = true;
        }
      }
    }
    return true;
  };
}
598
// shim for Function.prototype.name,
// for browsers that don't support it like IE

/* istanbul ignore next */
function f() {}

// feature-detect Function.prototype.name support
var hasName = f.name;
var res$1;

// We don't run coverage in IE
/* istanbul ignore else */
if (hasName) {
  res$1 = function (fun) {
    return fun.name;
  };
} else {
  // fall back to parsing the name out of fun.toString()
  res$1 = function (fun) {
    var match = fun.toString().match(/^\s*function\s*(?:(\S+)\s*)?\(/);
    if (match && match[1]) {
      return match[1];
    }
    else {
      return '';
    }
  };
}

var functionName = res$1;
627
// Determine if an ID is valid:
// - invalid IDs begin with an underscore that does not begin '_design'
//   or '_local'
// - any other non-empty string value is a valid id
// Throws the matching PouchError template; returns undefined when ok.
function invalidIdError(id) {
  if (!id) {
    throw createError(MISSING_ID);
  }
  if (typeof id !== 'string') {
    throw createError(INVALID_ID);
  }
  if (/^_/.test(id) && !(/^_(design|local)/).test(id)) {
    throw createError(RESERVED_ID);
  }
}
646
// Checks if a PouchDB object is "remote" (HTTP-backed) or not,
// preferring the explicit `_remote` flag over the deprecated
// db.type() probe.
function isRemote(db) {
  const { _remote } = db;
  if (typeof _remote === 'boolean') {
    return _remote;
  }
  /* istanbul ignore next */
  if (typeof db.type === 'function') {
    guardedConsole('warn',
      'db.type() is deprecated and will be removed in ' +
      'a future version of PouchDB');
    return db.type() === 'http';
  }
  /* istanbul ignore next */
  return false;
}
663
// Count listeners for `type` on `ee`, preferring the instance method
// and falling back to the legacy static EE.listenerCount.
function listenerCount(ee, type) {
  if ('listenerCount' in ee) {
    return ee.listenerCount(type);
  }
  return EE.listenerCount(ee, type);
}
668
// Split 'ddoc/view' into [ddocName, viewName]. A bare name maps to
// itself ([name, name]); empty input or extra '/' segments yield null.
function parseDesignDocFunctionName(s) {
  if (!s) {
    return null;
  }
  const parts = s.split('/');
  switch (parts.length) {
    case 2:
      return parts;
    case 1:
      return [s, s];
    default:
      return null;
  }
}
682
// Canonicalise a design-doc function name to the 'ddoc/fn' form,
// or null when it can't be parsed.
function normalizeDesignDocFunctionName(s) {
  const parsed = parseDesignDocFunctionName(s);
  return parsed === null ? null : parsed.join('/');
}
687
// originally parseUri 1.2.2, now patched by us
// (c) Steven Levithan <stevenlevithan.com>
// MIT License

// component names, indexed to match the regex capture groups below
var keys = ["source", "protocol", "authority", "userInfo", "user", "password",
  "host", "port", "relative", "path", "directory", "file", "query", "anchor"];
// property name under which parsed query parameters are collected
var qName ="queryKey";
// splits a query string into key/value pairs
var qParser = /(?:^|&)([^&=]*)=?([^&]*)/g;

// use the "loose" parser
/* eslint no-useless-escape: 0 */
var parser = /^(?:(?![^:@]+:[^:@\/]*@)([^:\/?#.]+):)?(?:\/\/)?((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?)(((\/(?:[^?#](?![^?#\/]*\.[^?#\/.]+(?:[?#]|$)))*\/?)?([^?#\/]*))(?:\?([^#]*))?(?:#(.*))?)/;
699
// Parse a URI with the "loose" regex above into named components
// (see `keys`), percent-decoding user/password, and collect query
// parameters under uri.queryKey.
function parseUri(str) {
  var m = parser.exec(str);
  var uri = {};
  var i = 14;

  // walk the 14 capture groups from the end, mapping each onto its key
  while (i--) {
    var key = keys[i];
    var value = m[i] || "";
    var encoded = ['user', 'password'].indexOf(key) !== -1;
    uri[key] = encoded ? decodeURIComponent(value) : value;
  }

  uri[qName] = {};
  // keys[12] is "query"; split it into individual key/value pairs
  uri[keys[12]].replace(qParser, function ($0, $1, $2) {
    if ($1) {
      uri[qName][$1] = $2;
    }
  });

  return uri;
}
721
// Based on https://github.com/alexdavid/scope-eval v0.0.3

// this is essentially the "update sugar" function from daleharvey/pouchdb#1388
// the diffFun tells us what delta to apply to the doc. it either returns
// the doc, or false if it doesn't need to do an update after all
//
// Resolves to {updated: <bool>, rev: <string>}. A 409 on the put is
// retried by re-running the whole upsert (see tryAndPut below).
function upsert(db, docId, diffFun) {
  return db.get(docId)
    .catch(function (err) {
      /* istanbul ignore next */
      if (err.status !== 404) {
        throw err;
      }
      // doc doesn't exist yet: diff against an empty object
      return {};
    })
    .then(function (doc) {
      // the user might change the _rev, so save it for posterity
      var docRev = doc._rev;
      var newDoc = diffFun(doc);

      if (!newDoc) {
        // if the diffFun returns falsy, we short-circuit as
        // an optimization
        return {updated: false, rev: docRev};
      }

      // users aren't allowed to modify these values,
      // so reset them here
      newDoc._id = docId;
      newDoc._rev = docRev;
      return tryAndPut(db, newDoc, diffFun);
    });
}
754
// Attempt the put; on a 409 conflict, re-run the whole upsert so the
// diff is recomputed against the newest revision. Any other error
// propagates to the caller.
function tryAndPut(db, doc, diffFun) {
  return db.put(doc).then(function (res) {
    return {
      updated: true,
      rev: res.rev
    };
  }, function (err) {
    /* istanbul ignore next */
    if (err.status !== 409) {
      throw err;
    }
    return upsert(db, doc._id, diffFun);
  });
}
769
// MD5 the given binary data and hand the base64 digest to `callback`.
// Callback-shaped to mirror the asynchronous browser implementation.
function binaryMd5(data, callback) {
  const hash = crypto.createHash('md5');
  hash.update(data, 'binary');
  callback(hash.digest('base64'));
}
774
// Hex MD5 of a string (used e.g. for deterministic rev generation).
function stringMd5(string) {
  const hash = crypto.createHash('md5');
  return hash.update(string, 'binary').digest('hex');
}
778
/**
 * Creates a new revision string that does NOT include the revision height
 * For example '56649f1b0506c6ca9fda0746eb0cacdf'
 *
 * With deterministic_revs the hash is the MD5 of the doc (minus its
 * _rev_tree); otherwise it is a random v4 UUID with dashes stripped.
 */
function rev(doc, deterministic_revs) {
  if (!deterministic_revs) {
    return v4().replace(/-/g, '').toLowerCase();
  }

  // shallow copy so deleting _rev_tree doesn't touch the caller's doc
  var mutateableDoc = Object.assign({}, doc);
  delete mutateableDoc._rev_tree;
  return stringMd5(JSON.stringify(mutateableDoc));
}

var uuid = v4; // mimic old import, only v4 is ever used elsewhere
794
// We fetch all leafs of the revision tree, and sort them based on tree length
// and whether they were deleted, undeleted documents with the longest revision
// tree (most edits) win
// The final sort algorithm is slightly documented in a sidebar here:
// http://guide.couchdb.org/draft/conflicts.html
//
// Returns the winning rev as "<pos>-<hash>".
function winningRev(metadata) {
  var winningId;
  var winningPos;
  var winningDeleted;
  var toVisit = metadata.rev_tree.slice();
  var node;
  while ((node = toVisit.pop())) {
    var tree = node.ids;
    var branches = tree[2];
    var pos = node.pos;
    if (branches.length) { // non-leaf
      for (var i = 0, len = branches.length; i < len; i++) {
        toVisit.push({pos: pos + 1, ids: branches[i]});
      }
      continue;
    }
    var deleted = !!tree[1].deleted;
    var id = tree[0];
    // sort by deleted, then pos, then id: a non-deleted leaf beats a
    // deleted one; otherwise the deeper, then lexically larger, leaf wins
    if (!winningId || (winningDeleted !== deleted ? winningDeleted :
        winningPos !== pos ? winningPos < pos : winningId < id)) {
      winningId = id;
      winningPos = pos;
      winningDeleted = deleted;
    }
  }

  return winningPos + '-' + winningId;
}
829
// Pretty much all below can be combined into a higher order function to
// traverse revisions.
// Depth-first walk over every node of a rev tree (an array of
// {pos, ids} roots). `callback(isLeaf, pos, revHash, ctx, opts)` runs
// per node; its return value becomes the `ctx` seen by that node's
// children.
function traverseRevTree(revs, callback) {
  const toVisit = revs.slice();
  let node;
  while ((node = toVisit.pop())) {
    const { pos, ids: tree, ctx } = node;
    const branches = tree[2];
    const newCtx = callback(branches.length === 0, pos, tree[0], ctx, tree[1]);
    branches.forEach(function (branch) {
      toVisit.push({ pos: pos + 1, ids: branch, ctx: newCtx });
    });
  }
}
849
// Ascending comparator on the `pos` field.
const sortByPos = (a, b) => a.pos - b.pos;
853
// Gather every leaf revision of a rev tree as {rev, opts}, ordered
// deepest-first (highest pos first). The temporary `pos` field used
// for sorting is stripped before returning.
function collectLeaves(revs) {
  const leaves = [];
  traverseRevTree(revs, function (isLeaf, pos, id, acc, opts) {
    if (isLeaf) {
      leaves.push({ rev: pos + '-' + id, pos, opts });
    }
  });
  leaves.sort(sortByPos).reverse();
  leaves.forEach(function (leaf) {
    delete leaf.pos;
  });
  return leaves;
}
867
// returns revs of all conflicts that is leaves such that
// 1. are not deleted and
// 2. are different than winning revision
function collectConflicts(metadata) {
  var win = winningRev(metadata);
  var leaves = collectLeaves(metadata.rev_tree);
  var conflicts = [];
  for (var i = 0, len = leaves.length; i < len; i++) {
    var leaf = leaves[i];
    // a live leaf that isn't the winner is, by definition, a conflict
    if (leaf.rev !== win && !leaf.opts.deleted) {
      conflicts.push(leaf.rev);
    }
  }
  return conflicts;
}
883
// compact a tree by marking its non-leafs as missing,
// and return a list of revs to delete
function compactTree(metadata) {
  var revs = [];
  traverseRevTree(metadata.rev_tree, function (isLeaf, pos,
                                               revHash, ctx, opts) {
    // only non-leaf revisions that still have bodies are compactable
    if (opts.status === 'available' && !isLeaf) {
      revs.push(pos + '-' + revHash);
      opts.status = 'missing';
    }
  });
  return revs;
}
897
// `findPathToLeaf()` returns an array of revs that goes from the specified
// leaf rev to the root of that leaf’s branch.
//
// eg. for this rev tree:
// 1-9692 ▶ 2-37aa ▶ 3-df22 ▶ 4-6e94 ▶ 5-df4a ▶ 6-6a3a ▶ 7-57e5
// ┃ ┗━━━━━━▶ 5-8d8c ▶ 6-65e0
// ┗━━━━━━▶ 3-43f6 ▶ 4-a3b4
//
// For a `targetRev` of '7-57e5', `findPathToLeaf()` would return ['7-57e5', '6-6a3a', '5-df4a']
// The `revs` argument has the same structure as what `revs_tree` has on e.g.
// the IndexedDB representation of the rev tree datastructure. Please refer to
// tests/unit/test.purge.js for examples of what these look like.
//
// This function will throw an error if:
// - The requested revision does not exist
// - The requested revision is not a leaf
function findPathToLeaf(revs, targetRev) {
  let path$$1 = [];
  const toVisit = revs.slice();

  let node;
  while ((node = toVisit.pop())) {
    const { pos, ids: tree } = node;
    const rev = `${pos}-${tree[0]}`;
    const branches = tree[2];

    // just assuming we're already working on the path up towards our desired leaf.
    path$$1.push(rev);

    // we've reached the leaf of our dreams, so return the computed path.
    if (rev === targetRev) {
      //…unleeeeess
      if (branches.length !== 0) {
        throw new Error('The requested revision is not a leaf');
      }
      return path$$1.reverse();
    }

    // this is based on the assumption that after we have a leaf (`branches.length == 0`), we handle the next
    // branch. this is true for all branches other than the path leading to the winning rev (which is 7-57e5 in
    // the example above. i've added a reset condition for branching nodes (`branches.length > 1`) as well.
    if (branches.length === 0 || branches.length > 1) {
      path$$1 = [];
    }

    // as a next step, we push the branches of this node to `toVisit` for visiting it during the next iteration
    for (let i = 0, len = branches.length; i < len; i++) {
      toVisit.push({ pos: pos + 1, ids: branches[i] });
    }
  }
  // exhausted the whole tree without ever hitting targetRev
  if (path$$1.length === 0) {
    throw new Error('The requested revision does not exist');
  }
  return path$$1.reverse();
}
953
// Expand a rev tree into the complete list of root→leaf paths. Each
// path is {pos: <pos of its first entry>, ids: [{id, opts}, ...]}
// ordered from root to leaf.
function rootToLeaf(revs) {
  const paths = [];
  const toVisit = revs.slice();
  let node;
  while ((node = toVisit.pop())) {
    const { pos, ids: tree, history: prior } = node;
    const [id, opts, branches] = tree;
    const history = prior ? prior.slice() : [];
    history.push({ id, opts });
    if (branches.length === 0) {
      // leaf: the accumulated history is one full path
      paths.push({ pos: pos + 1 - history.length, ids: history });
    }
    branches.forEach(function (branch) {
      toVisit.push({ pos: pos + 1, ids: branch, history });
    });
  }
  return paths.reverse();
}
978
// for a better overview of what this is doing, read:

// Ascending comparator on the `pos` field (local duplicate kept by
// the bundler).
const sortByPos$1 = (a, b) => a.pos - b.pos;
984
// classic binary search: returns the index of the first element that
// is NOT less than `item` under `comparator` (i.e. the insertion point
// that keeps the array sorted).
function binarySearch(arr, item, comparator) {
  let low = 0;
  let high = arr.length;
  while (low < high) {
    const mid = (low + high) >>> 1;
    if (comparator(arr[mid], item) < 0) {
      low = mid + 1;
    } else {
      high = mid;
    }
  }
  return low;
}
1000
// assuming the arr is sorted, insert the item in the proper place
// (mutates `arr` in place via splice)
function insertSorted(arr, item, comparator) {
  arr.splice(binarySearch(arr, item, comparator), 0, item);
}
1006
// Turn a path as a flat array (of {id, opts}) into a tree with a
// single branch. The first `numStemmed` entries are dropped from the
// start of the path; returns undefined when nothing remains.
function pathToTree(path$$1, numStemmed) {
  let root;
  let leaf;
  for (let i = numStemmed, len = path$$1.length; i < len; i++) {
    const { id, opts } = path$$1[i];
    const currentLeaf = [id, opts, []];
    if (leaf) {
      leaf[2].push(currentLeaf);
    } else {
      root = currentLeaf;
    }
    leaf = currentLeaf;
  }
  return root;
}
1025
// compare the IDs (first element) of two tree nodes. Note this never
// returns 0: equal ids compare as "greater", matching the original.
function compareTree(a, b) {
  if (a[0] < b[0]) {
    return -1;
  }
  return 1;
}
1030
// Merge two trees together
// The roots of tree1 and tree2 must be the same revision
//
// Mutates in_tree1 in place; returns {conflicts, tree} where
// `conflicts` is false, 'new_leaf' or 'new_branch'.
function mergeTree(in_tree1, in_tree2) {
  var queue = [{tree1: in_tree1, tree2: in_tree2}];
  var conflicts = false;
  while (queue.length > 0) {
    var item = queue.pop();
    var tree1 = item.tree1;
    var tree2 = item.tree2;

    // a rev stays 'available' if either side still has its body
    if (tree1[1].status || tree2[1].status) {
      tree1[1].status =
        (tree1[1].status === 'available' ||
        tree2[1].status === 'available') ? 'available' : 'missing';
    }

    for (var i = 0; i < tree2[2].length; i++) {
      if (!tree1[2][0]) {
        // tree1 has no children here: adopt tree2's child wholesale
        conflicts = 'new_leaf';
        tree1[2][0] = tree2[2][i];
        continue;
      }

      var merged = false;
      for (var j = 0; j < tree1[2].length; j++) {
        // same child rev hash on both sides: merge those subtrees too
        if (tree1[2][j][0] === tree2[2][i][0]) {
          queue.push({tree1: tree1[2][j], tree2: tree2[2][i]});
          merged = true;
        }
      }
      if (!merged) {
        // tree2's child is unknown to tree1: insert it as a new branch
        conflicts = 'new_branch';
        insertSorted(tree1[2], tree2[2][i], compareTree);
      }
    }
  }
  return {conflicts, tree: in_tree1};
}
1069
// Merge `path$$1` (a {pos, ids} single-branch tree) into the forest
// `tree`. When dontExpand is true (used after stemming) roots are only
// merged if they start at the same position. Returns {tree, conflicts}
// where conflicts is 'new_leaf', 'new_branch' or 'internal_node'.
function doMerge(tree, path$$1, dontExpand) {
  var restree = [];
  var conflicts = false;
  var merged = false;
  var res;

  // empty forest: the path becomes the whole tree
  if (!tree.length) {
    return {tree: [path$$1], conflicts: 'new_leaf'};
  }

  for (var i = 0, len = tree.length; i < len; i++) {
    var branch = tree[i];
    if (branch.pos === path$$1.pos && branch.ids[0] === path$$1.ids[0]) {
      // Paths start at the same position and have the same root, so they need
      // merged
      res = mergeTree(branch.ids, path$$1.ids);
      restree.push({pos: branch.pos, ids: res.tree});
      conflicts = conflicts || res.conflicts;
      merged = true;
    } else if (dontExpand !== true) {
      // The paths start at a different position, take the earliest path and
      // traverse up until it as at the same point from root as the path we
      // want to merge. If the keys match we return the longer path with the
      // other merged After stemming we don't want to expand the trees

      var t1 = branch.pos < path$$1.pos ? branch : path$$1;
      var t2 = branch.pos < path$$1.pos ? path$$1 : branch;
      var diff = t2.pos - t1.pos;

      // nodes in t1 exactly `diff` levels down whose rev hash matches
      // t2's root are candidate attachment points for the graft
      var candidateParents = [];

      var trees = [];
      trees.push({ids: t1.ids, diff, parent: null, parentIdx: null});
      while (trees.length > 0) {
        var item = trees.pop();
        if (item.diff === 0) {
          if (item.ids[0] === t2.ids[0]) {
            candidateParents.push(item);
          }
          continue;
        }
        var elements = item.ids[2];
        for (var j = 0, elementsLen = elements.length; j < elementsLen; j++) {
          trees.push({
            ids: elements[j],
            diff: item.diff - 1,
            parent: item.ids,
            parentIdx: j
          });
        }
      }

      var el = candidateParents[0];

      if (!el) {
        // no common point found: keep this branch untouched
        restree.push(branch);
      } else {
        // graft the merged subtree back into t1 at the match point
        res = mergeTree(el.ids, t2.ids);
        el.parent[2][el.parentIdx] = res.tree;
        restree.push({pos: t1.pos, ids: t1.ids});
        conflicts = conflicts || res.conflicts;
        merged = true;
      }
    } else {
      restree.push(branch);
    }
  }

  // We didnt find anything to merge into: the path is a brand-new root
  if (!merged) {
    restree.push(path$$1);
  }

  restree.sort(sortByPos$1);

  return {
    tree: restree,
    conflicts: conflicts || 'internal_node'
  };
}
1150
1151// To ensure we don't grow the revision tree infinitely, we stem old revisions
// Trim each root-to-leaf path of `tree` down to at most `depth` revisions.
// Returns {tree, revs}: the stemmed forest plus the list of "pos-hash" rev
// strings that were cut off (whose data can now be deleted by the caller).
function stem(tree, depth) {
  // First we break out the tree into a complete list of root to leaf paths
  var paths = rootToLeaf(tree);
  var stemmedRevs;

  var result;
  for (var i = 0, len = paths.length; i < len; i++) {
    // Then for each path, we cut off the start of the path based on the
    // `depth` to stem to, and generate a new set of flat trees
    var path$$1 = paths[i];
    var stemmed = path$$1.ids;
    var node;
    if (stemmed.length > depth) {
      // only do the stemming work if we actually need to stem
      if (!stemmedRevs) {
        stemmedRevs = {}; // avoid allocating this object unnecessarily
      }
      var numStemmed = stemmed.length - depth;
      // the new root sits `numStemmed` levels below the old one
      node = {
        pos: path$$1.pos + numStemmed,
        ids: pathToTree(stemmed, numStemmed)
      };

      // record every rev that was cut off this path
      for (var s = 0; s < numStemmed; s++) {
        var rev = (path$$1.pos + s) + '-' + stemmed[s].id;
        stemmedRevs[rev] = true;
      }
    } else { // no need to actually stem
      node = {
        pos: path$$1.pos,
        ids: pathToTree(stemmed, 0)
      };
    }

    // Then we remerge all those flat trees together, ensuring that we don't
    // connect trees that would go beyond the depth limit
    if (result) {
      result = doMerge(result, node, true).tree;
    } else {
      result = [node];
    }
  }

  // this is memory-heavy per Chrome profiler, avoid unless we actually stemmed
  if (stemmedRevs) {
    traverseRevTree(result, function (isLeaf, pos, revHash) {
      // some revisions may have been removed in a branch but not in another
      delete stemmedRevs[pos + '-' + revHash];
    });
  }

  return {
    tree: result,
    revs: stemmedRevs ? Object.keys(stemmedRevs) : []
  };
}
1208
// Merge a new path into a rev tree and immediately stem the result so the
// tree never grows past `depth` revisions per branch. Returns
// {tree, stemmedRevs, conflicts}: the stemmed tree, the revs removed by
// stemming, and the conflict status reported by the merge itself.
function merge(tree, path$$1, depth) {
  const mergeResult = doMerge(tree, path$$1);
  const stemResult = stem(mergeResult.tree, depth);
  return {
    tree: stemResult.tree,
    stemmedRevs: stemResult.revs,
    conflicts: mergeResult.conflicts
  };
}
1218
1219// return true if a rev exists in the rev tree, false otherwise
// Return true if the revision string "pos-hash" (`rev`) appears anywhere in
// the rev tree forest `revs`, false otherwise. Performs an explicit-stack
// depth-first search; the input forest is not modified.
function revExists(revs, rev) {
  const [posPart, targetId] = rev.split('-');
  const targetPos = parseInt(posPart, 10);
  const stack = revs.slice();

  while (stack.length > 0) {
    const {pos, ids} = stack.pop();
    if (pos === targetPos && ids[0] === targetId) {
      return true;
    }
    for (const child of ids[2]) {
      stack.push({pos: pos + 1, ids: child});
    }
  }
  return false;
}
1238
// Accessor used with Array#map: pull the `ids` subtree out of a
// {pos, ids} rev-tree root node.
function getTrees({ids}) {
  return ids;
}
1242
1243// check if a specific revision of a doc has been deleted
1244// - metadata: the metadata object from the doc store
1245// - rev: (optional) the revision to check. defaults to winning revision
// Check if a specific revision of a doc has been deleted.
// - metadata: the metadata object from the doc store
// - rev: (optional) the "pos-hash" revision to check; defaults to the
//   winning revision
// Returns true/false when the rev is found in the tree, undefined otherwise.
function isDeleted(metadata, rev) {
  if (!rev) {
    rev = winningRev(metadata);
  }
  // only the hash part is needed; hashes are unique within a doc's tree
  const targetId = rev.slice(rev.indexOf('-') + 1);
  const stack = metadata.rev_tree.map((node) => node.ids);

  let tree;
  while ((tree = stack.pop())) {
    const [id, opts, children] = tree;
    if (id === targetId) {
      return !!opts.deleted;
    }
    for (const child of children) {
      stack.push(child);
    }
  }
}
1261
// True only for string ids in the CouchDB "_local/" namespace
// (local, non-replicated documents).
function isLocalId(id) {
  if (typeof id !== 'string') {
    return false;
  }
  return id.startsWith('_local/');
}
1265
1266// returns the current leaf node for a given revision
// Return the current leaf revision ("pos-hash") descended from a given
// revision `rev`, searching the whole rev tree in `metadata`. Each stack
// entry carries the root-to-node history so leaves can check whether the
// requested rev lies on their path. Throws if `rev` is not in the tree.
function latest(rev, metadata) {
  const stack = metadata.rev_tree.slice();
  let node;
  while ((node = stack.pop())) {
    const pos = node.pos;
    const tree = node.ids;
    const [id, opts, branches] = tree;

    // copy-on-extend so sibling branches don't share a mutable history
    const history = node.history ? node.history.slice() : [];
    history.push({id, pos, opts});

    if (branches.length === 0) {
      // leaf: if the requested rev appears anywhere on this root-to-leaf
      // path, this leaf is its latest descendant
      for (const entry of history) {
        if (entry.pos + '-' + entry.id === rev) {
          return pos + '-' + id;
        }
      }
    }

    for (const branch of branches) {
      stack.push({pos: pos + 1, ids: branch, history});
    }
  }

  /* istanbul ignore next */
  throw new Error('Unable to resolve latest revision for id ' + metadata.id + ', rev ' + rev);
}
1301
// Emit a 'change' event on `emitter`, swallowing (but logging) any exception
// thrown by user listeners. The try/catch lives in its own tiny function so
// V8 does not deoptimize the hot caller that dispatches changes.
function tryCatchInChangeListener(emitter, change, pending, lastSeq) {
  try {
    emitter.emit('change', change, pending, lastSeq);
  } catch (err) {
    guardedConsole('error', 'Error in .on("change", function):', err);
  }
}
1310
// Build a single changes-feed row for `doc`. By default only the winning
// rev is listed; with style === 'all_docs' every leaf rev is included.
// Marks the row deleted when the rev is a tombstone, and attaches
// `_conflicts` to the doc when requested (removed again if empty).
function processChange(doc, metadata, opts) {
  let changeList;
  if (opts.style === 'all_docs') {
    changeList = collectLeaves(metadata.rev_tree)
      .map((leaf) => ({rev: leaf.rev}));
  } else {
    changeList = [{rev: doc._rev}];
  }

  const change = {
    id: metadata.id,
    changes: changeList,
    doc
  };

  if (isDeleted(metadata, doc._rev)) {
    change.deleted = true;
  }
  if (opts.conflicts) {
    const conflicts = collectConflicts(metadata);
    change.doc._conflicts = conflicts;
    if (conflicts.length === 0) {
      delete change.doc._conflicts;
    }
  }
  return change;
}
1334
// An EventEmitter wrapper around a (possibly live) changes feed. Instances
// are also thenable: `then`/`catch` are bound to an internal promise that
// settles when the feed completes, errors out, or is cancelled.
class Changes$1 extends EE {
  constructor(db, opts, callback) {
    super();
    this.db = db;
    opts = opts ? clone(opts) : {};
    // `complete` fires at most once and fans out to either the 'error' or
    // 'complete' event, then tears everything down.
    var complete = opts.complete = once((err, resp) => {
      if (err) {
        // only emit 'error' when someone listens, so an unhandled 'error'
        // event cannot crash the process
        if (listenerCount(this, 'error') > 0) {
          this.emit('error', err);
        }
      } else {
        this.emit('complete', resp);
      }
      this.removeAllListeners();
      db.removeListener('destroyed', onDestroy);
    });
    // node-style callback support on top of the events/promise interface
    if (callback) {
      this.on('complete', function (resp) {
        callback(null, resp);
      });
      this.on('error', callback);
    }
    // a destroyed database cancels any live feed attached to it
    const onDestroy = () => {
      this.cancel();
    };
    db.once('destroyed', onDestroy);

    opts.onChange = (change, pending, lastSeq) => {
      /* istanbul ignore if */
      if (this.isCancelled) {
        return;
      }
      // isolate listener exceptions so they cannot break the feed
      tryCatchInChangeListener(this, change, pending, lastSeq);
    };

    // `opts.complete` is re-pointed at the promise's resolvers; the promise
    // in turn feeds the once-guarded `complete` handler above.
    var promise = new Promise(function (fulfill, reject) {
      opts.complete = function (err, res$$1) {
        if (err) {
          reject(err);
        } else {
          fulfill(res$$1);
        }
      };
    });
    this.once('cancel', function () {
      db.removeListener('destroyed', onDestroy);
      // a cancelled feed resolves (not rejects) with a status marker
      opts.complete(null, {status: 'cancelled'});
    });
    this.then = promise.then.bind(promise);
    this['catch'] = promise['catch'].bind(promise);
    this.then(function (result) {
      complete(null, result);
    }, complete);



    // defer starting the feed until the adapter's task queue is ready
    if (!db.taskqueue.isReady) {
      db.taskqueue.addTask((failed) => {
        if (failed) {
          opts.complete(failed);
        } else if (this.isCancelled) {
          this.emit('cancel');
        } else {
          this.validateChanges(opts);
        }
      });
    } else {
      this.validateChanges(opts);
    }
  }

  // Mark the feed cancelled. The 'cancel' event fires immediately when the
  // task queue is ready; otherwise the queued startup task emits it.
  cancel() {
    this.isCancelled = true;
    if (this.db.taskqueue.isReady) {
      this.emit('cancel');
    }
  }

  // Let the changes-filter plugin validate filter/selector options before
  // starting; without the plugin, start directly.
  validateChanges(opts) {
    var callback = opts.complete;

    /* istanbul ignore else */
    if (PouchDB._changesFilterPlugin) {
      PouchDB._changesFilterPlugin.validate(opts, (err) => {
        if (err) {
          return callback(err);
        }
        this.doChanges(opts);
      });
    } else {
      this.doChanges(opts);
    }
  }

  // Normalize options and hand off to the adapter's _changes implementation.
  doChanges(opts) {
    var callback = opts.complete;

    opts = clone(opts);
    if ('live' in opts && !('continuous' in opts)) {
      opts.continuous = opts.live;
    }
    opts.processChange = processChange;

    if (opts.since === 'latest') {
      opts.since = 'now';
    }
    if (!opts.since) {
      opts.since = 0;
    }
    if (opts.since === 'now') {
      // resolve 'now' to the current update_seq, then re-enter with it
      this.db.info().then((info) => {
        /* istanbul ignore if */
        if (this.isCancelled) {
          callback(null, {status: 'cancelled'});
          return;
        }
        opts.since = info.update_seq;
        this.doChanges(opts);
      }, callback);
      return;
    }

    /* istanbul ignore else */
    if (PouchDB._changesFilterPlugin) {
      PouchDB._changesFilterPlugin.normalize(opts);
      if (PouchDB._changesFilterPlugin.shouldFilter(this, opts)) {
        return PouchDB._changesFilterPlugin.filter(this, opts);
      }
    } else {
      // warn when filtering options are supplied but the plugin is absent
      ['doc_ids', 'filter', 'selector', 'view'].forEach(function (key) {
        if (key in opts) {
          guardedConsole('warn',
            'The "' + key + '" option was passed in to changes/replicate, ' +
            'but pouchdb-changes-filter plugin is not installed, so it ' +
            'was ignored. Please install the plugin to enable filtering.'
          );
        }
      });
    }

    if (!('descending' in opts)) {
      opts.descending = false;
    }

    // 0 and 1 should return 1 document
    opts.limit = opts.limit === 0 ? 1 : opts.limit;
    opts.complete = callback;
    var newPromise = this.db._changes(opts);
    /* istanbul ignore else */
    if (newPromise && typeof newPromise.cancel === 'function') {
      // chain the adapter's own cancel into ours
      const cancel = this.cancel;
      this.cancel = (...args) => {
        newPromise.cancel();
        cancel.apply(this, args);
      };
    }
  }
}
1493
1494/*
1495 * A generic pouch adapter
1496 */
1497
1498// Wrapper for functions that call the bulkdocs api with a single doc,
1499// if the first result is an error, return an error
1500function yankError(callback, docId) {
1501 return function (err, results) {
1502 if (err || (results[0] && results[0].error)) {
1503 err = err || results[0];
1504 err.docId = docId;
1505 callback(err);
1506 } else {
1507 callback(null, results.length ? results[0] : results);
1508 }
1509 };
1510}
1511
1512// clean docs given to us by the user
// Clean docs given to us by the user, in place: deleted docs lose their
// attachments entirely, and surviving attachments are stripped down to the
// known CouchDB attachment fields.
function cleanDocs(docs) {
  const allowedAttachmentFields =
    ['data', 'digest', 'content_type', 'length', 'revpos', 'stub'];
  for (const doc of docs) {
    if (doc._deleted) {
      // ignore atts for deleted docs
      delete doc._attachments;
    } else if (doc._attachments) {
      // filter out extraneous keys from _attachments
      for (const name of Object.keys(doc._attachments)) {
        doc._attachments[name] = pick(doc._attachments[name],
          allowedAttachmentFields);
      }
    }
  }
}
1529
1530// compare two docs, first by _id then by _rev
// Comparator for docs: order by _id first, then by revision generation
// (`_revisions.start`, treated as 0 when absent) for docs sharing an _id.
function compareByIdThenRev(a, b) {
  if (a._id !== b._id) {
    return a._id < b._id ? -1 : 1;
  }
  const startOf = (doc) => (doc._revisions ? doc._revisions.start : 0);
  return startOf(a) - startOf(b);
}
1539
1540// for every node in a revision tree computes its distance from the closest
1541// leaf
// For every node in a revision tree, compute its distance from the closest
// leaf. Returns a map of "pos-hash" rev strings to heights (leaves are 0).
function computeHeight(revs) {
  const height = {};
  const edges = [];

  // Pass 1: seed leaves with height 0 and collect parent->child edges.
  // The callback's return value becomes the ctx (parent rev) for children.
  traverseRevTree(revs, function (isLeaf, pos, id, parentRev) {
    const revStr = pos + "-" + id;
    if (isLeaf) {
      height[revStr] = 0;
    }
    if (parentRev !== undefined) {
      edges.push({from: parentRev, to: revStr});
    }
    return revStr;
  });

  // Pass 2: fold edges child-first (hence the reverse), so each parent's
  // height is 1 + the minimum of its children's heights.
  edges.reverse();
  for (const {from, to} of edges) {
    const viaChild = 1 + height[to];
    if (height[from] === undefined) {
      height[from] = viaChild;
    } else {
      height[from] = Math.min(height[from], viaChild);
    }
  }
  return height;
}
1566
// Normalize allDocs options for keys-based queries, in place: fold `skip`
// and `limit` into a pre-sliced `keys` array (then reset/remove them), and
// fold `descending` into the key order.
function allDocsKeysParse(opts) {
  let keys;
  if ('limit' in opts) {
    keys = opts.keys.slice(opts.skip, opts.limit + opts.skip);
  } else if (opts.skip > 0) {
    keys = opts.keys.slice(opts.skip);
  } else {
    keys = opts.keys;
  }
  opts.keys = keys;
  opts.skip = 0;
  delete opts.limit;
  if (opts.descending) {
    // reverse in place instead of asking the adapter to iterate backwards
    keys.reverse();
    opts.descending = false;
  }
}
1579
1580// all compaction is done in a queue, to avoid attaching
1581// too many listeners at once
// Run the compaction task at the head of `self._compactionQueue`, then
// recurse (on a later tick) into the next queued task, if any. Tasks are
// queued by `compact()`; only the queue's first element is ever running.
function doNextCompaction(self) {
  var task = self._compactionQueue[0];
  var opts = task.opts;
  var callback = task.callback;
  // resume from the last compacted sequence when a checkpoint doc exists;
  // a missing '_local/compaction' doc is expected and means "start over"
  self.get('_local/compaction').catch(function () {
    return false;
  }).then(function (doc) {
    if (doc && doc.last_seq) {
      opts.last_seq = doc.last_seq;
    }
    self._compact(opts, function (err, res$$1) {
      /* istanbul ignore if */
      if (err) {
        callback(err);
      } else {
        callback(null, res$$1);
      }
      // dequeue on the next tick so the user callback above has run first,
      // then kick off the next queued compaction (if any)
      nextTick(function () {
        self._compactionQueue.shift();
        if (self._compactionQueue.length) {
          doNextCompaction(self);
        }
      });
    });
  });
}
1608
// Append a purge record for (docId, rev) to the db's '_local/purges' doc,
// bumping `purgeSeq`, and persist it. Creates the doc on first purge (404).
// The retained history is capped at `db.purged_infos_limit` entries.
// Returns the promise from `db.put`.
// FIX: this previously read `self.purged_infos_limit` — `self` is undefined
// here (ReferenceError in Node once the cap check is reached; silently
// disabled the cap in browsers, where `self` is the global). The limit
// belongs to the `db` instance.
function appendPurgeSeq(db, docId, rev$$1) {
  return db.get('_local/purges').then(function (doc) {
    const purgeSeq = doc.purgeSeq + 1;
    doc.purges.push({
      docId,
      rev: rev$$1,
      purgeSeq,
    });
    // drop the oldest entries once the history exceeds the configured cap
    if (doc.purges.length > db.purged_infos_limit) {
      doc.purges.splice(0, doc.purges.length - db.purged_infos_limit);
    }
    doc.purgeSeq = purgeSeq;
    return doc;
  }).catch(function (err) {
    if (err.status !== 404) {
      throw err;
    }
    // first purge for this database: seed the purges doc
    return {
      _id: '_local/purges',
      purges: [{
        docId,
        rev: rev$$1,
        purgeSeq: 0,
      }],
      purgeSeq: 0,
    };
  }).then(function (doc) {
    return db.put(doc);
  });
}
1639
// Validate an attachment name. Returns an error message string when the
// name is invalid (leading underscore is reserved), or false when it's fine.
function attachmentNameError(name) {
  if (!name.startsWith('_')) {
    return false;
  }
  return name + ' is not a valid attachment name, attachment ' +
    'names cannot start with \'_\'';
}
1647
// True when `doc` is NOT a single plain document object: null, any
// non-object primitive, or an array all fail the check.
function isNotSingleDoc(doc) {
  if (doc === null) {
    return true;
  }
  return typeof doc !== 'object' || Array.isArray(doc);
}
1651
// A structurally valid _rev is "<generation>-<hash>": one or more digits,
// a dash, then any run of non-dash characters (the hash may be empty).
const validRevRegex = /^\d+-[^-]*$/;
function isValidRev(rev$$1) {
  if (typeof rev$$1 !== 'string') {
    return false;
  }
  return validRevRegex.test(rev$$1);
}
1656
1657class AbstractPouchDB extends EE {
1658 _setup() {
1659 this.post = adapterFun('post', function (doc, opts, callback) {
1660 if (typeof opts === 'function') {
1661 callback = opts;
1662 opts = {};
1663 }
1664 if (isNotSingleDoc(doc)) {
1665 return callback(createError(NOT_AN_OBJECT));
1666 }
1667 this.bulkDocs({docs: [doc]}, opts, yankError(callback, doc._id));
1668 }).bind(this);
1669
1670 this.put = adapterFun('put', function (doc, opts, cb) {
1671 if (typeof opts === 'function') {
1672 cb = opts;
1673 opts = {};
1674 }
1675 if (isNotSingleDoc(doc)) {
1676 return cb(createError(NOT_AN_OBJECT));
1677 }
1678 invalidIdError(doc._id);
1679 if ('_rev' in doc && !isValidRev(doc._rev)) {
1680 return cb(createError(INVALID_REV));
1681 }
1682 if (isLocalId(doc._id) && typeof this._putLocal === 'function') {
1683 if (doc._deleted) {
1684 return this._removeLocal(doc, cb);
1685 } else {
1686 return this._putLocal(doc, cb);
1687 }
1688 }
1689
1690 const putDoc = (next) => {
1691 if (typeof this._put === 'function' && opts.new_edits !== false) {
1692 this._put(doc, opts, next);
1693 } else {
1694 this.bulkDocs({docs: [doc]}, opts, yankError(next, doc._id));
1695 }
1696 };
1697
1698 if (opts.force && doc._rev) {
1699 transformForceOptionToNewEditsOption();
1700 putDoc(function (err) {
1701 var result = err ? null : {ok: true, id: doc._id, rev: doc._rev};
1702 cb(err, result);
1703 });
1704 } else {
1705 putDoc(cb);
1706 }
1707
1708 function transformForceOptionToNewEditsOption() {
1709 var parts = doc._rev.split('-');
1710 var oldRevId = parts[1];
1711 var oldRevNum = parseInt(parts[0], 10);
1712
1713 var newRevNum = oldRevNum + 1;
1714 var newRevId = rev();
1715
1716 doc._revisions = {
1717 start: newRevNum,
1718 ids: [newRevId, oldRevId]
1719 };
1720 doc._rev = newRevNum + '-' + newRevId;
1721 opts.new_edits = false;
1722 }
1723 }).bind(this);
1724
1725 this.putAttachment = adapterFun('putAttachment', function (docId, attachmentId, rev$$1, blob, type) {
1726 var api = this;
1727 if (typeof type === 'function') {
1728 type = blob;
1729 blob = rev$$1;
1730 rev$$1 = null;
1731 }
1732 // Lets fix in https://github.com/pouchdb/pouchdb/issues/3267
1733 /* istanbul ignore if */
1734 if (typeof type === 'undefined') {
1735 type = blob;
1736 blob = rev$$1;
1737 rev$$1 = null;
1738 }
1739 if (!type) {
1740 guardedConsole('warn', 'Attachment', attachmentId, 'on document', docId, 'is missing content_type');
1741 }
1742
1743 function createAttachment(doc) {
1744 var prevrevpos = '_rev' in doc ? parseInt(doc._rev, 10) : 0;
1745 doc._attachments = doc._attachments || {};
1746 doc._attachments[attachmentId] = {
1747 content_type: type,
1748 data: blob,
1749 revpos: ++prevrevpos
1750 };
1751 return api.put(doc);
1752 }
1753
1754 return api.get(docId).then(function (doc) {
1755 if (doc._rev !== rev$$1) {
1756 throw createError(REV_CONFLICT);
1757 }
1758
1759 return createAttachment(doc);
1760 }, function (err) {
1761 // create new doc
1762 /* istanbul ignore else */
1763 if (err.reason === MISSING_DOC.message) {
1764 return createAttachment({_id: docId});
1765 } else {
1766 throw err;
1767 }
1768 });
1769 }).bind(this);
1770
1771 this.removeAttachment = adapterFun('removeAttachment', function (docId, attachmentId, rev$$1, callback) {
1772 this.get(docId, (err, obj$$1) => {
1773 /* istanbul ignore if */
1774 if (err) {
1775 callback(err);
1776 return;
1777 }
1778 if (obj$$1._rev !== rev$$1) {
1779 callback(createError(REV_CONFLICT));
1780 return;
1781 }
1782 /* istanbul ignore if */
1783 if (!obj$$1._attachments) {
1784 return callback();
1785 }
1786 delete obj$$1._attachments[attachmentId];
1787 if (Object.keys(obj$$1._attachments).length === 0) {
1788 delete obj$$1._attachments;
1789 }
1790 this.put(obj$$1, callback);
1791 });
1792 }).bind(this);
1793
1794 this.remove = adapterFun('remove', function (docOrId, optsOrRev, opts, callback) {
1795 var doc;
1796 if (typeof optsOrRev === 'string') {
1797 // id, rev, opts, callback style
1798 doc = {
1799 _id: docOrId,
1800 _rev: optsOrRev
1801 };
1802 if (typeof opts === 'function') {
1803 callback = opts;
1804 opts = {};
1805 }
1806 } else {
1807 // doc, opts, callback style
1808 doc = docOrId;
1809 if (typeof optsOrRev === 'function') {
1810 callback = optsOrRev;
1811 opts = {};
1812 } else {
1813 callback = opts;
1814 opts = optsOrRev;
1815 }
1816 }
1817 opts = opts || {};
1818 opts.was_delete = true;
1819 var newDoc = {_id: doc._id, _rev: (doc._rev || opts.rev)};
1820 newDoc._deleted = true;
1821 if (isLocalId(newDoc._id) && typeof this._removeLocal === 'function') {
1822 return this._removeLocal(doc, callback);
1823 }
1824 this.bulkDocs({docs: [newDoc]}, opts, yankError(callback, newDoc._id));
1825 }).bind(this);
1826
1827 this.revsDiff = adapterFun('revsDiff', function (req, opts, callback) {
1828 if (typeof opts === 'function') {
1829 callback = opts;
1830 opts = {};
1831 }
1832 var ids = Object.keys(req);
1833
1834 if (!ids.length) {
1835 return callback(null, {});
1836 }
1837
1838 var count = 0;
1839 var missing = new Map();
1840
1841 function addToMissing(id, revId) {
1842 if (!missing.has(id)) {
1843 missing.set(id, {missing: []});
1844 }
1845 missing.get(id).missing.push(revId);
1846 }
1847
1848 function processDoc(id, rev_tree) {
1849 // Is this fast enough? Maybe we should switch to a set simulated by a map
1850 var missingForId = req[id].slice(0);
1851 traverseRevTree(rev_tree, function (isLeaf, pos, revHash, ctx,
1852 opts) {
1853 var rev$$1 = pos + '-' + revHash;
1854 var idx = missingForId.indexOf(rev$$1);
1855 if (idx === -1) {
1856 return;
1857 }
1858
1859 missingForId.splice(idx, 1);
1860 /* istanbul ignore if */
1861 if (opts.status !== 'available') {
1862 addToMissing(id, rev$$1);
1863 }
1864 });
1865
1866 // Traversing the tree is synchronous, so now `missingForId` contains
1867 // revisions that were not found in the tree
1868 missingForId.forEach(function (rev$$1) {
1869 addToMissing(id, rev$$1);
1870 });
1871 }
1872
1873 ids.forEach(function (id) {
1874 this._getRevisionTree(id, function (err, rev_tree) {
1875 if (err && err.status === 404 && err.message === 'missing') {
1876 missing.set(id, {missing: req[id]});
1877 } else if (err) {
1878 /* istanbul ignore next */
1879 return callback(err);
1880 } else {
1881 processDoc(id, rev_tree);
1882 }
1883
1884 if (++count === ids.length) {
1885 // convert LazyMap to object
1886 var missingObj = {};
1887 missing.forEach(function (value, key) {
1888 missingObj[key] = value;
1889 });
1890 return callback(null, missingObj);
1891 }
1892 });
1893 }, this);
1894 }).bind(this);
1895
1896 // _bulk_get API for faster replication, as described in
1897 // https://github.com/apache/couchdb-chttpd/pull/33
1898 // At the "abstract" level, it will just run multiple get()s in
1899 // parallel, because this isn't much of a performance cost
1900 // for local databases (except the cost of multiple transactions, which is
1901 // small). The http adapter overrides this in order
1902 // to do a more efficient single HTTP request.
1903 this.bulkGet = adapterFun('bulkGet', function (opts, callback) {
1904 bulkGet(this, opts, callback);
1905 }).bind(this);
1906
1907 // compact one document and fire callback
1908 // by compacting we mean removing all revisions which
1909 // are further from the leaf in revision tree than max_height
1910 this.compactDocument = adapterFun('compactDocument', function (docId, maxHeight, callback) {
1911 this._getRevisionTree(docId, (err, revTree) => {
1912 /* istanbul ignore if */
1913 if (err) {
1914 return callback(err);
1915 }
1916 var height = computeHeight(revTree);
1917 var candidates = [];
1918 var revs = [];
1919 Object.keys(height).forEach(function (rev$$1) {
1920 if (height[rev$$1] > maxHeight) {
1921 candidates.push(rev$$1);
1922 }
1923 });
1924
1925 traverseRevTree(revTree, function (isLeaf, pos, revHash, ctx, opts) {
1926 var rev$$1 = pos + '-' + revHash;
1927 if (opts.status === 'available' && candidates.indexOf(rev$$1) !== -1) {
1928 revs.push(rev$$1);
1929 }
1930 });
1931 this._doCompaction(docId, revs, callback);
1932 });
1933 }).bind(this);
1934
1935 // compact the whole database using single document
1936 // compaction
1937 this.compact = adapterFun('compact', function (opts, callback) {
1938 if (typeof opts === 'function') {
1939 callback = opts;
1940 opts = {};
1941 }
1942
1943 opts = opts || {};
1944
1945 this._compactionQueue = this._compactionQueue || [];
1946 this._compactionQueue.push({opts, callback});
1947 if (this._compactionQueue.length === 1) {
1948 doNextCompaction(this);
1949 }
1950 }).bind(this);
1951
1952 /* Begin api wrappers. Specific functionality to storage belongs in the _[method] */
1953 this.get = adapterFun('get', function (id, opts, cb) {
1954 if (typeof opts === 'function') {
1955 cb = opts;
1956 opts = {};
1957 }
1958 opts = opts || {};
1959 if (typeof id !== 'string') {
1960 return cb(createError(INVALID_ID));
1961 }
1962 if (isLocalId(id) && typeof this._getLocal === 'function') {
1963 return this._getLocal(id, cb);
1964 }
1965 var leaves = [];
1966
1967 const finishOpenRevs = () => {
1968 var result = [];
1969 var count = leaves.length;
1970 /* istanbul ignore if */
1971 if (!count) {
1972 return cb(null, result);
1973 }
1974
1975 // order with open_revs is unspecified
1976 leaves.forEach((leaf) => {
1977 this.get(id, {
1978 rev: leaf,
1979 revs: opts.revs,
1980 latest: opts.latest,
1981 attachments: opts.attachments,
1982 binary: opts.binary
1983 }, function (err, doc) {
1984 if (!err) {
1985 // using latest=true can produce duplicates
1986 var existing;
1987 for (var i = 0, l = result.length; i < l; i++) {
1988 if (result[i].ok && result[i].ok._rev === doc._rev) {
1989 existing = true;
1990 break;
1991 }
1992 }
1993 if (!existing) {
1994 result.push({ok: doc});
1995 }
1996 } else {
1997 result.push({missing: leaf});
1998 }
1999 count--;
2000 if (!count) {
2001 cb(null, result);
2002 }
2003 });
2004 });
2005 };
2006
2007 if (opts.open_revs) {
2008 if (opts.open_revs === "all") {
2009 this._getRevisionTree(id, function (err, rev_tree) {
2010 /* istanbul ignore if */
2011 if (err) {
2012 return cb(err);
2013 }
2014 leaves = collectLeaves(rev_tree).map(function (leaf) {
2015 return leaf.rev;
2016 });
2017 finishOpenRevs();
2018 });
2019 } else {
2020 if (Array.isArray(opts.open_revs)) {
2021 leaves = opts.open_revs;
2022 for (var i = 0; i < leaves.length; i++) {
2023 var l = leaves[i];
2024 // looks like it's the only thing couchdb checks
2025 if (!isValidRev(l)) {
2026 return cb(createError(INVALID_REV));
2027 }
2028 }
2029 finishOpenRevs();
2030 } else {
2031 return cb(createError(UNKNOWN_ERROR, 'function_clause'));
2032 }
2033 }
2034 return; // open_revs does not like other options
2035 }
2036
2037 return this._get(id, opts, (err, result) => {
2038 if (err) {
2039 err.docId = id;
2040 return cb(err);
2041 }
2042
2043 var doc = result.doc;
2044 var metadata = result.metadata;
2045 var ctx = result.ctx;
2046
2047 if (opts.conflicts) {
2048 var conflicts = collectConflicts(metadata);
2049 if (conflicts.length) {
2050 doc._conflicts = conflicts;
2051 }
2052 }
2053
2054 if (isDeleted(metadata, doc._rev)) {
2055 doc._deleted = true;
2056 }
2057
2058 if (opts.revs || opts.revs_info) {
2059 var splittedRev = doc._rev.split('-');
2060 var revNo = parseInt(splittedRev[0], 10);
2061 var revHash = splittedRev[1];
2062
2063 var paths = rootToLeaf(metadata.rev_tree);
2064 var path$$1 = null;
2065
2066 for (var i = 0; i < paths.length; i++) {
2067 var currentPath = paths[i];
2068 const hashIndex = currentPath.ids.findIndex(x => x.id === revHash);
2069 var hashFoundAtRevPos = hashIndex === (revNo - 1);
2070
2071 if (hashFoundAtRevPos || (!path$$1 && hashIndex !== -1)) {
2072 path$$1 = currentPath;
2073 }
2074 }
2075
2076 /* istanbul ignore if */
2077 if (!path$$1) {
2078 err = new Error('invalid rev tree');
2079 err.docId = id;
2080 return cb(err);
2081 }
2082
2083 const pathId = doc._rev.split('-')[1];
2084 const indexOfRev = path$$1.ids.findIndex(x => x.id === pathId) + 1;
2085 var howMany = path$$1.ids.length - indexOfRev;
2086 path$$1.ids.splice(indexOfRev, howMany);
2087 path$$1.ids.reverse();
2088
2089 if (opts.revs) {
2090 doc._revisions = {
2091 start: (path$$1.pos + path$$1.ids.length) - 1,
2092 ids: path$$1.ids.map(function (rev$$1) {
2093 return rev$$1.id;
2094 })
2095 };
2096 }
2097 if (opts.revs_info) {
2098 var pos = path$$1.pos + path$$1.ids.length;
2099 doc._revs_info = path$$1.ids.map(function (rev$$1) {
2100 pos--;
2101 return {
2102 rev: pos + '-' + rev$$1.id,
2103 status: rev$$1.opts.status
2104 };
2105 });
2106 }
2107 }
2108
2109 if (opts.attachments && doc._attachments) {
2110 var attachments = doc._attachments;
2111 var count = Object.keys(attachments).length;
2112 if (count === 0) {
2113 return cb(null, doc);
2114 }
2115 Object.keys(attachments).forEach((key) => {
2116 this._getAttachment(doc._id, key, attachments[key], {
2117 binary: opts.binary,
2118 metadata,
2119 ctx
2120 }, function (err, data) {
2121 var att = doc._attachments[key];
2122 att.data = data;
2123 delete att.stub;
2124 delete att.length;
2125 if (!--count) {
2126 cb(null, doc);
2127 }
2128 });
2129 });
2130 } else {
2131 if (doc._attachments) {
2132 for (var key in doc._attachments) {
2133 /* istanbul ignore else */
2134 if (Object.prototype.hasOwnProperty.call(doc._attachments, key)) {
2135 doc._attachments[key].stub = true;
2136 }
2137 }
2138 }
2139 cb(null, doc);
2140 }
2141 });
2142 }).bind(this);
2143
2144 // TODO: I don't like this, it forces an extra read for every
2145 // attachment read and enforces a confusing api between
2146 // adapter.js and the adapter implementation
2147 this.getAttachment = adapterFun('getAttachment', function (docId, attachmentId, opts, callback) {
2148 if (opts instanceof Function) {
2149 callback = opts;
2150 opts = {};
2151 }
2152 this._get(docId, opts, (err, res$$1) => {
2153 if (err) {
2154 return callback(err);
2155 }
2156 if (res$$1.doc._attachments && res$$1.doc._attachments[attachmentId]) {
2157 opts.ctx = res$$1.ctx;
2158 opts.binary = true;
2159 opts.metadata = res$$1.metadata;
2160 this._getAttachment(docId, attachmentId,
2161 res$$1.doc._attachments[attachmentId], opts, callback);
2162 } else {
2163 return callback(createError(MISSING_DOC));
2164 }
2165 });
2166 }).bind(this);
2167
2168 this.allDocs = adapterFun('allDocs', function (opts, callback) {
2169 if (typeof opts === 'function') {
2170 callback = opts;
2171 opts = {};
2172 }
2173 opts.skip = typeof opts.skip !== 'undefined' ? opts.skip : 0;
2174 if (opts.start_key) {
2175 opts.startkey = opts.start_key;
2176 }
2177 if (opts.end_key) {
2178 opts.endkey = opts.end_key;
2179 }
2180 if ('keys' in opts) {
2181 if (!Array.isArray(opts.keys)) {
2182 return callback(new TypeError('options.keys must be an array'));
2183 }
2184 var incompatibleOpt =
2185 ['startkey', 'endkey', 'key'].filter(function (incompatibleOpt) {
2186 return incompatibleOpt in opts;
2187 })[0];
2188 if (incompatibleOpt) {
2189 callback(createError(QUERY_PARSE_ERROR,
2190 'Query parameter `' + incompatibleOpt +
2191 '` is not compatible with multi-get'
2192 ));
2193 return;
2194 }
2195 if (!isRemote(this)) {
2196 allDocsKeysParse(opts);
2197 if (opts.keys.length === 0) {
2198 return this._allDocs({limit: 0}, callback);
2199 }
2200 }
2201 }
2202
2203 return this._allDocs(opts, callback);
2204 }).bind(this);
2205
2206 this.close = adapterFun('close', function (callback) {
2207 this._closed = true;
2208 this.emit('closed');
2209 return this._close(callback);
2210 }).bind(this);
2211
2212 this.info = adapterFun('info', function (callback) {
2213 this._info((err, info) => {
2214 if (err) {
2215 return callback(err);
2216 }
2217 // assume we know better than the adapter, unless it informs us
2218 info.db_name = info.db_name || this.name;
2219 info.auto_compaction = !!(this.auto_compaction && !isRemote(this));
2220 info.adapter = this.adapter;
2221 callback(null, info);
2222 });
2223 }).bind(this);
2224
2225 this.id = adapterFun('id', function (callback) {
2226 return this._id(callback);
2227 }).bind(this);
2228
    // Validate and write a batch of documents in one operation.
    // Accepts either an array of docs or {docs: [...], new_edits: bool}.
    this.bulkDocs = adapterFun('bulkDocs', function (req, opts, callback) {
      if (typeof opts === 'function') {
        callback = opts;
        opts = {};
      }

      opts = opts || {};

      // accept a bare array as shorthand for {docs: [...]}
      if (Array.isArray(req)) {
        req = {
          docs: req
        };
      }

      if (!req || !req.docs || !Array.isArray(req.docs)) {
        return callback(createError(MISSING_BULK_DOCS));
      }

      // per-doc validation: each entry must be a single doc object with a
      // well-formed _rev (when one is present)
      for (var i = 0; i < req.docs.length; ++i) {
        const doc = req.docs[i];
        if (isNotSingleDoc(doc)) {
          return callback(createError(NOT_AN_OBJECT));
        }
        if ('_rev' in doc && !isValidRev(doc._rev)) {
          return callback(createError(INVALID_REV));
        }
      }

      // validate attachment names; a missing content_type only warns
      var attachmentError;
      req.docs.forEach(function (doc) {
        if (doc._attachments) {
          Object.keys(doc._attachments).forEach(function (name) {
            attachmentError = attachmentError || attachmentNameError(name);
            if (!doc._attachments[name].content_type) {
              guardedConsole('warn', 'Attachment', name, 'on document', doc._id, 'is missing content_type');
            }
          });
        }
      });

      if (attachmentError) {
        return callback(createError(BAD_REQUEST, attachmentError));
      }

      // new_edits defaults to true; a value in opts wins over one in req
      if (!('new_edits' in opts)) {
        if ('new_edits' in req) {
          opts.new_edits = req.new_edits;
        } else {
          opts.new_edits = true;
        }
      }

      var adapter = this;
      if (!opts.new_edits && !isRemote(adapter)) {
        // ensure revisions of the same doc are sorted, so that
        // the local adapter processes them correctly (#2935)
        req.docs.sort(compareByIdThenRev);
      }

      cleanDocs(req.docs);

      // in the case of conflicts, we want to return the _ids to the user
      // however, the underlying adapter may destroy the docs array, so
      // create a copy here
      var ids = req.docs.map(function (doc) {
        return doc._id;
      });

      this._bulkDocs(req, opts, function (err, res$$1) {
        if (err) {
          return callback(err);
        }
        if (!opts.new_edits) {
          // this is what couch does when new_edits is false
          res$$1 = res$$1.filter(function (x) {
            return x.error;
          });
        }
        // add ids for error/conflict responses (not required for CouchDB)
        if (!isRemote(adapter)) {
          for (var i = 0, l = res$$1.length; i < l; i++) {
            res$$1[i].id = res$$1[i].id || ids[i];
          }
        }

        callback(null, res$$1);
      });
    }).bind(this);
2317
    // Record (in a _local doc) that `dependentDb` is derived from this DB so
    // destroy() can cascade to it later; yields the new dependent handle.
    this.registerDependentDatabase = adapterFun('registerDependentDatabase', function (dependentDb, callback) {
      var dbOptions = clone(this.__opts);
      if (this.__opts.view_adapter) {
        dbOptions.adapter = this.__opts.view_adapter;
      }

      var depDB = new this.constructor(dependentDb, dbOptions);

      // mark the dependent in the registry doc; returning false means
      // "already registered, no write needed"
      function diffFun(doc) {
        doc.dependentDbs = doc.dependentDbs || {};
        if (doc.dependentDbs[dependentDb]) {
          return false; // no update required
        }
        doc.dependentDbs[dependentDb] = true;
        return doc;
      }
      upsert(this, '_local/_pouch_dependentDbs', diffFun).then(function () {
        callback(null, {db: depDB});
      }).catch(callback);
    }).bind(this);
2338
    // Destroy this database. For local databases, any registered dependent
    // (view) databases are destroyed first.
    this.destroy = adapterFun('destroy', function (opts, callback) {

      if (typeof opts === 'function') {
        callback = opts;
        opts = {};
      }

      var usePrefix = 'use_prefix' in this ? this.use_prefix : true;

      const destroyDb = () => {
        // call destroy method of the particular adaptor
        this._destroy(opts, (err, resp) => {
          if (err) {
            return callback(err);
          }
          this._destroyed = true;
          this.emit('destroyed');
          callback(null, resp || { 'ok': true });
        });
      };

      if (isRemote(this)) {
        // no need to check for dependent DBs if it's a remote DB
        return destroyDb();
      }

      this.get('_local/_pouch_dependentDbs', (err, localDoc) => {
        if (err) {
          /* istanbul ignore if */
          if (err.status !== 404) {
            return callback(err);
          } else { // no dependencies
            return destroyDb();
          }
        }
        var dependentDbs = localDoc.dependentDbs;
        var PouchDB = this.constructor;
        // destroy every dependent DB in parallel, then this one
        var deletedMap = Object.keys(dependentDbs).map((name) => {
          // use_prefix is only false in the browser
          /* istanbul ignore next */
          var trueName = usePrefix ?
            name.replace(new RegExp('^' + PouchDB.prefix), '') : name;
          return new PouchDB(trueName, this.__opts).destroy();
        });
        Promise.all(deletedMap).then(destroyDb, callback);
      });
    }).bind(this);
2386 }
2387
  // Default compaction: walk the changes feed from the last recorded
  // compaction seq, compact each changed doc, then persist the new last_seq
  // in '_local/compaction'. Progress is reported via this.activeTasks.
  _compact(opts, callback) {
    var changesOpts = {
      return_docs: false,
      last_seq: opts.last_seq || 0,
      since: opts.last_seq || 0
    };
    var promises = [];

    var taskId;
    var compactedDocs = 0;

    const onChange = (row) => {
      this.activeTasks.update(taskId, {
        completed_items: ++compactedDocs
      });
      promises.push(this.compactDocument(row.id, 0));
    };
    const onError = (err) => {
      this.activeTasks.remove(taskId, err);
      callback(err);
    };
    const onComplete = (resp) => {
      var lastSeq = resp.last_seq;
      // wait for all per-doc compactions before recording the checkpoint
      Promise.all(promises).then(() => {
        return upsert(this, '_local/compaction', (doc) => {
          if (!doc.last_seq || doc.last_seq < lastSeq) {
            doc.last_seq = lastSeq;
            return doc;
          }
          return false; // somebody else got here first, don't update
        });
      }).then(() => {
        this.activeTasks.remove(taskId);
        callback(null, {ok: true});
      }).catch(onError);
    };

    this.info().then((info) => {
      taskId = this.activeTasks.add({
        name: 'database_compaction',
        total_items: info.update_seq - changesOpts.last_seq,
      });

      this.changes(changesOpts)
        .on('change', onChange)
        .on('complete', onComplete)
        .on('error', onError);
    });
  }
2437
  // Create a change-feed emitter over this database.
  changes(opts, callback) {
    if (typeof opts === 'function') {
      callback = opts;
      opts = {};
    }

    opts = opts || {};

    // By default set return_docs to false if the caller has opts.live = true,
    // this will prevent us from collecting the set of changes indefinitely
    // resulting in growing memory
    opts.return_docs = ('return_docs' in opts) ? opts.return_docs : !opts.live;

    return new Changes$1(this, opts, callback);
  }
2453
2454 type() {
2455 return (typeof this._type === 'function') ? this._type() : this.adapter;
2456 }
2457}
2458
// The abstract purge implementation expects a doc id and the rev of a leaf node in that doc.
// It will return errors if the rev doesn’t exist or isn’t a leaf.
AbstractPouchDB.prototype.purge = adapterFun('_purge', function (docId, rev$$1, callback) {
  if (typeof this._purge === 'undefined') {
    return callback(createError(UNKNOWN_ERROR, 'Purge is not implemented in the ' + this.adapter + ' adapter.'));
  }
  var self = this;

  self._getRevisionTree(docId, (error, revs) => {
    if (error) {
      return callback(error);
    }
    if (!revs) {
      return callback(createError(MISSING_DOC));
    }
    let path$$1;
    try {
      // throws when rev$$1 is absent from the tree or is not a leaf
      path$$1 = findPathToLeaf(revs, rev$$1);
    } catch (error) {
      return callback(error.message || error);
    }
    self._purge(docId, path$$1, (error, result) => {
      if (error) {
        return callback(error);
      } else {
        // NOTE(review): a rejection from appendPurgeSeq is not routed to
        // `callback` (no .catch) — confirm whether that is intentional.
        appendPurgeSeq(self, docId, rev$$1).then(function () {
          return callback(null, result);
        });
      }
    });
  });
});
2491
// Queues operations issued before the adapter finished (or failed) opening.
// Once ready() or fail() fires, queued callbacks are drained immediately,
// and tasks added after a failure are rejected right away.
class TaskQueue {
  constructor() {
    this.isReady = false;
    this.failed = false;
    this.queue = [];
  }

  // Drain the queue: with the stored error if setup failed, plain otherwise.
  execute() {
    let task;
    if (this.failed) {
      while ((task = this.queue.shift())) {
        task(this.failed);
      }
      return;
    }
    while ((task = this.queue.shift())) {
      task();
    }
  }

  // Mark setup as failed; every queued task receives `err`.
  fail(err) {
    this.failed = err;
    this.execute();
  }

  // Mark setup as complete and run everything that was waiting.
  ready(db) {
    this.isReady = true;
    this.db = db;
    this.execute();
  }

  addTask(fun) {
    this.queue.push(fun);
    // if setup has already failed, reject the new task immediately
    if (this.failed) {
      this.execute();
    }
  }
}
2530
// Resolve a database name + options to the backing adapter and the final
// (possibly prefixed) storage name. URL-style names pick the protocol
// adapter directly; otherwise the preferred-adapter list is consulted.
function parseAdapter(name, opts) {
  var match = name.match(/([a-z-]*):\/\/(.*)/);
  if (match) {
    // the http adapter expects the fully qualified name
    return {
      name: /https?/.test(match[1]) ? match[1] + '://' + match[2] : match[2],
      adapter: match[1]
    };
  }

  var adapters = PouchDB.adapters;
  var preferredAdapters = PouchDB.preferredAdapters;
  var prefix = PouchDB.prefix;
  var adapterName = opts.adapter;

  if (!adapterName) { // automatically determine adapter
    for (var i = 0; i < preferredAdapters.length; ++i) {
      adapterName = preferredAdapters[i];
      // check for browsers that have been upgraded from websql-only to websql+idb
      /* istanbul ignore if */
      if (adapterName === 'idb' && 'websql' in adapters &&
          hasLocalStorage() && localStorage['_pouch__websqldb_' + prefix + name]) {
        // log it, because this can be confusing during development
        guardedConsole('log', 'PouchDB is downgrading "' + name + '" to WebSQL to' +
          ' avoid data loss, because it was already opened with WebSQL.');
        continue; // keep using websql to avoid user data loss
      }
      break;
    }
  }

  var adapter = adapters[adapterName];

  // if adapter is invalid, then an error will be thrown later
  var usePrefix = (adapter && 'use_prefix' in adapter) ?
    adapter.use_prefix : true;

  return {
    name: usePrefix ? (prefix + name) : name,
    adapter: adapterName
  };
}
2573
// Classic prototypal inheritance: instances of A inherit from B.prototype
// while `constructor` still points back at A.
function inherits(A, B) {
  const proto = Object.create(B.prototype, {
    constructor: { value: A }
  });
  A.prototype = proto;
}

// Build a constructor that (a) works with or without `new` and (b) runs
// `init` with the caller's arguments, inheriting from `parent`.
function createClass(parent, init) {
  const klass = function (...args) {
    if (this instanceof klass) {
      init.apply(this, args);
    } else {
      return new klass(...args);
    }
  };
  inherits(klass, parent);
  return klass;
}
2590
2591// OK, so here's the deal. Consider this code:
2592// var db1 = new PouchDB('foo');
2593// var db2 = new PouchDB('foo');
2594// db1.destroy();
2595// ^ these two both need to emit 'destroyed' events,
2596// as well as the PouchDB constructor itself.
2597// So we have one db object (whichever one got destroy() called on it)
2598// responsible for emitting the initial event, which then gets emitted
2599// by the constructor, which then broadcasts it to any other dbs
2600// that may have been created with the same name.
// Wire up one-shot 'destroyed'/'closed' listeners that keep the PouchDB
// constructor's per-name registry in sync with this instance's lifecycle.
function prepareForDestruction(self) {
  const onClosed = () => {
    self.removeListener('destroyed', onDestroyed);
    self.constructor.emit('unref', self);
  };

  const onDestroyed = (fromConstructor) => {
    self.removeListener('closed', onClosed);
    // Broadcasts that *originate* from the constructor are not echoed back,
    // otherwise every sibling handle would re-trigger the broadcast.
    if (!fromConstructor) {
      self.constructor.emit('destroyed', self.name);
    }
  };

  self.once('destroyed', onDestroyed);
  self.once('closed', onClosed);
  self.constructor.emit('ref', self);
}
2619
// Concrete PouchDB implementation: resolves and validates the adapter, then
// kicks off asynchronous adapter setup behind a TaskQueue.
class PouchInternal extends AbstractPouchDB {
  constructor(name, opts) {
    super();
    this._setup(name, opts);
  }

  _setup(name, opts) {
    super._setup();
    opts = opts || {};

    // allow new PouchDB({name: 'foo', ...})
    if (name && typeof name === 'object') {
      opts = name;
      name = opts.name;
      delete opts.name;
    }

    if (opts.deterministic_revs === undefined) {
      opts.deterministic_revs = true;
    }

    this.__opts = opts = clone(opts);

    this.auto_compaction = opts.auto_compaction;
    this.purged_infos_limit = opts.purged_infos_limit || 1000;
    this.prefix = PouchDB.prefix;

    if (typeof name !== 'string') {
      throw new Error('Missing/invalid DB name');
    }

    var prefixedName = (opts.prefix || '') + name;
    var backend = parseAdapter(prefixedName, opts);

    opts.name = backend.name;
    opts.adapter = opts.adapter || backend.adapter;

    this.name = name;
    this._adapter = opts.adapter;
    PouchDB.emit('debug', ['adapter', 'Picked adapter: ', opts.adapter]);

    if (!PouchDB.adapters[opts.adapter] ||
        !PouchDB.adapters[opts.adapter].valid()) {
      throw new Error('Invalid Adapter: ' + opts.adapter);
    }

    if (opts.view_adapter) {
      if (!PouchDB.adapters[opts.view_adapter] ||
          !PouchDB.adapters[opts.view_adapter].valid()) {
        throw new Error('Invalid View Adapter: ' + opts.view_adapter);
      }
    }

    // queue API calls until the adapter reports ready (or failed)
    this.taskqueue = new TaskQueue();

    this.adapter = opts.adapter;

    PouchDB.adapters[opts.adapter].call(this, opts, (err) => {
      if (err) {
        return this.taskqueue.fail(err);
      }
      prepareForDestruction(this);

      this.emit('created', this);
      PouchDB.emit('created', this.name);
      this.taskqueue.ready(this);
    });
  }
}
2688
// Public PouchDB constructor: callable with or without `new`; all the real
// setup happens in PouchInternal#_setup.
const PouchDB = createClass(PouchInternal, function (name, opts) {
  PouchInternal.prototype._setup.call(this, name, opts);
});

// cookie-aware fetch used for remote (http) databases
var fetch = fetchCookie(nodeFetch);
2694
// Registry of long-running internal jobs (e.g. database compaction),
// keyed by a generated UUID.
class ActiveTasks {
  constructor() {
    this.tasks = {};
  }

  // All currently-registered tasks.
  list() {
    return Object.values(this.tasks);
  }

  // Register a new task; returns its generated id.
  add(task) {
    const id = v4();
    this.tasks[id] = {
      id,
      name: task.name,
      total_items: task.total_items,
      created_at: new Date().toJSON()
    };
    return id;
  }

  get(id) {
    return this.tasks[id];
  }

  /* eslint-disable no-unused-vars */
  // `reason` is accepted for API symmetry but not recorded.
  remove(id, reason) {
    delete this.tasks[id];
    return this.tasks;
  }

  // Merge progress counters into an existing task and stamp updated_at.
  // Counters of 0 are honored: the previous `||`-based merge silently
  // dropped falsy updates (e.g. resetting completed_items to 0).
  update(id, updatedTask) {
    const task = this.tasks[id];
    if (typeof task !== 'undefined') {
      const mergedTask = {
        id: task.id,
        name: task.name,
        created_at: task.created_at,
        total_items: updatedTask.total_items != null ? updatedTask.total_items : task.total_items,
        completed_items: updatedTask.completed_items != null ? updatedTask.completed_items : task.completed_items,
        updated_at: new Date().toJSON()
      };
      this.tasks[id] = mergedTask;
    }
    return this.tasks;
  }
}
2741
// Adapter registry and auto-pick order consulted by parseAdapter().
PouchDB.adapters = {};
PouchDB.preferredAdapters = [];

// Prefix prepended to on-disk database names (see parseAdapter).
PouchDB.prefix = '_pouch_';

// Single shared emitter backing the static PouchDB event API.
var eventEmitter = new EE();
2748
// Graft the EventEmitter API (on/emit/...) onto the PouchDB constructor by
// delegating to the shared module-level emitter, and maintain the per-name
// registry used to broadcast 'destroyed' to sibling handles.
function setUpEventEmitter(Pouch) {
  Object.keys(EE.prototype).forEach(function (key) {
    if (typeof EE.prototype[key] === 'function') {
      Pouch[key] = eventEmitter[key].bind(eventEmitter);
    }
  });

  // these are created in constructor.js, and allow us to notify each DB with
  // the same name that it was destroyed, via the constructor object
  var destructListeners = Pouch._destructionListeners = new Map();

  Pouch.on('ref', function onConstructorRef(db) {
    if (!destructListeners.has(db.name)) {
      destructListeners.set(db.name, []);
    }
    destructListeners.get(db.name).push(db);
  });

  Pouch.on('unref', function onConstructorUnref(db) {
    if (!destructListeners.has(db.name)) {
      return;
    }
    var dbList = destructListeners.get(db.name);
    var pos = dbList.indexOf(db);
    if (pos < 0) {
      /* istanbul ignore next */
      return;
    }
    dbList.splice(pos, 1);
    // NOTE(review): when exactly one handle remains, the entry is dropped
    // (`> 1` rather than `> 0`) — confirm this is intended. The `set` below
    // is a no-op anyway, since splice mutated the stored array in place.
    if (dbList.length > 1) {
      /* istanbul ignore next */
      destructListeners.set(db.name, dbList);
    } else {
      destructListeners.delete(db.name);
    }
  });

  Pouch.on('destroyed', function onConstructorDestroyed(name) {
    if (!destructListeners.has(name)) {
      return;
    }
    var dbList = destructListeners.get(name);
    destructListeners.delete(name);
    // true = "from constructor", so handles don't re-broadcast (see
    // prepareForDestruction)
    dbList.forEach(function (db) {
      db.emit('destroyed',true);
    });
  });
}
2797
setUpEventEmitter(PouchDB);

// Register an adapter implementation; optionally add it to the auto-pick
// list used when no explicit adapter is requested.
PouchDB.adapter = function (id, obj$$1, addToPreferredAdapters) {
  /* istanbul ignore else */
  if (obj$$1.valid()) {
    PouchDB.adapters[id] = obj$$1;
    if (addToPreferredAdapters) {
      PouchDB.preferredAdapters.push(id);
    }
  }
};
2809
// Install a plugin: a function receives the constructor; an object has its
// members copied onto PouchDB.prototype.
PouchDB.plugin = function (obj$$1) {
  if (typeof obj$$1 === 'function') { // function style for plugins
    obj$$1(PouchDB);
  } else if (typeof obj$$1 !== 'object' || Object.keys(obj$$1).length === 0) {
    throw new Error('Invalid plugin: got "' + obj$$1 + '", expected an object or a function');
  } else {
    Object.keys(obj$$1).forEach(function (id) { // object style for plugins
      PouchDB.prototype[id] = obj$$1[id];
    });
  }
  // carry transitive defaults over when plugin() runs on a defaults() subclass
  if (this.__defaults) {
    PouchDB.__defaults = Object.assign({}, this.__defaults);
  }
  return PouchDB;
};
2825
// Create a subclass whose constructor merges `defaultOpts` into every
// instance's options; statics and the preferred-adapter list carry over.
PouchDB.defaults = function (defaultOpts) {
  let PouchWithDefaults = createClass(PouchDB, function (name, opts) {
    opts = opts || {};

    // allow new PouchWithDefaults({name: 'foo', ...})
    if (name && typeof name === 'object') {
      opts = name;
      name = opts.name;
      delete opts.name;
    }

    opts = Object.assign({}, PouchWithDefaults.__defaults, opts);
    PouchDB.call(this, name, opts);
  });

  PouchWithDefaults.preferredAdapters = PouchDB.preferredAdapters.slice();
  Object.keys(PouchDB).forEach(function (key) {
    if (!(key in PouchWithDefaults)) {
      PouchWithDefaults[key] = PouchDB[key];
    }
  });

  // make default options transitive
  // https://github.com/pouchdb/pouchdb/issues/5922
  PouchWithDefaults.__defaults = Object.assign({}, this.__defaults, defaultOpts);

  return PouchWithDefaults;
};
2853
// Expose the cookie-aware fetch for plugins and adapters.
PouchDB.fetch = function (url, opts) {
  return fetch(url, opts);
};

// Shared task registry, reachable both statically and per-instance.
PouchDB.prototype.activeTasks = PouchDB.activeTasks = new ActiveTasks();

// managed automatically by set-version.js
var version = "9.0.0";
2862
// Resolve a (possibly dot-notation) field against a document, walking one
// path component at a time.
// NOTE: the walk stops at the first falsy intermediate value (0, '', false,
// null), returning that value rather than undefined — longstanding quirk,
// preserved for compatibility.
function getFieldFromDoc(doc, parsedField) {
  let value = doc;
  for (const component of parsedField) {
    value = value[component];
    if (!value) {
      break;
    }
  }
  return value;
}
2876
// Three-way comparison for primitives: -1, 0 or 1.
function compare(left, right) {
  if (left < right) {
    return -1;
  }
  return left > right ? 1 : 0;
}
2880
// Split a dot-notation field name into its components, honoring
// backslash-escaped '.' and '$' (e.g. "a\.b" -> ["a.b"]).
function parseField(fieldName) {
  const fields = [];
  let current = '';
  for (let i = 0; i < fieldName.length; i++) {
    const ch = fieldName[i];
    const escaped = i > 0 && fieldName[i - 1] === '\\' && (ch === '$' || ch === '.');
    if (escaped) {
      // drop the backslash that was already appended, keep the literal char
      current = current.substring(0, current.length - 1) + ch;
    } else if (ch === '.') {
      // an unescaped '.' delimits components
      fields.push(current);
      current = '';
    } else {
      current += ch;
    }
  }
  fields.push(current);
  return fields;
}
2902
// Selector fields that combine sub-selectors rather than matching a doc field.
var combinationFields = ['$or', '$nor', '$not'];
function isCombinationalField(field) {
  return combinationFields.includes(field);
}
2907
// First key of a single-entry object (a selector's operator name).
function getKey(obj$$1) {
  const [first] = Object.keys(obj$$1);
  return first;
}

// Value behind that first key (the operator's argument).
function getValue(obj$$1) {
  return obj$$1[getKey(obj$$1)];
}
2915
2916
// flatten an array of selectors joined by an $and operator
function mergeAndedSelectors(selectors) {

  // sort to ensure that e.g. if the user specified
  // $and: [{$gt: 'a'}, {$gt: 'b'}], then it's collapsed into
  // just {$gt: 'b'}
  var res$$1 = {};
  // tracks whether the first $or/$nor has been seen (the first one is
  // copied verbatim; later ones are cross-merged against it)
  var first = {$or: true, $nor: true};

  selectors.forEach(function (selector) {
    Object.keys(selector).forEach(function (field) {
      var matcher = selector[field];
      // a bare value means equality: {foo: 'bar'} -> {foo: {$eq: 'bar'}}
      if (typeof matcher !== 'object') {
        matcher = {$eq: matcher};
      }

      if (isCombinationalField(field)) {
        // or, nor
        if (matcher instanceof Array) {
          if (first[field]) {
            first[field] = false;
            res$$1[field] = matcher;
            return;
          }

          // subsequent $or/$nor arrays: cross-merge every existing branch
          // with every new branch, keeping only genuinely-combined results
          var entries = [];
          res$$1[field].forEach(function (existing) {
            Object.keys(matcher).forEach(function (key) {
              var m = matcher[key];
              var longest = Math.max(Object.keys(existing).length, Object.keys(m).length);
              var merged = mergeAndedSelectors([existing, m]);
              if (Object.keys(merged).length <= longest) {
                // we have a situation like: (a :{$eq :1} || ...) && (a {$eq: 2} || ...)
                // merging would produce a $eq 2 when actually we shouldn't ever match against these merged conditions
                // merged should always contain more values to be valid
                return;
              }
              entries.push(merged);
            });
          });
          res$$1[field] = entries;
        } else {
          // not
          res$$1[field] = mergeAndedSelectors([matcher]);
        }
      } else {
        // plain field: fold each operator into the accumulated matchers
        var fieldMatchers = res$$1[field] = res$$1[field] || {};
        Object.keys(matcher).forEach(function (operator) {
          var value = matcher[operator];

          if (operator === '$gt' || operator === '$gte') {
            return mergeGtGte(operator, value, fieldMatchers);
          } else if (operator === '$lt' || operator === '$lte') {
            return mergeLtLte(operator, value, fieldMatchers);
          } else if (operator === '$ne') {
            return mergeNe(value, fieldMatchers);
          } else if (operator === '$eq') {
            return mergeEq(value, fieldMatchers);
          } else if (operator === "$regex") {
            return mergeRegex(value, fieldMatchers);
          }
          fieldMatchers[operator] = value;
        });
      }
    });
  });

  return res$$1;
}
2986
2987
2988
// Collapse a new $gt/$gte bound into an existing set of field matchers,
// keeping only the tighter (more specific) lower bound. An $eq pin always
// wins outright.
function mergeGtGte(operator, value, fieldMatchers) {
  if (typeof fieldMatchers.$eq !== 'undefined') {
    return; // $eq is already maximally specific
  }
  if (typeof fieldMatchers.$gte !== 'undefined') {
    if (operator === '$gte') {
      if (value > fieldMatchers.$gte) {
        fieldMatchers.$gte = value;
      }
    } else if (value >= fieldMatchers.$gte) {
      // $gt at (or above) the inclusive bound excludes the boundary: replace it
      delete fieldMatchers.$gte;
      fieldMatchers.$gt = value;
    }
    return;
  }
  if (typeof fieldMatchers.$gt !== 'undefined') {
    if (operator === '$gte') {
      if (value > fieldMatchers.$gt) {
        delete fieldMatchers.$gt;
        fieldMatchers.$gte = value;
      }
    } else if (value > fieldMatchers.$gt) {
      fieldMatchers.$gt = value;
    }
    return;
  }
  // no lower bound yet: take this one as-is
  fieldMatchers[operator] = value;
}
3020
// Collapse a new $lt/$lte bound into an existing set of field matchers,
// keeping only the tighter (more specific) upper bound. An $eq pin always
// wins outright.
function mergeLtLte(operator, value, fieldMatchers) {
  if (typeof fieldMatchers.$eq !== 'undefined') {
    return; // $eq is already maximally specific
  }
  if (typeof fieldMatchers.$lte !== 'undefined') {
    if (operator === '$lte') {
      if (value < fieldMatchers.$lte) {
        fieldMatchers.$lte = value;
      }
    } else if (value <= fieldMatchers.$lte) {
      // $lt at (or below) the inclusive bound excludes the boundary: replace it
      delete fieldMatchers.$lte;
      fieldMatchers.$lt = value;
    }
    return;
  }
  if (typeof fieldMatchers.$lt !== 'undefined') {
    if (operator === '$lte') {
      if (value < fieldMatchers.$lt) {
        delete fieldMatchers.$lt;
        fieldMatchers.$lte = value;
      }
    } else if (value < fieldMatchers.$lt) {
      fieldMatchers.$lt = value;
    }
    return;
  }
  // no upper bound yet: take this one as-is
  fieldMatchers[operator] = value;
}
3052
// Accumulate $ne values into one array — a field can be "not" many things.
function mergeNe(value, fieldMatchers) {
  if ('$ne' in fieldMatchers) {
    fieldMatchers.$ne.push(value);
    return;
  }
  // first exclusion for this field
  fieldMatchers.$ne = [value];
}
3062
// An exact $eq supersedes any accumulated range or $ne constraints.
// TODO: check for user errors here
function mergeEq(value, fieldMatchers) {
  for (const op of ['$gt', '$gte', '$lt', '$lte', '$ne']) {
    delete fieldMatchers[op];
  }
  fieldMatchers.$eq = value;
}
3074
// Accumulate $regex values into one array — a value can be required to
// match several patterns at once.
function mergeRegex(value, fieldMatchers) {
  if ('$regex' in fieldMatchers) {
    fieldMatchers.$regex.push(value);
    return;
  }
  // first pattern for this field
  fieldMatchers.$regex = [value];
}
3084
3085//#7458: execute function mergeAndedSelectors on nested $and
3086function mergeAndedSelectorsNested(obj$$1) {
3087 for (var prop in obj$$1) {
3088 if (Array.isArray(obj$$1)) {
3089 for (var i in obj$$1) {
3090 if (obj$$1[i]['$and']) {
3091 obj$$1[i] = mergeAndedSelectors(obj$$1[i]['$and']);
3092 }
3093 }
3094 }
3095 var value = obj$$1[prop];
3096 if (typeof value === 'object') {
3097 mergeAndedSelectorsNested(value); // <- recursive call
3098 }
3099 }
3100 return obj$$1;
3101}
3102
3103//#7458: determine id $and is present in selector (at any level)
3104function isAndInSelector(obj$$1, isAnd) {
3105 for (var prop in obj$$1) {
3106 if (prop === '$and') {
3107 isAnd = true;
3108 }
3109 var value = obj$$1[prop];
3110 if (typeof value === 'object') {
3111 isAnd = isAndInSelector(value, isAnd); // <- recursive call
3112 }
3113 }
3114 return isAnd;
3115}
3116
3117//
3118// normalize the selector
3119//
3120function massageSelector(input) {
3121 var result = clone(input);
3122
3123 //#7458: if $and is present in selector (at any level) merge nested $and
3124 if (isAndInSelector(result, false)) {
3125 result = mergeAndedSelectorsNested(result);
3126 if ('$and' in result) {
3127 result = mergeAndedSelectors(result['$and']);
3128 }
3129 }
3130
3131 ['$or', '$nor'].forEach(function (orOrNor) {
3132 if (orOrNor in result) {
3133 // message each individual selector
3134 // e.g. {foo: 'bar'} becomes {foo: {$eq: 'bar'}}
3135 result[orOrNor].forEach(function (subSelector) {
3136 var fields = Object.keys(subSelector);
3137 for (var i = 0; i < fields.length; i++) {
3138 var field = fields[i];
3139 var matcher = subSelector[field];
3140 if (typeof matcher !== 'object' || matcher === null) {
3141 subSelector[field] = {$eq: matcher};
3142 }
3143 }
3144 });
3145 }
3146 });
3147
3148 if ('$not' in result) {
3149 //This feels a little like forcing, but it will work for now,
3150 //I would like to come back to this and make the merging of selectors a little more generic
3151 result['$not'] = mergeAndedSelectors([result['$not']]);
3152 }
3153
3154 var fields = Object.keys(result);
3155
3156 for (var i = 0; i < fields.length; i++) {
3157 var field = fields[i];
3158 var matcher = result[field];
3159
3160 if (typeof matcher !== 'object' || matcher === null) {
3161 matcher = {$eq: matcher};
3162 }
3163 result[field] = matcher;
3164 }
3165
3166 normalizeArrayOperators(result);
3167
3168 return result;
3169}
3170
3171//
3172// The $ne and $regex values must be placed in an array because these operators can be used multiple times on the same field.
3173// When $and is used, mergeAndedSelectors takes care of putting some of them into arrays, otherwise it's done here.
3174//
3175function normalizeArrayOperators(selector) {
3176 Object.keys(selector).forEach(function (field) {
3177 var matcher = selector[field];
3178
3179 if (Array.isArray(matcher)) {
3180 matcher.forEach(function (matcherItem) {
3181 if (matcherItem && typeof matcherItem === 'object') {
3182 normalizeArrayOperators(matcherItem);
3183 }
3184 });
3185 } else if (field === '$ne') {
3186 selector.$ne = [matcher];
3187 } else if (field === '$regex') {
3188 selector.$regex = [matcher];
3189 } else if (matcher && typeof matcher === 'object') {
3190 normalizeArrayOperators(matcher);
3191 }
3192 });
3193}
3194
// Build a padding string of `padWith` repeated until `str` plus the padding
// reaches (at least) `upToLength` characters. With a multi-character
// `padWith` the result may overshoot — same as the historical behavior.
function pad(str, padWith, upToLength) {
  const needed = upToLength - str.length;
  let padding = '';
  /* istanbul ignore next */
  while (padding.length < needed) {
    padding += padWith;
  }
  return padding;
}

// Left-pad `str` with `padWith` up to `upToLength`.
function padLeft(str, padWith, upToLength) {
  return pad(str, padWith, upToLength) + str;
}
3209
// Tuning constants for the number <-> indexable-string encoding below.
var MIN_MAGNITUDE = -324; // verified by -Number.MIN_VALUE
var MAGNITUDE_DIGITS = 3; // ditto
var SEP = ''; // set to '_' for easier debugging
3213
// CouchDB-style collation: returns <0, 0 or >0. Keys sort by collation
// index first (type ordering), then by a type-specific comparison.
function collate(a, b) {

  if (a === b) {
    return 0;
  }

  a = normalizeKey(a);
  b = normalizeKey(b);

  // different types: the type rank decides
  var ai = collationIndex(a);
  var bi = collationIndex(b);
  if ((ai - bi) !== 0) {
    return ai - bi;
  }
  switch (typeof a) {
    case 'number':
      return a - b;
    case 'boolean':
      return a < b ? -1 : 1;
    case 'string':
      return stringCollate(a, b);
  }
  // remaining same-type cases are arrays and generic objects
  return Array.isArray(a) ? arrayCollate(a, b) : objectCollate(a, b);
}
3238
// couch considers null/NaN/Infinity/-Infinity === undefined,
// for the purposes of mapreduce indexes. also, dates get stringified.
function normalizeKey(key) {
  switch (typeof key) {
    case 'undefined':
      return null;
    case 'number':
      // non-finite numbers collapse to null, matching CouchDB
      if (key === Infinity || key === -Infinity || isNaN(key)) {
        return null;
      }
      return key;
    case 'object': {
      const original = key;
      if (Array.isArray(original)) {
        const normalized = new Array(original.length);
        for (let i = 0; i < original.length; i++) {
          normalized[i] = normalizeKey(original[i]);
        }
        return normalized;
        /* istanbul ignore next */
      }
      if (original instanceof Date) {
        return original.toJSON();
      }
      if (original === null) {
        return original;
      }
      // generic object: copy own properties, dropping undefined values
      const copy = {};
      for (const k in original) {
        if (Object.prototype.hasOwnProperty.call(original, k)) {
          const val = original[k];
          if (typeof val !== 'undefined') {
            copy[k] = normalizeKey(val);
          }
        }
      }
      return copy;
    }
  }
  // booleans and strings pass through untouched
  return key;
}
3275
// Encode a normalized key as a string fragment whose lexicographic order
// matches collate() order within that key's type.
function indexify(key) {
  if (key !== null) {
    switch (typeof key) {
      case 'boolean':
        return key ? 1 : 0;
      case 'number':
        return numToIndexableString(key);
      case 'string':
        // We've to be sure that key does not contain \u0000
        // Do order-preserving replacements:
        // 0 -> 1, 1
        // 1 -> 1, 2
        // 2 -> 2, 2
        /* eslint-disable no-control-regex */
        return key
          .replace(/\u0002/g, '\u0002\u0002')
          .replace(/\u0001/g, '\u0001\u0002')
          .replace(/\u0000/g, '\u0001\u0001');
        /* eslint-enable no-control-regex */
      case 'object':
        var isArray = Array.isArray(key);
        var arr = isArray ? key : Object.keys(key);
        var i = -1;
        var len = arr.length;
        var result = '';
        if (isArray) {
          // arrays encode as the concatenation of their elements
          while (++i < len) {
            result += toIndexableString(arr[i]);
          }
        } else {
          // objects encode as alternating key/value fragments
          while (++i < len) {
            var objKey = arr[i];
            result += toIndexableString(objKey) +
              toIndexableString(key[objKey]);
          }
        }
        return result;
    }
  }
  // null encodes as the empty fragment (type index alone identifies it)
  return '';
}
3317
// convert the given key to a string that would be appropriate
// for lexical sorting, e.g. within a database, where the
// sorting is the same given by the collate() function.
function toIndexableString(key) {
  var zero = '\u0000'; // terminator consumed by parseIndexableString
  key = normalizeKey(key);
  return collationIndex(key) + SEP + indexify(key) + zero;
}
3326
// Decode a number previously encoded by the indexable-string scheme,
// starting at offset `i`; returns {num, length} where `length` is the
// number of characters consumed.
function parseNumber(str, i) {
  var originalIdx = i;
  var num;
  var zero = str[i] === '1'; // a leading '1' encodes literal zero
  if (zero) {
    num = 0;
    i++;
  } else {
    var neg = str[i] === '0'; // a leading '0' marks a negative number
    i++;
    var numAsString = '';
    // fixed-width exponent field follows the sign marker
    var magAsString = str.substring(i, i + MAGNITUDE_DIGITS);
    var magnitude = parseInt(magAsString, 10) + MIN_MAGNITUDE;
    /* istanbul ignore next */
    if (neg) {
      magnitude = -magnitude;
    }
    i += MAGNITUDE_DIGITS;
    // mantissa digits run until the \u0000 terminator
    while (true) {
      var ch = str[i];
      if (ch === '\u0000') {
        break;
      } else {
        numAsString += ch;
      }
      i++;
    }
    // NOTE(review): after split() `numAsString` is an array; parseInt
    // relies on its string coercion (['12'] -> '12'), which works for the
    // single-element case.
    numAsString = numAsString.split('.');
    if (numAsString.length === 1) {
      num = parseInt(numAsString, 10);
    } else {
      /* istanbul ignore next */
      num = parseFloat(numAsString[0] + '.' + numAsString[1]);
    }
    /* istanbul ignore next */
    if (neg) {
      num = num - 10;
    }
    /* istanbul ignore next */
    if (magnitude !== 0) {
      // parseFloat is more reliable than pow due to rounding errors
      // e.g. Number.MAX_VALUE would return Infinity if we did
      // num * Math.pow(10, magnitude);
      num = parseFloat(num + 'e' + magnitude);
    }
  }
  return {num, length : i - originalIdx};
}
3375
// move up the stack while parsing
// this function moved outside of parseIndexableString for performance
function pop(stack, metaStack) {
  var obj$$1 = stack.pop();

  if (metaStack.length) {
    var lastMetaElement = metaStack[metaStack.length - 1];
    if (obj$$1 === lastMetaElement.element) {
      // popping a meta-element, e.g. an object whose value is another object
      metaStack.pop();
      lastMetaElement = metaStack[metaStack.length - 1];
    }
    var element = lastMetaElement.element;
    var lastElementIndex = lastMetaElement.index;
    if (Array.isArray(element)) {
      // arrays just append the finished value
      element.push(obj$$1);
    } else if (lastElementIndex === stack.length - 2) { // obj with key+value
      var key = stack.pop();
      element[key] = obj$$1;
    } else {
      stack.push(obj$$1); // obj with key only
    }
  }
}
3400
// Parse a string produced by the indexable-string encoder back into the
// original value. Works as a small stack machine: completed values are
// pushed on `stack`, while `metaStack` remembers which array/object is
// currently being filled (see pop() above).
function parseIndexableString(str) {
  var stack = [];
  var metaStack = []; // stack for arrays and objects
  var i = 0;

  /*eslint no-constant-condition: ["error", { "checkLoops": false }]*/
  while (true) {
    var collationIndex = str[i++];
    // '\u0000' terminates the current value — or the whole input when
    // only one value remains on the stack
    if (collationIndex === '\u0000') {
      if (stack.length === 1) {
        return stack.pop();
      } else {
        pop(stack, metaStack);
        continue;
      }
    }
    switch (collationIndex) {
      case '1': // null
        stack.push(null);
        break;
      case '2': // boolean: next char is '1' for true
        stack.push(str[i] === '1');
        i++;
        break;
      case '3': // number — see parseNumber for the encoding
        var parsedNum = parseNumber(str, i);
        stack.push(parsedNum.num);
        i += parsedNum.length;
        break;
      case '4': // string, NUL-terminated
        var parsedStr = '';
        /*eslint no-constant-condition: ["error", { "checkLoops": false }]*/
        while (true) {
          var ch = str[i];
          if (ch === '\u0000') {
            break;
          }
          parsedStr += ch;
          i++;
        }
        // perform the reverse of the order-preserving replacement
        // algorithm (see above)
        /* eslint-disable no-control-regex */
        parsedStr = parsedStr.replace(/\u0001\u0001/g, '\u0000')
          .replace(/\u0001\u0002/g, '\u0001')
          .replace(/\u0002\u0002/g, '\u0002');
        /* eslint-enable no-control-regex */
        stack.push(parsedStr);
        break;
      case '5': // array: open a new container on the meta stack
        var arrayElement = { element: [], index: stack.length };
        stack.push(arrayElement.element);
        metaStack.push(arrayElement);
        break;
      case '6': // object: open a new container on the meta stack
        var objElement = { element: {}, index: stack.length };
        stack.push(objElement.element);
        metaStack.push(objElement);
        break;
      /* istanbul ignore next */
      default:
        throw new Error(
          'bad collationIndex or unexpectedly reached end of input: ' +
          collationIndex);
    }
  }
}
3468
// Compare two arrays element-wise; on a tie the shorter array sorts first.
function arrayCollate(a, b) {
  var shared = Math.min(a.length, b.length);
  for (var idx = 0; idx < shared; idx++) {
    var result = collate(a[idx], b[idx]);
    if (result !== 0) {
      return result;
    }
  }
  if (a.length === b.length) {
    return 0;
  }
  return a.length > b.length ? 1 : -1;
}
// Compare two strings with plain JS ordering.
// See: https://github.com/daleharvey/pouchdb/issues/40
// This is incompatible with the CouchDB implementation, but its the
// best we can do for now
function stringCollate(a, b) {
  if (a === b) {
    return 0;
  }
  return a > b ? 1 : -1;
}
// Compare two objects: first by their keys (in insertion order), then by
// the corresponding values; on a tie the object with fewer keys wins.
function objectCollate(a, b) {
  var aKeys = Object.keys(a);
  var bKeys = Object.keys(b);
  var shared = Math.min(aKeys.length, bKeys.length);
  for (var i = 0; i < shared; i++) {
    // First sort the keys
    var keySort = collate(aKeys[i], bKeys[i]);
    if (keySort !== 0) {
      return keySort;
    }
    // if the keys are equal sort the values
    var valueSort = collate(a[aKeys[i]], b[bKeys[i]]);
    if (valueSort !== 0) {
      return valueSort;
    }
  }
  if (aKeys.length === bKeys.length) {
    return 0;
  }
  return aKeys.length > bKeys.length ? 1 : -1;
}
3505// The collation is defined by erlangs ordered terms
3506// the atoms null, true, false come first, then numbers, strings,
3507// arrays, then objects
3508// null/undefined/NaN/Infinity/-Infinity are all considered null
// Map a value onto its collation class:
// 1 = null, 2 = boolean, 3 = number, 4 = string, 5 = array, 6 = object.
// Values of unrecognized typeof (e.g. undefined) yield undefined.
function collationIndex(x) {
  var typeOrder = ['boolean', 'number', 'string', 'object'];
  var idx = typeOrder.indexOf(typeof x);
  if (idx !== -1) {
    if (x === null) {
      return 1;
    }
    if (Array.isArray(x)) {
      return 5;
    }
    return idx < 3 ? idx + 2 : idx + 3;
  }
  /* istanbul ignore next */
  if (Array.isArray(x)) {
    return 5;
  }
}
3527
3528// conversion:
3529// x yyy zz...zz
3530// x = 0 for negative, 1 for 0, 2 for positive
3531// y = exponent (for negative numbers negated) moved so that it's >= 0
3532// z = mantisse
function numToIndexableString(num) {
  // zero gets its own collation digit so it sorts between the
  // '0'-prefixed (negative) and '2'-prefixed (positive) encodings
  if (num === 0) {
    return '1';
  }

  // convert number to exponential format for easier and
  // more succinct string sorting
  var expFormat = num.toExponential().split(/e\+?/);
  var magnitude = parseInt(expFormat[1], 10);

  var neg = num < 0;

  var result = neg ? '0' : '2';

  // first sort by magnitude
  // it's easier if all magnitudes are positive
  var magForComparison = ((neg ? -magnitude : magnitude) - MIN_MAGNITUDE);
  var magString = padLeft((magForComparison).toString(), '0', MAGNITUDE_DIGITS);

  result += SEP + magString;

  // then sort by the factor
  var factor = Math.abs(parseFloat(expFormat[0])); // [1..10)
  /* istanbul ignore next */
  if (neg) { // for negative reverse ordering
    factor = 10 - factor;
  }

  var factorStr = factor.toFixed(20);

  // strip zeros from the end
  factorStr = factorStr.replace(/\.?0+$/, '');

  result += SEP + factorStr;

  return result;
}
3571
3572// create a comparator based on the sort object
// Build a comparator over result rows for a mango `sort` specification.
// Rows compare by their extracted sort-field values; ties are broken by
// doc._id, which is what mango seems to do.
function createFieldSorter(sort) {

  function sortKey(doc) {
    return sort.map(function (sorting) {
      return getFieldFromDoc(doc, parseField(getKey(sorting)));
    });
  }

  return function (aRow, bRow) {
    var result = collate(sortKey(aRow.doc), sortKey(bRow.doc));
    if (result !== 0) {
      return result;
    }
    return compare(aRow.doc._id, bRow.doc._id);
  };
}
3595
// Apply selector filtering, sorting, and limit/skip to rows entirely in
// memory (used when the index can't answer part of the query).
function filterInMemoryFields(rows, requestDef, inMemoryFields) {
  var filtered = rows.filter(function (row) {
    return rowFilter(row.doc, requestDef.selector, inMemoryFields);
  });

  if (requestDef.sort) {
    // in-memory sort
    filtered = filtered.sort(createFieldSorter(requestDef.sort));
    var firstSort = requestDef.sort[0];
    if (typeof firstSort !== 'string' && getValue(firstSort) === 'desc') {
      filtered = filtered.reverse();
    }
  }

  if ('limit' in requestDef || 'skip' in requestDef) {
    // have to do the limit in-memory
    var skip = requestDef.skip || 0;
    var end = ('limit' in requestDef ? requestDef.limit : filtered.length) + skip;
    filtered = filtered.slice(skip, end);
  }
  return filtered;
}
3619
// Return true when `doc` matches every in-memory-checked field of
// `selector`.
function rowFilter(doc, selector, inMemoryFields) {
  return inMemoryFields.every(function (field) {
    // combination operators ($or/$not/$nor) don't name a doc field, so
    // skip the field parse/lookup for them (previously computed and
    // discarded)
    if (isCombinationalField(field)) {
      return matchCominationalSelector(field, selector[field], doc);
    }

    var parsedField = parseField(field);
    var docFieldValue = getFieldFromDoc(doc, parsedField);
    return matchSelector(selector[field], doc, parsedField, docFieldValue);
  });
}
3632
// Recursively check one field's matcher against the doc value.
// `matcher` may be a primitive (treated as implicit $eq), or an object
// whose keys are $-operators and/or nested sub-field names.
function matchSelector(matcher, doc, parsedField, docFieldValue) {
  if (!matcher) {
    // no filtering necessary; this field is just needed for sorting
    return true;
  }

  // is matcher an object, if so continue recursion
  if (typeof matcher === 'object') {
    return Object.keys(matcher).every(function (maybeUserOperator) {
      var userValue = matcher[ maybeUserOperator ];
      // explicit operator
      if (maybeUserOperator.indexOf("$") === 0) {
        return match(maybeUserOperator, doc, userValue, parsedField, docFieldValue);
      } else {
        // nested sub-field name, e.g. {address: {city: 'X'}}
        var subParsedField = parseField(maybeUserOperator);

        if (
          docFieldValue === undefined &&
          typeof userValue !== "object" &&
          subParsedField.length > 0
        ) {
          // the field does not exist, return or getFieldFromDoc will throw
          return false;
        }

        var subDocFieldValue = getFieldFromDoc(docFieldValue, subParsedField);

        if (typeof userValue === "object") {
          // field value is an object that might contain more operators
          return matchSelector(userValue, doc, parsedField, subDocFieldValue);
        }

        // implicit operator
        return match("$eq", doc, userValue, subParsedField, subDocFieldValue);
      }
    });
  }

  // no more depth, No need to recurse further
  return matcher === docFieldValue;
}
3674
// Evaluate a combination operator ($or, $not, $nor) against `doc`.
function matchCominationalSelector(field, matcher, doc) {

  if (field === '$or') {
    return matcher.some(function (orMatchers) {
      return rowFilter(doc, orMatchers, Object.keys(orMatchers));
    });
  }

  if (field === '$not') {
    return !rowFilter(doc, matcher, Object.keys(matcher));
  }

  // `$nor`: doc must match none of the sub-selectors. Use `some` rather
  // than `find` so the result doesn't hinge on the truthiness of the
  // matched element.
  return !matcher.some(function (orMatchers) {
    return rowFilter(doc, orMatchers, Object.keys(orMatchers));
  });

}
3693
// Dispatch a single explicit $-operator to its implementation in the
// `matchers` table.
function match(userOperator, doc, userValue, parsedField, docFieldValue) {
  var matcherFn = matchers[userOperator];
  /* istanbul ignore next */
  if (!matcherFn) {
    throw new Error('unknown operator "' + userOperator +
      '" - should be one of $eq, $lte, $lt, $gt, $gte, $exists, $ne, $in, ' +
      '$nin, $size, $mod, $regex, $elemMatch, $type, $allMatch or $all');
  }
  return matcherFn(doc, userValue, parsedField, docFieldValue);
}
3703
// True when the field is present and not null.
function fieldExists(docFieldValue) {
  return docFieldValue !== undefined && docFieldValue !== null;
}
3707
// True when the field is present; null still counts as present.
function fieldIsNotUndefined(docFieldValue) {
  return docFieldValue !== undefined;
}
3711
// $mod helper: the doc value must be an integer number, and
// value % userValue[0] must equal userValue[1].
function modField(docFieldValue, userValue) {
  var isIntegerValue = typeof docFieldValue === "number" &&
    parseInt(docFieldValue, 10) === docFieldValue;
  if (!isIntegerValue) {
    return false;
  }
  return docFieldValue % userValue[0] === userValue[1];
}
3723
// $in/$nin helper: does the doc value (or, for array fields, any of its
// elements) collate-equal one of the user-supplied values?
function arrayContainsValue(docFieldValue, userValue) {
  return userValue.some(function (candidate) {
    if (docFieldValue instanceof Array) {
      return docFieldValue.some(function (item) {
        return collate(candidate, item) === 0;
      });
    }
    return collate(candidate, docFieldValue) === 0;
  });
}
3735
// $all helper: every user value must collate-equal some element of the
// doc's array value.
function arrayContainsAllValues(docFieldValue, userValue) {
  return userValue.every(function (candidate) {
    return docFieldValue.some(function (item) {
      return collate(candidate, item) === 0;
    });
  });
}
3743
// $size helper: the array's length must equal the user value exactly.
function arraySize(docFieldValue, userValue) {
  return userValue === docFieldValue.length;
}
3747
// $regex helper: test the doc value against a user-supplied pattern
// string (compiled fresh on each call).
function regexMatch(docFieldValue, userValue) {
  return new RegExp(userValue).test(docFieldValue);
}
3753
// $type helper: check the doc value against a CouchDB type name.
// Unknown type names fall through and yield undefined (falsy).
function typeMatch(docFieldValue, userValue) {
  if (userValue === 'null') {
    return docFieldValue === null;
  }
  if (userValue === 'boolean') {
    return typeof docFieldValue === 'boolean';
  }
  if (userValue === 'number') {
    return typeof docFieldValue === 'number';
  }
  if (userValue === 'string') {
    return typeof docFieldValue === 'string';
  }
  if (userValue === 'array') {
    return docFieldValue instanceof Array;
  }
  if (userValue === 'object') {
    return ({}).toString.call(docFieldValue) === '[object Object]';
  }
}
3771
// Implementations for every supported mango $-operator. Each matcher is
// invoked as (doc, userValue, parsedField, docFieldValue) and returns a
// boolean.
var matchers = {

  // $elemMatch: at least one element of the array field matches the
  // sub-selector. Arrays of objects are filtered like rows; arrays of
  // scalars go through matchSelector directly.
  '$elemMatch': function (doc, userValue, parsedField, docFieldValue) {
    if (!Array.isArray(docFieldValue)) {
      return false;
    }

    if (docFieldValue.length === 0) {
      return false;
    }

    if (typeof docFieldValue[0] === 'object' && docFieldValue[0] !== null) {
      return docFieldValue.some(function (val) {
        return rowFilter(val, userValue, Object.keys(userValue));
      });
    }

    return docFieldValue.some(function (val) {
      return matchSelector(userValue, doc, parsedField, val);
    });
  },

  // $allMatch: every element of the (non-empty) array field matches the
  // sub-selector.
  '$allMatch': function (doc, userValue, parsedField, docFieldValue) {
    if (!Array.isArray(docFieldValue)) {
      return false;
    }

    /* istanbul ignore next */
    if (docFieldValue.length === 0) {
      return false;
    }

    if (typeof docFieldValue[0] === 'object' && docFieldValue[0] !== null) {
      return docFieldValue.every(function (val) {
        return rowFilter(val, userValue, Object.keys(userValue));
      });
    }

    return docFieldValue.every(function (val) {
      return matchSelector(userValue, doc, parsedField, val);
    });
  },

  // Comparison operators delegate to collate(); an undefined field never
  // matches.
  '$eq': function (doc, userValue, parsedField, docFieldValue) {
    return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) === 0;
  },

  '$gte': function (doc, userValue, parsedField, docFieldValue) {
    return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) >= 0;
  },

  '$gt': function (doc, userValue, parsedField, docFieldValue) {
    return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) > 0;
  },

  '$lte': function (doc, userValue, parsedField, docFieldValue) {
    return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) <= 0;
  },

  '$lt': function (doc, userValue, parsedField, docFieldValue) {
    return fieldIsNotUndefined(docFieldValue) && collate(docFieldValue, userValue) < 0;
  },

  '$exists': function (doc, userValue, parsedField, docFieldValue) {
    //a field that is null is still considered to exist
    if (userValue) {
      return fieldIsNotUndefined(docFieldValue);
    }

    return !fieldIsNotUndefined(docFieldValue);
  },

  // $mod: userValue is [divisor, remainder]; field must be an integer.
  '$mod': function (doc, userValue, parsedField, docFieldValue) {
    return fieldExists(docFieldValue) && modField(docFieldValue, userValue);
  },

  // $ne: userValue is an array of disallowed values, none of which may
  // collate-equal the field.
  '$ne': function (doc, userValue, parsedField, docFieldValue) {
    return userValue.every(function (neValue) {
      return collate(docFieldValue, neValue) !== 0;
    });
  },
  '$in': function (doc, userValue, parsedField, docFieldValue) {
    return fieldExists(docFieldValue) && arrayContainsValue(docFieldValue, userValue);
  },

  '$nin': function (doc, userValue, parsedField, docFieldValue) {
    return fieldExists(docFieldValue) && !arrayContainsValue(docFieldValue, userValue);
  },

  '$size': function (doc, userValue, parsedField, docFieldValue) {
    return fieldExists(docFieldValue) &&
      Array.isArray(docFieldValue) &&
      arraySize(docFieldValue, userValue);
  },

  '$all': function (doc, userValue, parsedField, docFieldValue) {
    return Array.isArray(docFieldValue) && arrayContainsAllValues(docFieldValue, userValue);
  },

  // $regex: userValue is an array of patterns; all must match the
  // (string) field.
  '$regex': function (doc, userValue, parsedField, docFieldValue) {
    return fieldExists(docFieldValue) &&
      typeof docFieldValue == "string" &&
      userValue.every(function (regexValue) {
        return regexMatch(docFieldValue, regexValue);
      });
  },

  '$type': function (doc, userValue, parsedField, docFieldValue) {
    return typeMatch(docFieldValue, userValue);
  }
};
3883
// return true if the given doc matches the supplied selector
function matchesSelector(doc, selector) {
  /* istanbul ignore if */
  if (typeof selector !== 'object') {
    // match the CouchDB error message
    throw new Error('Selector error: expected a JSON object');
  }

  var massaged = massageSelector(selector);
  var rowsMatched = filterInMemoryFields(
    [{ doc }],
    { selector: massaged },
    Object.keys(massaged)
  );
  return rowsMatched && rowsMatched.length === 1;
}
3900
// Compile a filter-function source string into a callable, evaluated in
// a fresh vm sandbox.
function evalFilter(input) {
  return vm.runInNewContext(
    '(function() {\n"use strict";\nreturn ' + input + '\n})()'
  );
}
3906
// Wrap a view's map-function source so it behaves like a filter: the
// compiled function returns true iff the view emits at least once for
// the given doc.
function evalView(input) {
  var source =
    '"use strict";\n' +
    'var emitted = false;\n' +
    'var emit = function (a, b) {\n' +
    '  emitted = true;\n' +
    '};\n' +
    'var view = ' + input + ';\n' +
    'view(doc);\n' +
    'if (emitted) {\n' +
    '  return true;\n' +
    '}';

  return vm.runInNewContext('(function(doc) {\n' + source + '\n})');
}
3923
// Reject a `selector` combined with any filter other than '_selector';
// calls back with an Error on conflict, or with no arguments on success.
function validate(opts, callback) {
  if (opts.selector && opts.filter && opts.filter !== '_selector') {
    var filterName = typeof opts.filter === 'string' ?
      opts.filter : 'function';
    return callback(new Error('selector invalid for filter "' + filterName + '"'));
  }
  callback();
}
3934
// Normalize changes options in place: infer the filter kind from the
// presence of `view`/`selector`, then canonicalize design-doc function
// names for string filters.
function normalize(opts) {
  if (!opts.filter) {
    if (opts.view) {
      opts.filter = '_view';
    } else if (opts.selector) {
      opts.filter = '_selector';
    }
  }

  if (opts.filter && typeof opts.filter === 'string') {
    if (opts.filter === '_view') {
      opts.view = normalizeDesignDocFunctionName(opts.view);
    } else {
      opts.filter = normalizeDesignDocFunctionName(opts.filter);
    }
  }
}
3952
// Client-side filtering applies only when a named (string) filter is
// requested, no doc_ids whitelist is given, and the db is not remote.
function shouldFilter(changesHandler, opts) {
  var isNamedFilter = opts.filter && typeof opts.filter === 'string';
  return isNamedFilter &&
    !opts.doc_ids && !isRemote(changesHandler.db);
}
3957
// Resolve opts.filter into a real function before streaming changes:
// '_view' loads the view's map function from its design doc, a selector
// compiles to a matchesSelector closure, and any other string names a
// function in a design doc's `filters`. Failures (bad `view` param,
// missing design-doc keys) go to opts.complete; otherwise control passes
// to changesHandler.doChanges(opts).
function filter(changesHandler, opts) {
  var callback = opts.complete;
  if (opts.filter === '_view') {
    if (!opts.view || typeof opts.view !== 'string') {
      var err = createError(BAD_REQUEST,
        '`view` filter parameter not found or invalid.');
      return callback(err);
    }
    // fetch a view from a design doc, make it behave like a filter
    var viewName = parseDesignDocFunctionName(opts.view);
    changesHandler.db.get('_design/' + viewName[0], function (err, ddoc) {
      /* istanbul ignore if */
      if (changesHandler.isCancelled) {
        return callback(null, {status: 'cancelled'});
      }
      /* istanbul ignore next */
      if (err) {
        return callback(generateErrorFromResponse(err));
      }
      var mapFun = ddoc && ddoc.views && ddoc.views[viewName[1]] &&
        ddoc.views[viewName[1]].map;
      if (!mapFun) {
        return callback(createError(MISSING_DOC,
          (ddoc.views ? 'missing json key: ' + viewName[1] :
            'missing json key: views')));
      }
      opts.filter = evalView(mapFun);
      changesHandler.doChanges(opts);
    });
  } else if (opts.selector) {
    // selector filters run locally against each changed doc
    opts.filter = function (doc) {
      return matchesSelector(doc, opts.selector);
    };
    changesHandler.doChanges(opts);
  } else {
    // fetch a filter from a design doc
    var filterName = parseDesignDocFunctionName(opts.filter);
    changesHandler.db.get('_design/' + filterName[0], function (err, ddoc) {
      /* istanbul ignore if */
      if (changesHandler.isCancelled) {
        return callback(null, {status: 'cancelled'});
      }
      /* istanbul ignore next */
      if (err) {
        return callback(generateErrorFromResponse(err));
      }
      var filterFun = ddoc && ddoc.filters && ddoc.filters[filterName[1]];
      if (!filterFun) {
        return callback(createError(MISSING_DOC,
          ((ddoc && ddoc.filters) ? 'missing json key: ' + filterName[1]
            : 'missing json key: filters')));
      }
      opts.filter = evalFilter(filterFun);
      changesHandler.doChanges(opts);
    });
  }
}
4015
// Install the changes-filter helpers on the PouchDB constructor so the
// changes machinery can find them.
function applyChangesFilterPlugin(PouchDB) {
  PouchDB._changesFilterPlugin = {
    validate: validate,
    normalize: normalize,
    shouldFilter: shouldFilter,
    filter: filter
  };
}
4024
// Register the changes-filter plugin on the core constructor.
// TODO: remove from pouchdb-core (breaking)
PouchDB.plugin(applyChangesFilterPlugin);

// Expose the bundled package version on the constructor.
PouchDB.version = version;
4029
// Type guard: is `f` callable?
function isFunction(f) {
  return typeof f === 'function';
}
4033
// Resolve a sublevel's key prefix: call db.prefix() when it exposes one,
// otherwise the db value itself acts as the prefix.
function getPrefix(db) {
  if (typeof db.prefix === 'function') {
    return db.prefix();
  }
  return db;
}
4040
// Shallow-copy the enumerable properties of `_obj` (for...in, so
// inherited enumerables are copied too, matching the original).
function clone$1(_obj) {
  var copy = {};
  for (var key in _obj) {
    copy[key] = _obj[key];
  }
  return copy;
}
4048
// Build the storage engine ("nut") that sublevel() wraps: every key is
// encoded as precodec([prefix, encodedKey]) and operations go straight
// to the backing store at db.db.
function nut(db, precodec, codec) {
  // Combine a sublevel prefix and a user key into one storage key.
  function encodePrefix(prefix, key, opts1, opts2) {
    return precodec.encode([ prefix, codec.encodeKey(key, opts1, opts2 ) ]);
  }

  // Copy the prefix's default key/value encodings onto a batch op,
  // unless the op already carries its own.
  function addEncodings(op, prefix) {
    if (prefix && prefix.options) {
      op.keyEncoding =
        op.keyEncoding || prefix.options.keyEncoding;
      op.valueEncoding =
        op.valueEncoding || prefix.options.valueEncoding;
    }
    return op;
  }

  db.open(function () { /* no-op */});

  return {
    // Write a batch of put/del ops; each op may target its own prefix.
    apply: function (ops, opts, cb) {
      opts = opts || {};

      var batch = [];
      var i = -1;
      var len = ops.length;

      while (++i < len) {
        var op = ops[i];
        addEncodings(op, op.prefix);
        op.prefix = getPrefix(op.prefix);
        batch.push({
          key: encodePrefix(op.prefix, op.key, opts, op),
          // 'del' ops carry no value (the && short-circuits to false)
          value: op.type !== 'del' && codec.encodeValue(op.value, opts, op),
          type: op.type
        });
      }
      db.db.batch(batch, opts, cb);
    },
    // Read one key under the given prefix, decoding the stored value.
    get: function (key, prefix, opts, cb) {
      opts.asBuffer = codec.valueAsBuffer(opts);
      return db.db.get(
        encodePrefix(prefix, key, opts),
        opts,
        function (err, value) {
          if (err) {
            cb(err);
          } else {
            cb(null, codec.decodeValue(value, opts));
          }
        }
      );
    },
    // Make a (key, value) decoder for read streams; strips the prefix
    // component from each decoded key.
    createDecoder: function (opts) {
      return function (key, value) {
        return {
          key: codec.decodeKey(precodec.decode(key)[1], opts),
          value: codec.decodeValue(value, opts)
        };
      };
    },
    isClosed: function isClosed() {
      return db.isClosed();
    },
    close: function close(cb) {
      return db.close(cb);
    },
    // Raw iterator over a prefix's key range; translates ltgt-style
    // range options into the encoded key space.
    iterator: function (_opts) {
      var opts = clone$1(_opts || {});
      var prefix = _opts.prefix || [];

      function encodeKey(key) {
        return encodePrefix(prefix, key, opts, {});
      }

      ltgt.toLtgt(_opts, opts, encodeKey, precodec.lowerBound, precodec.upperBound);

      // if these legacy values are in the options, remove them

      opts.prefix = null;

      //************************************************
      //hard coded defaults, for now...
      //TODO: pull defaults and encoding out of levelup.
      opts.keyAsBuffer = opts.valueAsBuffer = false;
      //************************************************


      //this is vital, otherwise limit: undefined will
      //create an empty stream.
      /* istanbul ignore next */
      if ('number' !== typeof opts.limit) {
        opts.limit = -1;
      }

      opts.keyAsBuffer = precodec.buffer;
      opts.valueAsBuffer = codec.valueAsBuffer(opts);

      // Normalize the store's iterator to a minimal {next, end} shape.
      function wrapIterator(iterator) {
        return {
          next: function (cb) {
            return iterator.next(cb);
          },
          end: function (cb) {
            iterator.end(cb);
          }
        };
      }

      return wrapIterator(db.db.iterator(opts));
    }
  };
}
4160
// Mirrors levelup's NotFoundError so callers can detect missing keys by
// checking err.name; a single shared instance (NOT_FOUND_ERROR below) is
// handed to every failed get().
class NotFoundError extends Error {
  constructor() {
    super();
    this.name = 'NotFoundError';
  }
}
4167
// EventEmitter constructor pulled off the events-module import.
var EventEmitter = EE.EventEmitter;
// Version string of the embedded level-sublevel fork.
var version$1 = "6.5.4";

// Shared singleton: every failed sublevel get() reports this error.
var NOT_FOUND_ERROR = new NotFoundError();
4172
// Embedded fork of level-sublevel: presents an EventEmitter-based,
// levelup-like API (get/put/batch/createReadStream/sublevel) in which
// every key is transparently namespaced under `prefix` via `nut`.
var sublevel = function (nut, prefix, createStream, options) {
  var emitter = new EventEmitter();
  emitter.sublevels = {};
  emitter.options = options;

  emitter.version = version$1;

  emitter.methods = {};
  prefix = prefix || [];

  // Merge this sublevel's default options with per-call options;
  // per-call values win, undefined entries are skipped.
  function mergeOpts(opts) {
    var o = {};
    var k;
    if (options) {
      for (k in options) {
        if (typeof options[k] !== 'undefined') {
          o[k] = options[k];
        }
      }
    }
    if (opts) {
      for (k in opts) {
        if (typeof opts[k] !== 'undefined') {
          o[k] = opts[k];
        }
      }
    }
    return o;
  }

  // put(key, value[, opts], cb) — single write; emits 'put' on success.
  emitter.put = function (key, value, opts, cb) {
    if ('function' === typeof opts) {
      cb = opts;
      opts = {};
    }

    nut.apply([{
      key, value,
      prefix: prefix.slice(), type: 'put'
    }], mergeOpts(opts), function (err) {
      /* istanbul ignore next */
      if (err) {
        return cb(err);
      }
      emitter.emit('put', key, value);
      cb(null);
    });
  };

  // Expose a copy of this sublevel's prefix (used by getPrefix above).
  emitter.prefix = function () {
    return prefix.slice();
  };

  // batch(ops[, opts], cb) — multi-op write; each op may target another
  // sublevel via its own `prefix`. Emits 'batch' on success.
  emitter.batch = function (ops, opts, cb) {
    if ('function' === typeof opts) {
      cb = opts;
      opts = {};
    }

    ops = ops.map(function (op) {
      return {
        key: op.key,
        value: op.value,
        prefix: op.prefix || prefix,
        keyEncoding: op.keyEncoding, // *
        valueEncoding: op.valueEncoding, // * (TODO: encodings on sublevel)
        type: op.type
      };
    });

    nut.apply(ops, mergeOpts(opts), function (err) {
      /* istanbul ignore next */
      if (err) {
        return cb(err);
      }
      emitter.emit('batch', ops);
      cb(null);
    });
  };

  // get(key[, opts], cb) — any engine error is collapsed into the shared
  // NOT_FOUND_ERROR singleton.
  emitter.get = function (key, opts, cb) {
    /* istanbul ignore else */
    if ('function' === typeof opts) {
      cb = opts;
      opts = {};
    }
    nut.get(key, prefix, mergeOpts(opts), function (err, value) {
      if (err) {
        cb(NOT_FOUND_ERROR);
      } else {
        cb(null, value);
      }
    });
  };

  // sublevel(name[, opts]) — create or reuse a nested namespace.
  emitter.sublevel = function (name, opts) {
    return emitter.sublevels[name] =
      emitter.sublevels[name] || sublevel(nut, prefix.concat(name), createStream, mergeOpts(opts));
  };

  // Stream all entries under this prefix (range options supported via
  // the nut iterator).
  emitter.readStream = emitter.createReadStream = function (opts) {
    opts = mergeOpts(opts);
    opts.prefix = prefix;
    var stream;
    var it = nut.iterator(opts);

    stream = createStream(opts, nut.createDecoder(opts));
    stream.setIterator(it);

    return stream;
  };

  emitter.close = function (cb) {
    nut.close(cb);
  };

  // NOTE(review): the nut object built in this file defines isClosed but
  // no isOpen, so emitter.isOpen is likely undefined — confirm.
  emitter.isOpen = nut.isOpen;
  emitter.isClosed = nut.isClosed;

  return emitter;
};
4294
4295/* Copyright (c) 2012-2014 LevelUP contributors
4296 * See list at <https://github.com/rvagg/node-levelup#contributing>
4297 * MIT License <https://github.com/rvagg/node-levelup/blob/master/LICENSE.md>
4298 */
4299
// Readable comes from the bundled readable-stream package, not Node's
// core stream module.
var Readable = ReadableStreamCore.Readable;
4301
// Create a constructor extending `parent` that tolerates being invoked
// without `new`; `init` runs as the constructor body.
function createClass$1(parent, init) {
  const klass = function (...args) {
    if (this instanceof klass) {
      init.apply(this, args);
    } else {
      return new klass(...args);
    }
  };
  klass.prototype = Object.create(parent.prototype, {
    constructor: { value: klass }
  });
  return klass;
}
4314
// Readable object-mode stream over a leveldown-style iterator. The
// iterator is attached after construction via setIterator(), so _read()
// may fire before one exists; the _waiting flag bridges that gap.
class ReadStreamInternal extends Readable {
  constructor(options, makeData) {
    super({ objectMode: true, highWaterMark: options.highWaterMark });
    this._setup(options, makeData);
  }

  // Shared init, also invoked by the createClass$1-based ReadStream
  // wrapper below (which bypasses this constructor).
  _setup(options, makeData) {
    super.constructor({ objectMode: true, highWaterMark: options.highWaterMark });

    // purely to keep `db` around until we're done so it's not GCed if the user doesn't keep a ref
    this._waiting = false;
    this._options = options;
    this._makeData = makeData;
  }

  // Attach the underlying iterator; if the stream was already destroyed
  // end it immediately, and if a read is pending satisfy it now.
  setIterator(it) {
    this._iterator = it;
    /* istanbul ignore if */
    if (this._destroyed) {
      return it.end(function () {});
    }
    /* istanbul ignore if */
    if (this._waiting) {
      this._waiting = false;
      return this._read();
    }
    return this;
  }

  // Tear down: emit 'error' for real failures, end the iterator, and
  // emit 'close' exactly once.
  _cleanup(err) {
    if (this._destroyed) {
      return;
    }

    this._destroyed = true;

    var self = this;
    /* istanbul ignore if */
    if (err && err.message !== 'iterator has ended') {
      self.emit('error', err);
    }

    /* istanbul ignore else */
    if (self._iterator) {
      self._iterator.end(function () {
        self._iterator = null;
        self.emit('close');
      });
    } else {
      self.emit('close');
    }
  }

  destroy() {
    this._cleanup();
  }

  // Pull one entry from the iterator; an undefined key AND value signals
  // the natural end of the range.
  _read() {
    var self = this;
    /* istanbul ignore if */
    if (self._destroyed) {
      return;
    }
    /* istanbul ignore if */
    if (!self._iterator) {
      return this._waiting = true;
    }

    self._iterator.next(function (err, key, value) {
      if (err || (key === undefined && value === undefined)) {
        if (!err && !self._destroyed) {
          self.push(null);
        }
        return self._cleanup(err);
      }


      value = self._makeData(key, value);
      if (!self._destroyed) {
        self.push(value);
      }
    });
  }
}
4399
// Callable (new-less) wrapper around ReadStreamInternal, kept for
// level-sublevel API compatibility; it runs _setup in place of the
// class constructor.
const ReadStream = createClass$1(ReadStreamInternal, function (options, makeData) {
  ReadStreamInternal.prototype._setup.call(this, options, makeData);
});
4403
// Key pre-codec for sublevels: a key is stored as
// '\xff' + sublevelName + '\xff' + actualKey.
var precodec = {
  encode: function (decodedKey) {
    return '\xff' + decodedKey[0] + '\xff' + decodedKey[1];
  },
  decode: function (encodedKeyAsBuffer) {
    var asString = encodedKeyAsBuffer.toString();
    var separatorIdx = asString.indexOf('\xff', 1);
    return [
      asString.substring(1, separatorIdx),
      asString.substring(separatorIdx + 1)
    ];
  },
  lowerBound: '\x00',
  upperBound: '\xff'
};
4416
// Value codec with level-codec's standard encodings.
var codec = new Codec();

// Wrap a levelup instance with sublevel support; entry point used by the
// leveldb adapter.
function sublevelPouch(db) {
  return sublevel(nut(db, precodec, codec), [], ReadStream, db.options);
}
4422
// Emulate the allDocs `keys` option by issuing one _allDocs call per key
// and stitching the rows back together in the requested key order.
// Missing keys produce {key, error: 'not_found'} rows.
function allDocsKeysQuery(api, opts) {
  var finalResults = {
    offset: opts.skip
  };
  var perKeyQueries = opts.keys.map(function (key) {
    // each sub-query inherits the caller's options, minus paging/keys
    var subOpts = Object.assign({key, deleted: 'ok'}, opts);
    ['limit', 'skip', 'keys'].forEach(function (optKey) {
      delete subOpts[optKey];
    });
    return new Promise(function (resolve, reject) {
      api._allDocs(subOpts, function (err, res) {
        /* istanbul ignore if */
        if (err) {
          return reject(err);
        }
        /* istanbul ignore if */
        if (opts.update_seq && res.update_seq !== undefined) {
          finalResults.update_seq = res.update_seq;
        }
        finalResults.total_rows = res.total_rows;
        resolve(res.rows[0] || {key, error: 'not_found'});
      });
    });
  });
  return Promise.all(perKeyQueries).then(function (rows) {
    finalResults.rows = rows;
    return finalResults;
  });
}
4452
// Decode a base64 string into a binary string. Node silently skips
// characters it can't decode, so validity is checked by re-encoding the
// buffer and comparing with the input; invalid input throws.
function thisAtob(str) {
  var decoded = Buffer.from(str, 'base64');
  if (decoded.toString('base64') !== str) {
    throw new Error("attachment is not a valid base64 string");
  }
  return decoded.toString('binary');
}
4462
// Encode a binary string as base64 (Buffer-backed btoa equivalent).
function thisBtoa(str) {
  var binary = Buffer.from(str, 'binary');
  return binary.toString('base64');
}
4466
// Build a Buffer from a 'binary'- or 'base64'-encoded string and tag it
// with a content type for parity with browser Blobs.
function typedBuffer(binString, buffType, type) {
  // buffType is either 'binary' or 'base64'
  const result = Buffer.from(binString, buffType);
  result.type = type; // non-standard, but used for consistency with the browser
  return result;
}
4473
// Convert a base64 string into a type-tagged Buffer (browser "blob"
// stand-in).
function b64ToBluffer(b64, type) {
  return typedBuffer(b64, 'base64', type);
}
4477
// From http://stackoverflow.com/questions/14967647/
4479
// Convert a binary string into a type-tagged Buffer (browser "blob"
// stand-in).
function binStringToBluffer(binString, type) {
  return typedBuffer(binString, 'binary', type);
}
4483
4484// This function is unused in Node
4485
// Node flavour of blobToBase64: the "blob" is already a Buffer, so just
// re-encode it and hand the string to the callback.
function blobToBase64(blobOrBuffer, callback) {
  var b64 = blobOrBuffer.toString('base64');
  callback(b64);
}
4489
4490// not used in Node, but here for completeness
4491
4492// simplified API. universal browser support is assumed
4493
4494//Can't find original post, but this is close
4495
// Build a lookup table: each array item becomes a key mapped to true.
function toObject(array) {
  var table = {};
  array.forEach(function (item) {
    table[item] = true;
  });
  return table;
}
// List of top level reserved words for doc — underscore-prefixed keys
// that PouchDB itself recognizes at the document root.
var reservedWords = toObject([
  '_id',
  '_rev',
  '_access',
  '_attachments',
  '_deleted',
  '_revisions',
  '_revs_info',
  '_conflicts',
  '_deleted_conflicts',
  '_local_seq',
  '_rev_tree',
  // replication documents
  '_replication_id',
  '_replication_state',
  '_replication_state_time',
  '_replication_state_reason',
  '_replication_stats',
  // Specific to Couchbase Sync Gateway
  '_removed'
]);
4524
// List of reserved words that should end up in the document (i.e. are
// preserved rather than stripped from user input).
var dataWords = toObject([
  '_access',
  '_attachments',
  // replication documents
  '_replication_id',
  '_replication_state',
  '_replication_state_time',
  '_replication_state_reason',
  '_replication_stats'
]);
4536
// Split a revision string like "3-abc123" into {prefix: 3, id: 'abc123'}.
// Malformed strings yield an INVALID_REV error object instead.
function parseRevisionInfo(rev$$1) {
  if (!/^\d+-/.test(rev$$1)) {
    return createError(INVALID_REV);
  }
  var dashIdx = rev$$1.indexOf('-');
  return {
    prefix: parseInt(rev$$1.substring(0, dashIdx), 10),
    id: rev$$1.substring(dashIdx + 1)
  };
}
4549
// Rebuild a rev tree from a _revisions object (the new_edits:false
// replication path). The newest rev gets `opts` as its status node;
// every ancestor is marked {status: 'missing'}.
function makeRevTreeFromRevisions(revisions, opts) {
  const revisionIds = revisions.ids;
  const pos = revisions.start - revisionIds.length + 1;

  // ids[0] is the newest rev; wrap each older rev around the chain
  let ids = [revisionIds[0], opts, []];
  for (let i = 1; i < revisionIds.length; i++) {
    ids = [revisionIds[i], {status: 'missing'}, [ids]];
  }

  return [{pos, ids}];
}
4565
// Preprocess documents, parse their revisions, assign an id and a
// revision for new writes that are missing them, etc
//
// Returns {metadata, data}: underscore-prefixed fields (except those in
// dataWords) are moved into metadata with the underscore stripped;
// everything else stays on data. A malformed _rev returns the error
// object from parseRevisionInfo; an unknown '_'-prefixed field throws
// DOC_VALIDATION.
function parseDoc(doc, newEdits, dbOpts) {
  if (!dbOpts) {
    dbOpts = {
      deterministic_revs: true
    };
  }

  var nRevNum;
  var newRevId;
  var revInfo;
  // `opts` becomes the status node stored in the rev tree for this rev
  var opts = {status: 'available'};
  if (doc._deleted) {
    opts.deleted = true;
  }

  if (newEdits) {
    // normal write path: generate a missing _id and a fresh rev id
    if (!doc._id) {
      doc._id = uuid();
    }
    newRevId = rev(doc, dbOpts.deterministic_revs);
    if (doc._rev) {
      revInfo = parseRevisionInfo(doc._rev);
      if (revInfo.error) {
        return revInfo;
      }
      // graft the new rev onto the (possibly unseen) parent rev
      doc._rev_tree = [{
        pos: revInfo.prefix,
        ids: [revInfo.id, {status: 'missing'}, [[newRevId, opts, []]]]
      }];
      nRevNum = revInfo.prefix + 1;
    } else {
      // brand-new document: single-node tree rooted at generation 1
      doc._rev_tree = [{
        pos: 1,
        ids : [newRevId, opts, []]
      }];
      nRevNum = 1;
    }
  } else {
    // new_edits:false (replication) path: trust the caller's revisions
    if (doc._revisions) {
      doc._rev_tree = makeRevTreeFromRevisions(doc._revisions, opts);
      nRevNum = doc._revisions.start;
      newRevId = doc._revisions.ids[0];
    }
    if (!doc._rev_tree) {
      // no _revisions given; build a one-node tree from _rev alone
      revInfo = parseRevisionInfo(doc._rev);
      if (revInfo.error) {
        return revInfo;
      }
      nRevNum = revInfo.prefix;
      newRevId = revInfo.id;
      doc._rev_tree = [{
        pos: nRevNum,
        ids: [newRevId, opts, []]
      }];
    }
  }

  // invalidIdError presumably throws on a bad _id (helper defined
  // elsewhere in this file) — TODO confirm
  invalidIdError(doc._id);

  doc._rev = nRevNum + '-' + newRevId;

  // split the doc into metadata (underscore fields) and data (the rest)
  var result = {metadata : {}, data : {}};
  for (var key in doc) {
    /* istanbul ignore else */
    if (Object.prototype.hasOwnProperty.call(doc, key)) {
      var specialKey = key[0] === '_';
      if (specialKey && !reservedWords[key]) {
        var error = createError(DOC_VALIDATION, key);
        error.message = DOC_VALIDATION.message + ': ' + key;
        throw error;
      } else if (specialKey && !dataWords[key]) {
        result.metadata[key.slice(1)] = doc[key];
      } else {
        result.data[key] = doc[key];
      }
    }
  }
  return result;
}
4647
// Merge an incoming write into an already-stored document (`prev` is
// the stored metadata). Handles no-op replications, resurrection of
// deleted docs, conflict detection (when newEdits), and finally
// delegates the write to `writeDoc`. `cb` is called directly for
// no-op/conflict outcomes, otherwise passed along to writeDoc.
function updateDoc(revLimit, prev, docInfo, results,
                   i, cb, writeDoc, newEdits) {

  // replication (new_edits:false) of a rev we already have: nothing to do
  if (revExists(prev.rev_tree, docInfo.metadata.rev) && !newEdits) {
    results[i] = docInfo;
    return cb();
  }

  // sometimes this is pre-calculated. historically not always
  var previousWinningRev = prev.winningRev || winningRev(prev);
  var previouslyDeleted = 'deleted' in prev ? prev.deleted :
    isDeleted(prev, previousWinningRev);
  var deleted = 'deleted' in docInfo.metadata ? docInfo.metadata.deleted :
    isDeleted(docInfo.metadata);
  var isRoot = /^1-/.test(docInfo.metadata.rev);

  // resurrecting a deleted doc with a generation-1 rev: re-parent the
  // write onto the previous winning rev so the tree stays connected
  if (previouslyDeleted && !deleted && newEdits && isRoot) {
    var newDoc = docInfo.data;
    newDoc._rev = previousWinningRev;
    newDoc._id = docInfo.metadata.id;
    docInfo = parseDoc(newDoc, newEdits);
  }

  var merged = merge(prev.rev_tree, docInfo.metadata.rev_tree[0], revLimit);

  // a user write (newEdits) conflicts unless it extends a leaf;
  // reviving a deleted doc on a new branch is also a conflict
  var inConflict = newEdits && ((
    (previouslyDeleted && deleted && merged.conflicts !== 'new_leaf') ||
    (!previouslyDeleted && merged.conflicts !== 'new_leaf') ||
    (previouslyDeleted && !deleted && merged.conflicts === 'new_branch')));

  if (inConflict) {
    var err = createError(REV_CONFLICT);
    results[i] = err;
    return cb();
  }

  var newRev = docInfo.metadata.rev;
  docInfo.metadata.rev_tree = merged.tree;
  docInfo.stemmedRevs = merged.stemmedRevs || [];
  /* istanbul ignore else */
  if (prev.rev_map) {
    docInfo.metadata.rev_map = prev.rev_map; // used only by leveldb
  }

  // recalculate
  var winningRev$$1 = winningRev(docInfo.metadata);
  var winningRevIsDeleted = isDeleted(docInfo.metadata, winningRev$$1);

  // calculate the total number of documents that were added/removed,
  // from the perspective of total_rows/doc_count
  var delta = (previouslyDeleted === winningRevIsDeleted) ? 0 :
    previouslyDeleted < winningRevIsDeleted ? -1 : 1;

  var newRevIsDeleted;
  if (newRev === winningRev$$1) {
    // if the new rev is the same as the winning rev, we can reuse that value
    newRevIsDeleted = winningRevIsDeleted;
  } else {
    // if they're not the same, then we need to recalculate
    newRevIsDeleted = isDeleted(docInfo.metadata, newRev);
  }

  writeDoc(docInfo, winningRev$$1, winningRevIsDeleted, newRevIsDeleted,
    true, delta, i, cb);
}
4713
// True when the root node of the doc's first rev-tree branch carries
// status 'missing', i.e. the write referenced a parent rev that was
// never stored.
function rootIsMissing(docInfo) {
  const rootNode = docInfo.metadata.rev_tree[0].ids;
  return rootNode[1].status === 'missing';
}
4717
// Orchestrate a batch of document writes: route _local/ docs straight
// to the api's local-doc methods, group the remaining docs by id, and
// process each id's writes sequentially — updateDoc for ids already in
// fetchedDocs, insertDoc for new ones. `overallCallback` fires once
// after every doc (duplicates counted once) has been handled.
function processDocs(revLimit, docInfos, api, fetchedDocs, tx, results,
                     writeDoc, opts, overallCallback) {

  // Default to 1000 locally
  revLimit = revLimit || 1000;

  // Write a document whose id has no stored counterpart.
  function insertDoc(docInfo, resultsIdx, callback) {
    // Cant insert new deleted documents
    var winningRev$$1 = winningRev(docInfo.metadata);
    var deleted = isDeleted(docInfo.metadata, winningRev$$1);
    if ('was_delete' in opts && deleted) {
      results[resultsIdx] = createError(MISSING_DOC, 'deleted');
      return callback();
    }

    // 4712 - detect whether a new document was inserted with a _rev
    var inConflict = newEdits && rootIsMissing(docInfo);

    if (inConflict) {
      var err = createError(REV_CONFLICT);
      results[resultsIdx] = err;
      return callback();
    }

    // deleted inserts don't change the doc count
    var delta = deleted ? 0 : 1;

    writeDoc(docInfo, winningRev$$1, deleted, deleted, false,
      delta, resultsIdx, callback);
  }

  var newEdits = opts.new_edits;
  var idsToDocs = new Map();

  var docsDone = 0;
  var docsToDo = docInfos.length;

  // fire overallCallback once all (non-duplicate) docs are finished
  function checkAllDocsDone() {
    if (++docsDone === docsToDo && overallCallback) {
      overallCallback();
    }
  }

  docInfos.forEach(function (currentDoc, resultsIdx) {

    // _local/ docs bypass the rev-tree machinery entirely
    if (currentDoc._id && isLocalId(currentDoc._id)) {
      var fun = currentDoc._deleted ? '_removeLocal' : '_putLocal';
      api[fun](currentDoc, {ctx: tx}, function (err, res) {
        results[resultsIdx] = err || res;
        checkAllDocsDone();
      });
      return;
    }

    var id = currentDoc.metadata.id;
    if (idsToDocs.has(id)) {
      docsToDo--; // duplicate
      idsToDocs.get(id).push([currentDoc, resultsIdx]);
    } else {
      idsToDocs.set(id, [[currentDoc, resultsIdx]]);
    }
  });

  // in the case of new_edits, the user can provide multiple docs
  // with the same id. these need to be processed sequentially
  idsToDocs.forEach(function (docs, id) {
    var numDone = 0;

    function docWritten() {
      if (++numDone < docs.length) {
        nextDoc();
      } else {
        checkAllDocsDone();
      }
    }
    function nextDoc() {
      var value = docs[numDone];
      var currentDoc = value[0];
      var resultsIdx = value[1];

      if (fetchedDocs.has(id)) {
        updateDoc(revLimit, fetchedDocs.get(id), currentDoc, results,
          resultsIdx, docWritten, writeDoc, newEdits);
      } else {
        // Ensure stemming applies to new writes as well
        var merged = merge([], currentDoc.metadata.rev_tree[0], revLimit);
        currentDoc.metadata.rev_tree = merged.tree;
        currentDoc.stemmedRevs = merged.stemmedRevs || [];
        insertDoc(currentDoc, resultsIdx, docWritten);
      }
    }
    nextDoc();
  });
}
4811
// Parse JSON with an overflow-safe fallback: JSON.parse() is fast but
// deeply-nested input can blow the call stack; vuvuzela.parse() is
// slower but cannot overflow.
function safeJsonParse(str) {
  let parsed;
  try {
    parsed = JSON.parse(str);
  } catch (e) {
    /* istanbul ignore next */
    parsed = vuvuzela.parse(str);
  }
  return parsed;
}
4823
// Stringify JSON with an overflow-safe fallback (mirror of
// safeJsonParse): fall back to vuvuzela when JSON.stringify throws.
function safeJsonStringify(json) {
  let str;
  try {
    str = JSON.stringify(json);
  } catch (e) {
    /* istanbul ignore next */
    str = vuvuzela.stringify(json);
  }
  return str;
}
4832
function readAsBlobOrBuffer(storedObject, type) {
  // In Node we stored a Buffer, so just tag the content type on it
  // (non-standard property, kept for consistency with browser Blobs).
  const tagged = storedObject;
  tagged.type = type;
  return tagged;
}
4838
// In Node the attachment is stored as-is (already a Buffer), so there
// is no conversion step; hand the data straight to the continuation.
function prepareAttachmentForStorage(attData, cb) {
  cb(attData);
}
4843
// Produce a zero-length, type-tagged Buffer for empty attachments.
function createEmptyBlobOrBuffer(type) {
  const empty = '';
  return typedBuffer(empty, 'binary', type);
}
4847
4848// similar to an idb or websql transaction object
4849
// Lazily create (and memoize) the per-store cache Map inside a
// transaction, keyed by the sublevel store's prefix.
function getCacheFor(transaction, store) {
  const prefix = store.prefix()[0];
  const allCaches = transaction._cache;
  let subCache = allCaches.get(prefix);
  if (!subCache) {
    subCache = new Map();
    allCaches.set(prefix, subCache);
  }
  return subCache;
}
4860
// A poor-man's transaction over leveldb: reads go through a per-store
// write-back cache, writes are buffered in _batch and only reach the
// db when execute() flushes them.
class LevelTransaction {
  constructor() {
    this._batch = [];
    this._cache = new Map();
  }

  // Cached get. A `null` cache entry is a tombstone meaning "known
  // missing/deleted", distinct from "never looked up" (undefined).
  get(store, key, callback) {
    const cache = getCacheFor(this, store);
    const cached = cache.get(key);
    if (cached) {
      nextTick(function () {
        callback(null, cached);
      });
      return;
    }
    if (cached === null) { // deleted marker
      /* istanbul ignore next */
      nextTick(function () {
        callback({name: 'NotFoundError'});
      });
      return;
    }
    store.get(key, function (err, value) {
      if (err) {
        /* istanbul ignore else */
        if (err.name === 'NotFoundError') {
          cache.set(key, null);
        }
        return callback(err);
      }
      cache.set(key, value);
      callback(null, value);
    });
  }

  // Buffer operations, updating the read cache so later get()s within
  // this transaction observe the pending writes.
  batch(batch) {
    for (const operation of batch) {
      const cache = getCacheFor(this, operation.prefix);
      if (operation.type === 'put') {
        cache.set(operation.key, operation.value);
      } else {
        cache.set(operation.key, null);
      }
    }
    this._batch = this._batch.concat(batch);
  }

  // Flush buffered operations to the db, deduplicating by
  // (store prefix, key) so only the most recent write per key survives.
  execute(db, callback) {
    const seen = new Set();
    const uniqBatches = [];

    // walk backwards so the last operation per key wins
    for (let i = this._batch.length - 1; i >= 0; i--) {
      const operation = this._batch[i];
      const lookupKey = operation.prefix.prefix()[0] + '\xff' + operation.key;
      if (!seen.has(lookupKey)) {
        seen.add(lookupKey);
        uniqBatches.push(operation);
      }
    }

    db.batch(uniqBatches, callback);
  }
}
4926
// Sublevel (namespace) names for each logical store inside the single
// leveldb database backing this adapter.
var DOC_STORE = 'document-store';
var BY_SEQ_STORE = 'by-sequence';
var ATTACHMENT_STORE = 'attach-store';
var BINARY_STORE = 'attach-binary-store';
var LOCAL_STORE = 'local-store';
var META_STORE = 'meta-store';

// leveldb barks if we try to open a db multiple times
// so we cache opened connections here for initstore()
var dbStores = new Map();

// store the value of update_seq in the by-sequence store the key name will
// never conflict, since the keys in the by-sequence store are integers
var UPDATE_SEQ_KEY = '_local_last_update_seq';
var DOC_COUNT_KEY = '_local_doc_count';
var UUID_KEY = '_local_uuid';

// prefix prepended to MD5 hashes when building attachment digests
var MD5_PREFIX = 'md5-';

// level-codec style value encoding that falls back to vuvuzela for
// payloads JSON.parse/JSON.stringify cannot handle (see safeJsonParse)
var safeJsonEncoding = {
  encode: safeJsonStringify,
  decode: safeJsonParse,
  buffer: false,
  type: 'cheap-json'
};

// shared Changes instance; notify(name) is invoked after successful
// writes (see _bulkDocs) to wake change listeners
var levelChanges = new Changes();
4954
// winningRev and deleted are performance-killers, but
// in newer versions of PouchDB, they are cached on the metadata
function getWinningRev(metadata) {
  if ('winningRev' in metadata) {
    return metadata.winningRev;
  }
  return winningRev(metadata);
}
4961
// Prefer the deleted flag cached on the metadata; otherwise derive it
// from the given (winning) revision.
function getIsDeleted(metadata, winningRev$$1) {
  if ('deleted' in metadata) {
    return metadata.deleted;
  }
  return isDeleted(metadata, winningRev$$1);
}
4966
// Inline one attachment: load its body from the binary store and
// replace the stub fields ({stub, length}) with a `data` property —
// a base64 string by default, or a type-tagged Buffer if opts.binary.
// A NotFoundError from the store means a zero-length attachment,
// since empty bodies are never written (see saveAttachment).
function fetchAttachment(att, stores, opts) {
  const type = att.content_type;
  return new Promise(function (resolve, reject) {
    stores.binaryStore.get(att.digest, function (err, buffer) {
      let data;
      if (!err) {
        // found a stored body
        data = opts.binary ?
          readAsBlobOrBuffer(buffer, type) :
          buffer.toString('base64');
      } else if (err.name !== 'NotFoundError') {
        /* istanbul ignore next */
        return reject(err);
      } else {
        // missing body => empty attachment
        data = opts.binary ? binStringToBluffer('', type) : '';
      }
      delete att.stub;
      delete att.length;
      att.data = data;
      resolve();
    });
  });
}
4998
// Collect every attachment stub across a set of result rows and load
// them all in parallel; resolves once every body has been inlined.
function fetchAttachments(results, stores, opts) {
  const pending = [];
  for (const row of results) {
    const attachments = row.doc && row.doc._attachments;
    if (!attachments) {
      continue;
    }
    for (const attName of Object.keys(attachments)) {
      const att = attachments[attName];
      // attachments that already carry inline data need no fetch
      if (!('data' in att)) {
        pending.push(att);
      }
    }
  }

  return Promise.all(pending.map(function (att) {
    return fetchAttachment(att, stores, opts);
  }));
}
5018
5019function LevelPouch(opts, callback) {
5020 opts = clone(opts);
5021 var api = this;
5022 var instanceId;
5023 var stores = {};
5024 var revLimit = opts.revs_limit;
5025 var db;
5026 var name = opts.name;
5027 // TODO: this is undocumented and unused probably
5028 /* istanbul ignore else */
5029 if (typeof opts.createIfMissing === 'undefined') {
5030 opts.createIfMissing = true;
5031 }
5032
5033 var leveldown = opts.db;
5034
5035 var dbStore;
5036 var leveldownName = functionName(leveldown);
5037 if (dbStores.has(leveldownName)) {
5038 dbStore = dbStores.get(leveldownName);
5039 } else {
5040 dbStore = new Map();
5041 dbStores.set(leveldownName, dbStore);
5042 }
5043 if (dbStore.has(name)) {
5044 db = dbStore.get(name);
5045 afterDBCreated();
5046 } else {
5047 dbStore.set(name, sublevelPouch(levelup(leveldown(name), opts, function (err) {
5048 /* istanbul ignore if */
5049 if (err) {
5050 dbStore.delete(name);
5051 return callback(err);
5052 }
5053 db = dbStore.get(name);
5054 db._docCount = -1;
5055 db._queue = new Deque();
5056 /* istanbul ignore else */
5057 if (typeof opts.migrate === 'object') { // migration for leveldown
5058 opts.migrate.doMigrationOne(name, db, afterDBCreated);
5059 } else {
5060 afterDBCreated();
5061 }
5062 })));
5063 }
5064
5065 function afterDBCreated() {
5066 stores.docStore = db.sublevel(DOC_STORE, {valueEncoding: safeJsonEncoding});
5067 stores.bySeqStore = db.sublevel(BY_SEQ_STORE, {valueEncoding: 'json'});
5068 stores.attachmentStore =
5069 db.sublevel(ATTACHMENT_STORE, {valueEncoding: 'json'});
5070 stores.binaryStore = db.sublevel(BINARY_STORE, {valueEncoding: 'binary'});
5071 stores.localStore = db.sublevel(LOCAL_STORE, {valueEncoding: 'json'});
5072 stores.metaStore = db.sublevel(META_STORE, {valueEncoding: 'json'});
5073 /* istanbul ignore else */
5074 if (typeof opts.migrate === 'object') { // migration for leveldown
5075 opts.migrate.doMigrationTwo(db, stores, afterLastMigration);
5076 } else {
5077 afterLastMigration();
5078 }
5079 }
5080
5081 function afterLastMigration() {
5082 stores.metaStore.get(UPDATE_SEQ_KEY, function (err, value) {
5083 if (typeof db._updateSeq === 'undefined') {
5084 db._updateSeq = value || 0;
5085 }
5086 stores.metaStore.get(DOC_COUNT_KEY, function (err, value) {
5087 db._docCount = !err ? value : 0;
5088 stores.metaStore.get(UUID_KEY, function (err, value) {
5089 instanceId = !err ? value : uuid();
5090 stores.metaStore.put(UUID_KEY, instanceId, function () {
5091 nextTick(function () {
5092 callback(null, api);
5093 });
5094 });
5095 });
5096 });
5097 });
5098 }
5099
5100 function countDocs(callback) {
5101 /* istanbul ignore if */
5102 if (db.isClosed()) {
5103 return callback(new Error('database is closed'));
5104 }
5105 return callback(null, db._docCount); // use cached value
5106 }
5107
5108 api._remote = false;
5109 /* istanbul ignore next */
5110 api.type = function () {
5111 return 'leveldb';
5112 };
5113
5114 api._id = function (callback) {
5115 callback(null, instanceId);
5116 };
5117
5118 api._info = function (callback) {
5119 var res$$1 = {
5120 doc_count: db._docCount,
5121 update_seq: db._updateSeq,
5122 backend_adapter: functionName(leveldown)
5123 };
5124 return nextTick(function () {
5125 callback(null, res$$1);
5126 });
5127 };
5128
5129 function tryCode(fun, args) {
5130 try {
5131 fun.apply(null, args);
5132 } catch (err) {
5133 args[args.length - 1](err);
5134 }
5135 }
5136
5137 function executeNext() {
5138 var firstTask = db._queue.peekFront();
5139
5140 if (firstTask.type === 'read') {
5141 runReadOperation(firstTask);
5142 } else { // write, only do one at a time
5143 runWriteOperation(firstTask);
5144 }
5145 }
5146
5147 function runReadOperation(firstTask) {
5148 // do multiple reads at once simultaneously, because it's safe
5149
5150 var readTasks = [firstTask];
5151 var i = 1;
5152 var nextTask = db._queue.get(i);
5153 while (typeof nextTask !== 'undefined' && nextTask.type === 'read') {
5154 readTasks.push(nextTask);
5155 i++;
5156 nextTask = db._queue.get(i);
5157 }
5158
5159 var numDone = 0;
5160
5161 readTasks.forEach(function (readTask) {
5162 var args = readTask.args;
5163 var callback = args[args.length - 1];
5164 args[args.length - 1] = function (...cbArgs) {
5165 callback.apply(null, cbArgs);
5166 if (++numDone === readTasks.length) {
5167 nextTick(function () {
5168 // all read tasks have finished
5169 readTasks.forEach(function () {
5170 db._queue.shift();
5171 });
5172 if (db._queue.length) {
5173 executeNext();
5174 }
5175 });
5176 }
5177 };
5178 tryCode(readTask.fun, args);
5179 });
5180 }
5181
5182 function runWriteOperation(firstTask) {
5183 var args = firstTask.args;
5184 var callback = args[args.length - 1];
5185 args[args.length - 1] = function (...cbArgs) {
5186 callback.apply(null, cbArgs);
5187 nextTick(function () {
5188 db._queue.shift();
5189 if (db._queue.length) {
5190 executeNext();
5191 }
5192 });
5193 };
5194 tryCode(firstTask.fun, args);
5195 }
5196
5197 // all read/write operations to the database are done in a queue,
5198 // similar to how websql/idb works. this avoids problems such
5199 // as e.g. compaction needing to have a lock on the database while
5200 // it updates stuff. in the future we can revisit this.
5201 function writeLock(fun) {
5202 return function (...args) {
5203 db._queue.push({
5204 fun,
5205 args,
5206 type: 'write'
5207 });
5208
5209 if (db._queue.length === 1) {
5210 nextTick(executeNext);
5211 }
5212 };
5213 }
5214
5215 // same as the writelock, but multiple can run at once
5216 function readLock(fun) {
5217 return function (...args) {
5218 db._queue.push({
5219 fun,
5220 args,
5221 type: 'read'
5222 });
5223
5224 if (db._queue.length === 1) {
5225 nextTick(executeNext);
5226 }
5227 };
5228 }
5229
5230 function formatSeq(n) {
5231 return ('0000000000000000' + n).slice(-16);
5232 }
5233
5234 function parseSeq(s) {
5235 return parseInt(s, 10);
5236 }
5237
5238 api._get = readLock(function (id, opts, callback) {
5239 opts = clone(opts);
5240
5241 stores.docStore.get(id, function (err, metadata) {
5242
5243 if (err || !metadata) {
5244 return callback(createError(MISSING_DOC, 'missing'));
5245 }
5246
5247 var rev$$1;
5248 if (!opts.rev) {
5249 rev$$1 = getWinningRev(metadata);
5250 var deleted = getIsDeleted(metadata, rev$$1);
5251 if (deleted) {
5252 return callback(createError(MISSING_DOC, "deleted"));
5253 }
5254 } else {
5255 rev$$1 = opts.latest ? latest(opts.rev, metadata) : opts.rev;
5256 }
5257
5258 var seq = metadata.rev_map[rev$$1];
5259
5260 stores.bySeqStore.get(formatSeq(seq), function (err, doc) {
5261 if (!doc) {
5262 return callback(createError(MISSING_DOC));
5263 }
5264 /* istanbul ignore if */
5265 if ('_id' in doc && doc._id !== metadata.id) {
5266 // this failing implies something very wrong
5267 return callback(new Error('wrong doc returned'));
5268 }
5269 doc._id = metadata.id;
5270 if ('_rev' in doc) {
5271 /* istanbul ignore if */
5272 if (doc._rev !== rev$$1) {
5273 // this failing implies something very wrong
5274 return callback(new Error('wrong doc returned'));
5275 }
5276 } else {
5277 // we didn't always store this
5278 doc._rev = rev$$1;
5279 }
5280 return callback(null, {doc, metadata});
5281 });
5282 });
5283 });
5284
5285 // not technically part of the spec, but if putAttachment has its own
5286 // method...
5287 api._getAttachment = function (docId, attachId, attachment, opts, callback) {
5288 var digest = attachment.digest;
5289 var type = attachment.content_type;
5290
5291 stores.binaryStore.get(digest, function (err, attach) {
5292 if (err) {
5293 /* istanbul ignore if */
5294 if (err.name !== 'NotFoundError') {
5295 return callback(err);
5296 }
5297 // Empty attachment
5298 return callback(null, opts.binary ? createEmptyBlobOrBuffer(type) : '');
5299 }
5300
5301 if (opts.binary) {
5302 callback(null, readAsBlobOrBuffer(attach, type));
5303 } else {
5304 callback(null, attach.toString('base64'));
5305 }
5306 });
5307 };
5308
5309 api._bulkDocs = writeLock(function (req, opts, callback) {
5310 var newEdits = opts.new_edits;
5311 var results = new Array(req.docs.length);
5312 var fetchedDocs = new Map();
5313 var stemmedRevs = new Map();
5314
5315 var txn = new LevelTransaction();
5316 var docCountDelta = 0;
5317 var newUpdateSeq = db._updateSeq;
5318
5319 // parse the docs and give each a sequence number
5320 var userDocs = req.docs;
5321 var docInfos = userDocs.map(function (doc) {
5322 if (doc._id && isLocalId(doc._id)) {
5323 return doc;
5324 }
5325 var newDoc = parseDoc(doc, newEdits, api.__opts);
5326
5327 if (newDoc.metadata && !newDoc.metadata.rev_map) {
5328 newDoc.metadata.rev_map = {};
5329 }
5330
5331 return newDoc;
5332 });
5333 var infoErrors = docInfos.filter(function (doc) {
5334 return doc.error;
5335 });
5336
5337 if (infoErrors.length) {
5338 return callback(infoErrors[0]);
5339 }
5340
5341 // verify any stub attachments as a precondition test
5342
5343 function verifyAttachment(digest, callback) {
5344 txn.get(stores.attachmentStore, digest, function (levelErr) {
5345 if (levelErr) {
5346 var err = createError(MISSING_STUB,
5347 'unknown stub attachment with digest ' +
5348 digest);
5349 callback(err);
5350 } else {
5351 callback();
5352 }
5353 });
5354 }
5355
5356 function verifyAttachments(finish) {
5357 var digests = [];
5358 userDocs.forEach(function (doc) {
5359 if (doc && doc._attachments) {
5360 Object.keys(doc._attachments).forEach(function (filename) {
5361 var att = doc._attachments[filename];
5362 if (att.stub) {
5363 digests.push(att.digest);
5364 }
5365 });
5366 }
5367 });
5368 if (!digests.length) {
5369 return finish();
5370 }
5371 var numDone = 0;
5372 var err;
5373
5374 digests.forEach(function (digest) {
5375 verifyAttachment(digest, function (attErr) {
5376 if (attErr && !err) {
5377 err = attErr;
5378 }
5379
5380 if (++numDone === digests.length) {
5381 finish(err);
5382 }
5383 });
5384 });
5385 }
5386
5387 function fetchExistingDocs(finish) {
5388 var numDone = 0;
5389 var overallErr;
5390 function checkDone() {
5391 if (++numDone === userDocs.length) {
5392 return finish(overallErr);
5393 }
5394 }
5395
5396 userDocs.forEach(function (doc) {
5397 if (doc._id && isLocalId(doc._id)) {
5398 // skip local docs
5399 return checkDone();
5400 }
5401 txn.get(stores.docStore, doc._id, function (err, info) {
5402 if (err) {
5403 /* istanbul ignore if */
5404 if (err.name !== 'NotFoundError') {
5405 overallErr = err;
5406 }
5407 } else {
5408 fetchedDocs.set(doc._id, info);
5409 }
5410 checkDone();
5411 });
5412 });
5413 }
5414
5415 function compact(revsMap, callback) {
5416 var promise = Promise.resolve();
5417 revsMap.forEach(function (revs, docId) {
5418 // TODO: parallelize, for now need to be sequential to
5419 // pass orphaned attachment tests
5420 promise = promise.then(function () {
5421 return new Promise(function (resolve, reject) {
5422 api._doCompactionNoLock(docId, revs, {ctx: txn}, function (err) {
5423 /* istanbul ignore if */
5424 if (err) {
5425 return reject(err);
5426 }
5427 resolve();
5428 });
5429 });
5430 });
5431 });
5432
5433 promise.then(function () {
5434 callback();
5435 }, callback);
5436 }
5437
5438 function autoCompact(callback) {
5439 var revsMap = new Map();
5440 fetchedDocs.forEach(function (metadata, docId) {
5441 revsMap.set(docId, compactTree(metadata));
5442 });
5443 compact(revsMap, callback);
5444 }
5445
5446 function finish() {
5447 compact(stemmedRevs, function (error) {
5448 /* istanbul ignore if */
5449 if (error) {
5450 complete(error);
5451 }
5452 if (api.auto_compaction) {
5453 return autoCompact(complete);
5454 }
5455 complete();
5456 });
5457 }
5458
5459 function writeDoc(docInfo, winningRev$$1, winningRevIsDeleted, newRevIsDeleted,
5460 isUpdate, delta, resultsIdx, callback2) {
5461 docCountDelta += delta;
5462
5463 var err = null;
5464 var recv = 0;
5465
5466 docInfo.metadata.winningRev = winningRev$$1;
5467 docInfo.metadata.deleted = winningRevIsDeleted;
5468
5469 docInfo.data._id = docInfo.metadata.id;
5470 docInfo.data._rev = docInfo.metadata.rev;
5471
5472 if (newRevIsDeleted) {
5473 docInfo.data._deleted = true;
5474 }
5475
5476 if (docInfo.stemmedRevs.length) {
5477 stemmedRevs.set(docInfo.metadata.id, docInfo.stemmedRevs);
5478 }
5479
5480 var attachments = docInfo.data._attachments ?
5481 Object.keys(docInfo.data._attachments) :
5482 [];
5483
5484 function attachmentSaved(attachmentErr) {
5485 recv++;
5486 if (!err) {
5487 /* istanbul ignore if */
5488 if (attachmentErr) {
5489 err = attachmentErr;
5490 callback2(err);
5491 } else if (recv === attachments.length) {
5492 finish();
5493 }
5494 }
5495 }
5496
5497 function onMD5Load(doc, key, data, attachmentSaved) {
5498 return function (result) {
5499 saveAttachment(doc, MD5_PREFIX + result, key, data, attachmentSaved);
5500 };
5501 }
5502
5503 function doMD5(doc, key, attachmentSaved) {
5504 return function (data) {
5505 binaryMd5(data, onMD5Load(doc, key, data, attachmentSaved));
5506 };
5507 }
5508
5509 for (var i = 0; i < attachments.length; i++) {
5510 var key = attachments[i];
5511 var att = docInfo.data._attachments[key];
5512
5513 if (att.stub) {
5514 // still need to update the refs mapping
5515 var id = docInfo.data._id;
5516 var rev$$1 = docInfo.data._rev;
5517 saveAttachmentRefs(id, rev$$1, att.digest, attachmentSaved);
5518 continue;
5519 }
5520 var data;
5521 if (typeof att.data === 'string') {
5522 // input is assumed to be a base64 string
5523 try {
5524 data = thisAtob(att.data);
5525 } catch (e) {
5526 callback(createError(BAD_ARG,
5527 'Attachment is not a valid base64 string'));
5528 return;
5529 }
5530 doMD5(docInfo, key, attachmentSaved)(data);
5531 } else {
5532 prepareAttachmentForStorage(att.data,
5533 doMD5(docInfo, key, attachmentSaved));
5534 }
5535 }
5536
5537 function finish() {
5538 var seq = docInfo.metadata.rev_map[docInfo.metadata.rev];
5539 /* istanbul ignore if */
5540 if (seq) {
5541 // check that there aren't any existing revisions with the same
5542 // revision id, else we shouldn't do anything
5543 return callback2();
5544 }
5545 seq = ++newUpdateSeq;
5546 docInfo.metadata.rev_map[docInfo.metadata.rev] =
5547 docInfo.metadata.seq = seq;
5548 var seqKey = formatSeq(seq);
5549 var batch = [{
5550 key: seqKey,
5551 value: docInfo.data,
5552 prefix: stores.bySeqStore,
5553 type: 'put'
5554 }, {
5555 key: docInfo.metadata.id,
5556 value: docInfo.metadata,
5557 prefix: stores.docStore,
5558 type: 'put'
5559 }];
5560 txn.batch(batch);
5561 results[resultsIdx] = {
5562 ok: true,
5563 id: docInfo.metadata.id,
5564 rev: docInfo.metadata.rev
5565 };
5566 fetchedDocs.set(docInfo.metadata.id, docInfo.metadata);
5567 callback2();
5568 }
5569
5570 if (!attachments.length) {
5571 finish();
5572 }
5573 }
5574
5575 // attachments are queued per-digest, otherwise the refs could be
5576 // overwritten by concurrent writes in the same bulkDocs session
5577 var attachmentQueues = {};
5578
5579 function saveAttachmentRefs(id, rev$$1, digest, callback) {
5580
5581 function fetchAtt() {
5582 return new Promise(function (resolve, reject) {
5583 txn.get(stores.attachmentStore, digest, function (err, oldAtt) {
5584 /* istanbul ignore if */
5585 if (err && err.name !== 'NotFoundError') {
5586 return reject(err);
5587 }
5588 resolve(oldAtt);
5589 });
5590 });
5591 }
5592
5593 function saveAtt(oldAtt) {
5594 var ref = [id, rev$$1].join('@');
5595 var newAtt = {};
5596
5597 if (oldAtt) {
5598 if (oldAtt.refs) {
5599 // only update references if this attachment already has them
5600 // since we cannot migrate old style attachments here without
5601 // doing a full db scan for references
5602 newAtt.refs = oldAtt.refs;
5603 newAtt.refs[ref] = true;
5604 }
5605 } else {
5606 newAtt.refs = {};
5607 newAtt.refs[ref] = true;
5608 }
5609
5610 return new Promise(function (resolve) {
5611 txn.batch([{
5612 type: 'put',
5613 prefix: stores.attachmentStore,
5614 key: digest,
5615 value: newAtt
5616 }]);
5617 resolve(!oldAtt);
5618 });
5619 }
5620
5621 // put attachments in a per-digest queue, to avoid two docs with the same
5622 // attachment overwriting each other
5623 var queue = attachmentQueues[digest] || Promise.resolve();
5624 attachmentQueues[digest] = queue.then(function () {
5625 return fetchAtt().then(saveAtt).then(function (isNewAttachment) {
5626 callback(null, isNewAttachment);
5627 }, callback);
5628 });
5629 }
5630
5631 function saveAttachment(docInfo, digest, key, data, callback) {
5632 var att = docInfo.data._attachments[key];
5633 delete att.data;
5634 att.digest = digest;
5635 att.length = data.length;
5636 var id = docInfo.metadata.id;
5637 var rev$$1 = docInfo.metadata.rev;
5638 att.revpos = parseInt(rev$$1, 10);
5639
5640 saveAttachmentRefs(id, rev$$1, digest, function (err, isNewAttachment) {
5641 /* istanbul ignore if */
5642 if (err) {
5643 return callback(err);
5644 }
5645 // do not try to store empty attachments
5646 if (data.length === 0) {
5647 return callback(err);
5648 }
5649 if (!isNewAttachment) {
5650 // small optimization - don't bother writing it again
5651 return callback(err);
5652 }
5653 txn.batch([{
5654 type: 'put',
5655 prefix: stores.binaryStore,
5656 key: digest,
5657 value: Buffer.from(data, 'binary')
5658 }]);
5659 callback();
5660 });
5661 }
5662
5663 function complete(err) {
5664 /* istanbul ignore if */
5665 if (err) {
5666 return nextTick(function () {
5667 callback(err);
5668 });
5669 }
5670 txn.batch([
5671 {
5672 prefix: stores.metaStore,
5673 type: 'put',
5674 key: UPDATE_SEQ_KEY,
5675 value: newUpdateSeq
5676 },
5677 {
5678 prefix: stores.metaStore,
5679 type: 'put',
5680 key: DOC_COUNT_KEY,
5681 value: db._docCount + docCountDelta
5682 }
5683 ]);
5684 txn.execute(db, function (err) {
5685 /* istanbul ignore if */
5686 if (err) {
5687 return callback(err);
5688 }
5689 db._docCount += docCountDelta;
5690 db._updateSeq = newUpdateSeq;
5691 levelChanges.notify(name);
5692 nextTick(function () {
5693 callback(null, results);
5694 });
5695 });
5696 }
5697
5698 if (!docInfos.length) {
5699 return callback(null, []);
5700 }
5701
5702 verifyAttachments(function (err) {
5703 if (err) {
5704 return callback(err);
5705 }
5706 fetchExistingDocs(function (err) {
5707 /* istanbul ignore if */
5708 if (err) {
5709 return callback(err);
5710 }
5711 processDocs(revLimit, docInfos, api, fetchedDocs, txn, results,
5712 writeDoc, opts, finish);
5713 });
5714 });
5715 });
  // Stream all documents from the doc store, honoring CouchDB-style allDocs
  // options (startkey/endkey/key, descending, skip/limit, include_docs,
  // conflicts, attachments, inclusive_end, update_seq, deleted:'ok').
  api._allDocs = function (opts, callback) {
    if ('keys' in opts) {
      return allDocsKeysQuery(this, opts);
    }
    return readLock(function (opts, callback) {
      opts = clone(opts);
      countDocs(function (err, docCount) {
        /* istanbul ignore if */
        if (err) {
          return callback(err);
        }
        // translate allDocs options into level read-stream range options
        var readstreamOpts = {};
        var skip = opts.skip || 0;
        if (opts.startkey) {
          readstreamOpts.gte = opts.startkey;
        }
        if (opts.endkey) {
          readstreamOpts.lte = opts.endkey;
        }
        if (opts.key) {
          readstreamOpts.gte = readstreamOpts.lte = opts.key;
        }
        if (opts.descending) {
          readstreamOpts.reverse = true;
          // switch start and ends
          var tmp = readstreamOpts.lte;
          readstreamOpts.lte = readstreamOpts.gte;
          readstreamOpts.gte = tmp;
        }
        var limit;
        if (typeof opts.limit === 'number') {
          limit = opts.limit;
        }
        if (limit === 0 ||
            ('gte' in readstreamOpts && 'lte' in readstreamOpts &&
             readstreamOpts.gte > readstreamOpts.lte)) {
          // should return 0 results when start is greater than end.
          // normally level would "fix" this for us by reversing the order,
          // so short-circuit instead
          var returnVal = {
            total_rows: docCount,
            offset: opts.skip,
            rows: []
          };
          /* istanbul ignore if */
          if (opts.update_seq) {
            returnVal.update_seq = db._updateSeq;
          }
          return callback(null, returnVal);
        }
        var results = [];
        var docstream = stores.docStore.readStream(readstreamOpts);

        var throughStream = obj(function (entry, _, next) {
          var metadata = entry.value;
          // winningRev and deleted are performance-killers, but
          // in newer versions of PouchDB, they are cached on the metadata
          var winningRev$$1 = getWinningRev(metadata);
          var deleted = getIsDeleted(metadata, winningRev$$1);
          if (!deleted) {
            // apply skip/limit only to non-deleted docs
            if (skip-- > 0) {
              next();
              return;
            } else if (typeof limit === 'number' && limit-- <= 0) {
              // limit exhausted: stop pulling from the doc store
              docstream.unpipe();
              docstream.destroy();
              next();
              return;
            }
          } else if (opts.deleted !== 'ok') {
            next();
            return;
          }
          // build the result row; `data` is the doc body when include_docs
          function allDocsInner(data) {
            var doc = {
              id: metadata.id,
              key: metadata.id,
              value: {
                rev: winningRev$$1
              }
            };
            if (opts.include_docs) {
              doc.doc = data;
              doc.doc._rev = doc.value.rev;
              if (opts.conflicts) {
                var conflicts = collectConflicts(metadata);
                if (conflicts.length) {
                  doc.doc._conflicts = conflicts;
                }
              }
              // attachments are returned as stubs unless opts.attachments
              // inlines them in the flush step below
              for (var att in doc.doc._attachments) {
                if (Object.prototype.hasOwnProperty.call(doc.doc._attachments, att)) {
                  doc.doc._attachments[att].stub = true;
                }
              }
            }
            if (opts.inclusive_end === false && metadata.id === opts.endkey) {
              return next();
            } else if (deleted) {
              if (opts.deleted === 'ok') {
                doc.value.deleted = true;
                doc.doc = null;
              } else {
                /* istanbul ignore next */
                return next();
              }
            }
            results.push(doc);
            next();
          }
          if (opts.include_docs) {
            var seq = metadata.rev_map[winningRev$$1];
            stores.bySeqStore.get(formatSeq(seq), function (err, data) {
              allDocsInner(data);
            });
          }
          else {
            allDocsInner();
          }
        }, function (next) {
          // flush: optionally inline attachment bodies, then emit results
          Promise.resolve().then(function () {
            if (opts.include_docs && opts.attachments) {
              return fetchAttachments(results, stores, opts);
            }
          }).then(function () {
            var returnVal = {
              total_rows: docCount,
              offset: opts.skip,
              rows: results
            };

            /* istanbul ignore if */
            if (opts.update_seq) {
              returnVal.update_seq = db._updateSeq;
            }
            callback(null, returnVal);
          }, callback);
          next();
        }).on('unpipe', function () {
          throughStream.end();
        });

        docstream.on('error', callback);

        docstream.pipe(throughStream);
      });
    })(opts, callback);
  };
5864
  // Read the change feed. Continuous listening is delegated to levelChanges;
  // otherwise the by-sequence store is streamed once from opts.since.
  api._changes = function (opts) {
    opts = clone(opts);

    if (opts.continuous) {
      var id = name + ':' + uuid();
      levelChanges.addListener(name, id, api, opts);
      levelChanges.notify(name);
      return {
        cancel: function () {
          levelChanges.removeListener(name, id);
        }
      };
    }

    var descending = opts.descending;
    var results = [];
    var lastSeq = opts.since || 0;
    var called = 0;
    var streamOpts = {
      reverse: descending
    };
    var limit;
    if ('limit' in opts && opts.limit > 0) {
      limit = opts.limit;
    }
    if (!streamOpts.reverse) {
      streamOpts.start = formatSeq(opts.since || 0);
    }

    var docIds = opts.doc_ids && new Set(opts.doc_ids);
    var filter = filterChange(opts);
    // per-read cache of doc metadata, so repeated seqs of the same doc
    // don't re-hit the doc store
    var docIdsToMetadata = new Map();

    // tear down the stream and (for one-shot reads) deliver the results
    function complete() {
      opts.done = true;
      if (opts.return_docs && opts.limit) {
        /* istanbul ignore if */
        if (opts.limit < results.length) {
          results.length = opts.limit;
        }
      }
      changeStream.unpipe(throughStream);
      changeStream.destroy();
      if (!opts.continuous && !opts.cancelled) {
        if (opts.include_docs && opts.attachments && opts.return_docs) {
          fetchAttachments(results, stores, opts).then(function () {
            opts.complete(null, {results, last_seq: lastSeq});
          });
        } else {
          opts.complete(null, {results, last_seq: lastSeq});
        }
      }
    }
    var changeStream = stores.bySeqStore.readStream(streamOpts);
    var throughStream = obj(function (data, _, next) {
      if (limit && called >= limit) {
        complete();
        return next();
      }
      if (opts.cancelled || opts.done) {
        return next();
      }

      var seq = parseSeq(data.key);
      var doc = data.value;

      if (seq === opts.since && !descending) {
        // couchdb ignores `since` if descending=true
        return next();
      }

      if (docIds && !docIds.has(doc._id)) {
        return next();
      }

      var metadata;

      function onGetMetadata(metadata) {
        var winningRev$$1 = getWinningRev(metadata);

        function onGetWinningDoc(winningDoc) {

          var change = opts.processChange(winningDoc, metadata, opts);
          change.seq = metadata.seq;

          var filtered = filter(change);
          // an object from the filter is an error; abort with it
          if (typeof filtered === 'object') {
            return opts.complete(filtered);
          }

          if (filtered) {
            called++;

            if (opts.attachments && opts.include_docs) {
              // fetch attachment immediately for the benefit
              // of live listeners
              fetchAttachments([change], stores, opts).then(function () {
                opts.onChange(change);
              });
            } else {
              opts.onChange(change);
            }

            if (opts.return_docs) {
              results.push(change);
            }
          }
          next();
        }

        if (metadata.seq !== seq) {
          // some other seq is later
          return next();
        }

        lastSeq = seq;

        if (winningRev$$1 === doc._rev) {
          return onGetWinningDoc(doc);
        }

        // fetch the winner

        var winningSeq = metadata.rev_map[winningRev$$1];

        stores.bySeqStore.get(formatSeq(winningSeq), function (err, doc) {
          onGetWinningDoc(doc);
        });
      }

      metadata = docIdsToMetadata.get(doc._id);
      if (metadata) { // cached
        return onGetMetadata(metadata);
      }
      // metadata not cached, have to go fetch it
      stores.docStore.get(doc._id, function (err, metadata) {
        /* istanbul ignore if */
        if (opts.cancelled || opts.done || db.isClosed() ||
          isLocalId(metadata.id)) {
          return next();
        }
        docIdsToMetadata.set(doc._id, metadata);
        onGetMetadata(metadata);
      });
    }, function (next) {
      // flush: enforce the limit on collected results before completion
      if (opts.cancelled) {
        return next();
      }
      if (opts.return_docs && opts.limit) {
        /* istanbul ignore if */
        if (opts.limit < results.length) {
          results.length = opts.limit;
        }
      }

      next();
    }).on('unpipe', function () {
      throughStream.end();
      complete();
    });
    changeStream.pipe(throughStream);
    return {
      cancel: function () {
        opts.cancelled = true;
        complete();
      }
    };
  };
6033
  // Close the underlying db handle, then drop this database and any of its
  // "-mrview-" view sub-databases from the shared adapter store.
  api._close = function (callback) {
    /* istanbul ignore if */
    if (db.isClosed()) {
      return callback(createError(NOT_OPEN));
    }
    db.close(function (err) {
      /* istanbul ignore if */
      if (err) {
        callback(err);
      } else {
        dbStore.delete(name);

        // clean up any map/reduce view stores derived from this db
        var adapterName = functionName(leveldown);
        var adapterStore = dbStores.get(adapterName);
        var viewNamePrefix = PouchDB.prefix + name + "-mrview-";
        var keys = [...adapterStore.keys()].filter(k => k.includes(viewNamePrefix));
        keys.forEach(key => {
          var eventEmitter = adapterStore.get(key);
          eventEmitter.removeAllListeners();
          eventEmitter.close();
          adapterStore.delete(key);
        });

        callback();
      }
    });
  };
6061
6062 api._getRevisionTree = function (docId, callback) {
6063 stores.docStore.get(docId, function (err, metadata) {
6064 if (err) {
6065 callback(createError(MISSING_DOC));
6066 } else {
6067 callback(null, metadata.rev_tree);
6068 }
6069 });
6070 };
6071
  // Public compaction entry point; takes the write lock before delegating.
  api._doCompaction = writeLock(function (docId, revs, opts, callback) {
    api._doCompactionNoLock(docId, revs, opts, callback);
  });
6075
  // the NoLock version is for use by bulkDocs
  // Compaction: remove the given revs of docId from the rev map/tree, delete
  // their by-seq entries, and garbage-collect any attachments that no
  // remaining revision references.
  api._doCompactionNoLock = function (docId, revs, opts, callback) {
    if (typeof opts === 'function') {
      callback = opts;
      opts = {};
    }

    if (!revs.length) {
      return callback();
    }
    var txn = opts.ctx || new LevelTransaction();

    txn.get(stores.docStore, docId, function (err, metadata) {
      /* istanbul ignore if */
      if (err) {
        return callback(err);
      }
      // remember the seqs of the compacted revs, then drop them from rev_map
      var seqs = revs.map(function (rev$$1) {
        var seq = metadata.rev_map[rev$$1];
        delete metadata.rev_map[rev$$1];
        return seq;
      });
      // mark the compacted revs as missing in the rev tree
      traverseRevTree(metadata.rev_tree, function (isLeaf, pos,
                                                   revHash, ctx, opts) {
        var rev$$1 = pos + '-' + revHash;
        if (revs.indexOf(rev$$1) !== -1) {
          opts.status = 'missing';
        }
      });

      var batch = [];
      batch.push({
        key: metadata.id,
        value: metadata,
        type: 'put',
        prefix: stores.docStore
      });

      // digests referenced by the compacted revs; GC candidates
      var digestMap = {};
      var numDone = 0;
      var overallErr;
      // joins the per-seq lookups; the last one triggers attachment GC
      function checkDone(err) {
        /* istanbul ignore if */
        if (err) {
          overallErr = err;
        }
        if (++numDone === revs.length) { // done
          /* istanbul ignore if */
          if (overallErr) {
            return callback(overallErr);
          }
          deleteOrphanedAttachments();
        }
      }

      // commit the accumulated batch; defer execution when inside bulkDocs
      function finish(err) {
        /* istanbul ignore if */
        if (err) {
          return callback(err);
        }
        txn.batch(batch);
        if (opts.ctx) {
          // don't execute immediately
          return callback();
        }
        txn.execute(db, callback);
      }

      // delete attachment/binary entries whose only refs were compacted revs
      function deleteOrphanedAttachments() {
        var possiblyOrphanedAttachments = Object.keys(digestMap);
        if (!possiblyOrphanedAttachments.length) {
          return finish();
        }
        var numDone = 0;
        var overallErr;
        function checkDone(err) {
          /* istanbul ignore if */
          if (err) {
            overallErr = err;
          }
          if (++numDone === possiblyOrphanedAttachments.length) {
            finish(overallErr);
          }
        }
        // refs are keyed as '<docId>@<rev>'; these are being compacted away
        var refsToDelete = new Map();
        revs.forEach(function (rev$$1) {
          refsToDelete.set(docId + '@' + rev$$1, true);
        });
        possiblyOrphanedAttachments.forEach(function (digest) {
          txn.get(stores.attachmentStore, digest, function (err, attData) {
            /* istanbul ignore if */
            if (err) {
              if (err.name === 'NotFoundError') {
                return checkDone();
              } else {
                return checkDone(err);
              }
            }
            var refs = Object.keys(attData.refs || {}).filter(function (ref) {
              return !refsToDelete.has(ref);
            });
            var newRefs = {};
            refs.forEach(function (ref) {
              newRefs[ref] = true;
            });
            if (refs.length) { // not orphaned
              batch.push({
                key: digest,
                type: 'put',
                value: {refs: newRefs},
                prefix: stores.attachmentStore
              });
            } else { // orphaned, can safely delete
              batch = batch.concat([{
                key: digest,
                type: 'del',
                prefix: stores.attachmentStore
              }, {
                key: digest,
                type: 'del',
                prefix: stores.binaryStore
              }]);
            }
            checkDone();
          });
        });
      }

      // delete the by-seq entries and collect their attachment digests
      seqs.forEach(function (seq) {
        batch.push({
          key: formatSeq(seq),
          type: 'del',
          prefix: stores.bySeqStore
        });
        txn.get(stores.bySeqStore, formatSeq(seq), function (err, doc) {
          /* istanbul ignore if */
          if (err) {
            if (err.name === 'NotFoundError') {
              return checkDone();
            } else {
              return checkDone(err);
            }
          }
          var atts = Object.keys(doc._attachments || {});
          atts.forEach(function (attName) {
            var digest = doc._attachments[attName].digest;
            digestMap[digest] = true;
          });
          checkDone();
        });
      });
    });
  };
6229
6230 api._getLocal = function (id, callback) {
6231 stores.localStore.get(id, function (err, doc) {
6232 if (err) {
6233 callback(createError(MISSING_DOC));
6234 } else {
6235 callback(null, doc);
6236 }
6237 });
6238 };
6239
6240 api._putLocal = function (doc, opts, callback) {
6241 if (typeof opts === 'function') {
6242 callback = opts;
6243 opts = {};
6244 }
6245 if (opts.ctx) {
6246 api._putLocalNoLock(doc, opts, callback);
6247 } else {
6248 api._putLocalWithLock(doc, opts, callback);
6249 }
6250 };
6251
  // Lock-taking wrapper around _putLocalNoLock.
  api._putLocalWithLock = writeLock(function (doc, opts, callback) {
    api._putLocalNoLock(doc, opts, callback);
  });
6255
  // the NoLock version is for use by bulkDocs
  // Writes a _local doc with optimistic concurrency on doc._rev.
  api._putLocalNoLock = function (doc, opts, callback) {
    delete doc._revisions; // ignore this, trust the rev
    var oldRev = doc._rev;
    var id = doc._id;

    var txn = opts.ctx || new LevelTransaction();

    txn.get(stores.localStore, id, function (err, resp) {
      // updating a doc that isn't stored is a conflict
      if (err && oldRev) {
        return callback(createError(REV_CONFLICT));
      }
      // rev mismatch with the stored doc is a conflict
      if (resp && resp._rev !== oldRev) {
        return callback(createError(REV_CONFLICT));
      }
      // local docs use a simple incrementing '0-N' rev scheme
      doc._rev =
        oldRev ? '0-' + (parseInt(oldRev.split('-')[1], 10) + 1) : '0-1';
      var batch = [
        {
          type: 'put',
          prefix: stores.localStore,
          key: id,
          value: doc
        }
      ];

      txn.batch(batch);
      var ret = {ok: true, id: doc._id, rev: doc._rev};

      if (opts.ctx) {
        // don't execute immediately
        return callback(null, ret);
      }
      txn.execute(db, function (err) {
        /* istanbul ignore if */
        if (err) {
          return callback(err);
        }
        callback(null, ret);
      });
    });
  };
6298
6299 api._removeLocal = function (doc, opts, callback) {
6300 if (typeof opts === 'function') {
6301 callback = opts;
6302 opts = {};
6303 }
6304 if (opts.ctx) {
6305 api._removeLocalNoLock(doc, opts, callback);
6306 } else {
6307 api._removeLocalWithLock(doc, opts, callback);
6308 }
6309 };
6310
  // Lock-taking wrapper around _removeLocalNoLock.
  api._removeLocalWithLock = writeLock(function (doc, opts, callback) {
    api._removeLocalNoLock(doc, opts, callback);
  });
6314
  // the NoLock version is for use by bulkDocs
  // Deletes a _local doc; requires the caller to present the stored rev.
  api._removeLocalNoLock = function (doc, opts, callback) {
    var txn = opts.ctx || new LevelTransaction();
    txn.get(stores.localStore, doc._id, function (err, resp) {
      if (err) {
        /* istanbul ignore if */
        if (err.name !== 'NotFoundError') {
          return callback(err);
        } else {
          return callback(createError(MISSING_DOC));
        }
      }
      // rev mismatch with the stored doc is a conflict
      if (resp._rev !== doc._rev) {
        return callback(createError(REV_CONFLICT));
      }
      txn.batch([{
        prefix: stores.localStore,
        type: 'del',
        key: doc._id
      }]);
      var ret = {ok: true, id: doc._id, rev: '0-0'};
      if (opts.ctx) {
        // don't execute immediately
        return callback(null, ret);
      }
      txn.execute(db, function (err) {
        /* istanbul ignore if */
        if (err) {
          return callback(err);
        }
        callback(null, ret);
      });
    });
  };
6349
  // close and delete open leveldb stores
  api._destroy = function (opts, callback) {
    var dbStore;
    var leveldownName = functionName(leveldown);
    /* istanbul ignore else */
    if (dbStores.has(leveldownName)) {
      dbStore = dbStores.get(leveldownName);
    } else {
      // nothing open for this adapter; destroy the store directly
      return callDestroy(name, callback);
    }

    /* istanbul ignore else */
    if (dbStore.has(name)) {
      levelChanges.removeAllListeners(name);

      // close the open handle before destroying the underlying store
      dbStore.get(name).close(function () {
        dbStore.delete(name);
        callDestroy(name, callback);
      });
    } else {
      callDestroy(name, callback);
    }
  };
6373 function callDestroy(name, cb) {
6374 // May not exist if leveldown is backed by memory adapter
6375 /* istanbul ignore else */
6376 if ('destroy' in leveldown) {
6377 leveldown.destroy(name, cb);
6378 } else {
6379 cb(null);
6380 }
6381 }
6382}
6383
// require leveldown. provide verbose output on error as it is the default
// nodejs adapter, which we do not provide for the user
/* istanbul ignore next */
var requireLeveldown = function () {
  try {
    return require('leveldown');
  } catch (err) {
    /* eslint no-ex-assign: 0*/
    err = err || 'leveldown import error';
    // not installed at all: point the user at the 'db' option
    if (err.code === 'MODULE_NOT_FOUND') {
      return new Error(
        "the 'leveldown' package is not available. install it, or, " +
        "specify another storage backend using the 'db' option"
      );
    }
    // built against a different node ABI: suggest a rebuild/reinstall
    if (err.message && err.message.match('Module version mismatch')) {
      return new Error(
        err.message +
        ' This generally implies that leveldown was built with a different' +
        ' version of node than that which is running now. You may try' +
        ' fully removing and reinstalling PouchDB or leveldown to resolve.'
      );
    }
    // handle general internal nodejs require error
    return new Error(err.toString() + ': unable to import leveldown');
  }
};
6412
// Names of the legacy per-store leveldb directories handled by migration one
// (index 3, the binary store, gets binary value encoding there).
var stores = [
  'document-store',
  'by-sequence',
  'attach-store',
  'attach-binary-store'
];
// Zero-pad a sequence number to the fixed 16-character key width used by the
// by-sequence store, so lexicographic key order matches numeric order. Inputs
// longer than 16 characters keep only their last 16, as before.
function formatSeq(n) {
  return String(n).padStart(16, '0').slice(-16);
}
// Keys that migration two relocates from the by-sequence store into the
// meta store.
var UPDATE_SEQ_KEY$1 = '_local_last_update_seq';
var DOC_COUNT_KEY$1 = '_local_doc_count';
var UUID_KEY$1 = '_local_uuid';
6425
// Migration one: copy the four separate legacy leveldb stores into sublevels
// of a single db, then destroy the old store directories.
var doMigrationOne = function (name, db, callback) {
  // local require to prevent crashing if leveldown isn't installed.
  var leveldown = require("leveldown");

  var base = path.resolve(name);
  // copy one legacy store into a sublevel of the new db
  function move(store, index, cb) {
    var storePath = path.join(base, store);
    var opts;
    if (index === 3) {
      // index 3 is attach-binary-store, which holds raw binary values
      opts = {
        valueEncoding: 'binary'
      };
    } else {
      opts = {
        valueEncoding: 'json'
      };
    }
    var sub = db.sublevel(store, opts);
    var orig = level(storePath, opts);
    var from = orig.createReadStream();
    var writeStream = new LevelWriteStream(sub);
    var to = writeStream();
    from.on('end', function () {
      orig.close(function (err) {
        cb(err, storePath);
      });
    });
    from.pipe(to);
  }
  // the legacy '<name>.uuid' file marks an unmigrated database; if it's
  // absent (unlink errors) there is nothing to migrate
  fs.unlink(base + '.uuid', function (err) {
    if (err) {
      return callback();
    }
    var todo = 4;
    var done = [];
    stores.forEach(function (store, i) {
      move(store, i, function (err, storePath) {
        /* istanbul ignore if */
        if (err) {
          return callback(err);
        }
        done.push(storePath);
        if (!(--todo)) {
          // all four copied; destroy the old stores, then remove the dir
          // once the last destroy completes
          done.forEach(function (item) {
            leveldown.destroy(item, function () {
              if (++todo === done.length) {
                fs.rmdir(base, callback);
              }
            });
          });
        }
      });
    });
  });
};
// Migration two: relocate meta keys (uuid, doc count, update seq) out of the
// by-sequence store into the meta store, and move _local docs into the local
// store, committing everything in one atomic batch.
var doMigrationTwo = function (db, stores, callback) {
  var batches = [];
  stores.bySeqStore.get(UUID_KEY$1, function (err, value) {
    if (err) {
      // no uuid key, so don't need to migrate;
      return callback();
    }
    batches.push({
      key: UUID_KEY$1,
      value,
      prefix: stores.metaStore,
      type: 'put',
      valueEncoding: 'json'
    });
    batches.push({
      key: UUID_KEY$1,
      prefix: stores.bySeqStore,
      type: 'del'
    });
    stores.bySeqStore.get(DOC_COUNT_KEY$1, function (err, value) {
      if (value) {
        // if no doc count key,
        // just skip
        // we can live with this
        batches.push({
          key: DOC_COUNT_KEY$1,
          value,
          prefix: stores.metaStore,
          type: 'put',
          valueEncoding: 'json'
        });
        batches.push({
          key: DOC_COUNT_KEY$1,
          prefix: stores.bySeqStore,
          type: 'del'
        });
      }
      stores.bySeqStore.get(UPDATE_SEQ_KEY$1, function (err, value) {
        if (value) {
          // if no UPDATE_SEQ_KEY
          // just skip
          // we've gone too far to stop.
          batches.push({
            key: UPDATE_SEQ_KEY$1,
            value,
            prefix: stores.metaStore,
            type: 'put',
            valueEncoding: 'json'
          });
          batches.push({
            key: UPDATE_SEQ_KEY$1,
            prefix: stores.bySeqStore,
            type: 'del'
          });
        }
        var deletedSeqs = {};
        // walk the '_'..'_\xFF' key range of the doc store (where _local
        // ids sort), moving each _local doc and queueing its seqs
        stores.docStore.createReadStream({
          startKey: '_',
          endKey: '_\xFF'
        }).pipe(obj(function (ch, _, next) {
          if (!isLocalId(ch.key)) {
            return next();
          }
          batches.push({
            key: ch.key,
            prefix: stores.docStore,
            type: 'del'
          });
          var winner = winningRev(ch.value);
          // forward every seq this doc occupied to the next stream stage
          // for deletion from the by-sequence store
          Object.keys(ch.value.rev_map).forEach(function (key) {
            if (key !== 'winner') {
              this.push(formatSeq(ch.value.rev_map[key]));
            }
          }, this);
          var winningSeq = ch.value.rev_map[winner];
          stores.bySeqStore.get(formatSeq(winningSeq), function (err, value) {
            if (!err) {
              batches.push({
                key: ch.key,
                value,
                prefix: stores.localStore,
                type: 'put',
                valueEncoding: 'json'
              });
            }
            next();
          });

        })).pipe(obj(function (seq, _, next) {
          /* istanbul ignore if */
          if (deletedSeqs[seq]) {
            return next();
          }
          deletedSeqs[seq] = true;
          stores.bySeqStore.get(seq, function (err, resp) {
            /* istanbul ignore if */
            if (err || !isLocalId(resp._id)) {
              return next();
            }
            batches.push({
              key: seq,
              prefix: stores.bySeqStore,
              type: 'del'
            });
            next();
          });
        }, function () {
          // flush everything in one atomic batch
          db.batch(batches, callback);
        }));
      });
    });
  });

};
6595
// On-open migrations for databases created by older PouchDB versions.
var migrate = {
  doMigrationOne,
  doMigrationTwo
};
6600
// LevelDB adapter constructor: LevelPouch backed by leveldown, or by a
// user-supplied store passed as opts.db.
function LevelDownPouch(opts, callback) {

  // Users can pass in their own leveldown alternative here, in which case
  // it overrides the default one. (This is in addition to the custom builds.)
  var leveldown = opts.db;

  /* istanbul ignore else */
  if (!leveldown) {
    leveldown = requireLeveldown();

    /* istanbul ignore if */
    if (leveldown instanceof Error) {
      // leveldown missing or broken; report instead of throwing
      return callback(leveldown);
    }
  }

  var _opts = Object.assign({
    db: leveldown,
    migrate
  }, opts);

  LevelPouch.call(this, _opts, callback);
}
6624
// overrides for normal LevelDB behavior on Node
LevelDownPouch.valid = function () {
  return true;
};
// no key prefixing needed for the on-disk leveldown backend
LevelDownPouch.use_prefix = false;
6630
// Plugin entry point: registers LevelDownPouch under the 'leveldb' name.
function LevelPouch$1 (PouchDB) {
  PouchDB.adapter('leveldb', LevelDownPouch, true);
}
6634
// dead simple promise pool, inspired by https://github.com/timdp/es6-promise-pool
// but much smaller in code size. limits the number of concurrent promises that are executed


/**
 * Run promise-returning factories with at most `limit` in flight at once.
 * Resolves once every factory has settled; rejects with the first error
 * observed (but only after all factories have settled).
 *
 * @param {Array<function(): Promise>} promiseFactories
 * @param {number} limit - maximum number of concurrent promises (> 0)
 * @returns {Promise<void>}
 */
function pool(promiseFactories, limit) {
  return new Promise(function (resolve, reject) {
    var running = 0;
    var current = 0;
    var done = 0;
    var len = promiseFactories.length;
    var err;

    // Fix: with zero factories nothing ever calls doNext(), so the promise
    // would never settle. Resolve immediately instead.
    if (len === 0) {
      return resolve();
    }

    function runNext() {
      running++;
      promiseFactories[current++]().then(onSuccess, onError);
    }

    // called once per settled factory; finishes when all are done
    function doNext() {
      if (++done === len) {
        /* istanbul ignore if */
        if (err) {
          reject(err);
        } else {
          resolve();
        }
      } else {
        runNextBatch();
      }
    }

    function onSuccess() {
      running--;
      doNext();
    }

    /* istanbul ignore next */
    function onError(thisErr) {
      running--;
      // keep only the first error; later ones are dropped
      err = err || thisErr;
      doNext();
    }

    // top up the pool to `limit` concurrent promises
    function runNextBatch() {
      while (running < limit && current < len) {
        runNext();
      }
    }

    runNextBatch();
  });
}
6686
// Tunables for the HTTP adapter's changes / bulkGet machinery.
const CHANGES_BATCH_SIZE = 25;
const MAX_SIMULTANEOUS_REVS = 50;
const CHANGES_TIMEOUT_BUFFER = 5000;
const DEFAULT_HEARTBEAT = 10000;

// Cache of whether a server supports _bulk_get (populated elsewhere;
// presumably keyed by host URL — verify against the writer).
const supportsBulkGetMap = {};
6693
// Convert every base64 attachment on a row's doc (row.doc or row.ok) into a
// Blob/Buffer via b64ToBluffer, mutating the attachments in place.
function readAttachmentsAsBlobOrBuffer(row) {
  const doc = row.doc || row.ok;
  const atts = doc && doc._attachments;
  if (!atts) {
    return;
  }
  for (const filename of Object.keys(atts)) {
    const att = atts[filename];
    att.data = b64ToBluffer(att.data, att.content_type);
  }
}
6705
// URI-encode a doc id while keeping the literal '_design/' and '_local/'
// prefixes that the server routes on.
function encodeDocId(id) {
  if (id.startsWith('_local/')) {
    return '_local/' + encodeURIComponent(id.slice(7));
  }
  if (/^_design/.test(id)) {
    return '_design/' + encodeURIComponent(id.slice(8));
  }
  return encodeURIComponent(id);
}
6715
/**
 * Convert any non-string (Blob/Buffer) attachment data on `doc` to base64,
 * mutating the attachments in place. Resolves when all are converted.
 *
 * @param {Object} doc - document whose `_attachments` are normalized
 * @returns {Promise}
 */
function preprocessAttachments$1(doc) {
  // Fix: the original tested `!Object.keys(doc._attachments)`, which is
  // always false because Object.keys returns an (always truthy) array.
  // Check the length so an empty attachments object short-circuits.
  if (!doc._attachments || Object.keys(doc._attachments).length === 0) {
    return Promise.resolve();
  }

  return Promise.all(Object.keys(doc._attachments).map(function (key) {
    const attachment = doc._attachments[key];
    if (attachment.data && typeof attachment.data !== 'string') {
      // blobToBase64 is callback-style; adapt it to a promise
      return new Promise(function (resolve) {
        blobToBase64(attachment.data, resolve);
      }).then(function (b64) {
        attachment.data = b64;
      });
    }
  }));
}
6732
// True when opts.prefix is an http(s) URL (as opposed to a filesystem path).
function hasUrlPrefix(opts) {
  if (!opts.prefix) {
    return false;
  }
  const { protocol } = parseUri(opts.prefix);
  return protocol === 'http' || protocol === 'https';
}
6740
// Get all the information you possibly can about the URI given by name and
// return it as a suitable object.
function getHost(name, opts) {
  // encode db name if opts.prefix is a url (#5574)
  if (hasUrlPrefix(opts)) {
    const dbName = opts.name.substr(opts.prefix.length);
    // Ensure prefix has a trailing slash
    const prefix = opts.prefix.replace(/\/?$/, '/');
    name = prefix + encodeURIComponent(dbName);
  }

  const uri = parseUri(name);
  // surface URL-embedded credentials as an auth object
  if (uri.user || uri.password) {
    uri.auth = {username: uri.user, password: uri.password};
  }

  // Split the path part of the URI into parts using '/' as the delimiter
  // after removing any leading '/' and any trailing '/'
  const parts = uri.path.replace(/(^\/|\/$)/g, '').split('/');

  // the last path segment is the database name; the rest stays the path
  uri.db = parts.pop();
  // Prevent double encoding of URI component
  if (uri.db.indexOf('%') === -1) {
    uri.db = encodeURIComponent(uri.db);
  }

  uri.path = parts.join('/');

  return uri;
}
6771
// Generate a URL with the host data given by opts and the given path,
// scoped under the database name.
function genDBUrl(opts, path$$1) {
  return genUrl(opts, `${opts.db}/${path$$1}`);
}
6776
// Generate a URL with the host data given by opts and the given path.
function genUrl(opts, path$$1) {
  const port = opts.port ? `:${opts.port}` : '';
  // If the host already has a path we need a '/' between it and ours;
  // otherwise the delimiter is the empty string.
  const pathDel = opts.path ? '/' : '';
  return `${opts.protocol}://${opts.host}${port}/${opts.path}${pathDel}${path$$1}`;
}
6789
// Serialize a flat params object into a '?key=value&...' query string;
// returns the empty string when there are no params.
function paramsToStr(params) {
  const entries = Object.entries(params);
  if (!entries.length) {
    return '';
  }
  const encoded = entries.map(([key, value]) => `${key}=${encodeURIComponent(value)}`);
  return `?${encoded.join('&')}`;
}
6798
// Old IE / Trident / legacy Edge aggressively cache GET requests; detect
// them (via the user agent) so callers can append a cache-busting nonce.
function shouldCacheBust(opts) {
  let ua = '';
  if (typeof navigator !== 'undefined' && navigator.userAgent) {
    ua = navigator.userAgent.toLowerCase();
  }
  const isLegacyMsBrowser =
    ua.indexOf('msie') !== -1 ||
    ua.indexOf('trident') !== -1 ||
    ua.indexOf('edge') !== -1;
  const isGET = !('method' in opts) || opts.method === 'GET';
  return isLegacyMsBrowser && isGET;
}
6808
6809// Implements the PouchDB API for dealing with CouchDB instances over HTTP
6810function HttpPouch(opts, callback) {
6811
6812 // The functions that will be publicly available for HttpPouch
6813 const api = this;
6814
6815 const host = getHost(opts.name, opts);
6816 const dbUrl = genDBUrl(host, '');
6817
6818 opts = clone(opts);
6819
  // fetch wrapper: adds credentials, basic auth, caller-supplied headers,
  // and (on legacy MS browsers) a cache-busting nonce, then delegates to
  // opts.fetch or the global fetch.
  const ourFetch = async function (url, options) {

    options = options || {};
    options.headers = options.headers || new Headers();

    options.credentials = 'include';

    // basic auth from opts.auth or credentials embedded in the URL
    if (opts.auth || host.auth) {
      const nAuth = opts.auth || host.auth;
      const str = nAuth.username + ':' + nAuth.password;
      // btoa needs latin-1 input; unescape(encodeURIComponent()) re-encodes
      // the UTF-8 credential string accordingly
      const token = thisBtoa(unescape(encodeURIComponent(str)));
      options.headers.set('Authorization', 'Basic ' + token);
    }

    const headers = opts.headers || {};
    Object.keys(headers).forEach(function (key) {
      options.headers.append(key, headers[key]);
    });

    /* istanbul ignore if */
    if (shouldCacheBust(options)) {
      url += (url.indexOf('?') === -1 ? '?' : '&') + '_nonce=' + Date.now();
    }

    const fetchFun = opts.fetch || fetch;
    return await fetchFun(url, options);
  };
6847
  // Wrap an adapter function so it waits for setup() first and routes any
  // setup or execution error to the trailing node-style callback.
  function adapterFun$$1(name, fun) {
    return adapterFun(name, function (...args) {
      setup().then(function () {
        return fun.apply(this, args);
      }).catch(function (e) {
        // by convention the last argument is the callback
        const callback = args.pop();
        callback(e);
      });
    }).bind(api);
  }
6858
  // Fetch `url` expecting JSON. Resolves to {ok, status, data}; on a non-2xx
  // response throws an error generated from the response body.
  async function fetchJSON(url, options) {

    const result = {};

    options = options || {};
    options.headers = options.headers || new Headers();

    // default to JSON in both directions unless the caller set headers
    if (!options.headers.get('Content-Type')) {
      options.headers.set('Content-Type', 'application/json');
    }
    if (!options.headers.get('Accept')) {
      options.headers.set('Accept', 'application/json');
    }

    const response = await ourFetch(url, options);
    result.ok = response.ok;
    result.status = response.status;
    const json = await response.json();

    result.data = json;
    if (!result.ok) {
      result.data.status = result.status;
      const err = generateErrorFromResponse(result.data);
      throw err;
    }

    // array responses (e.g. bulk): map per-row errors to error objects
    if (Array.isArray(result.data)) {
      result.data = result.data.map(function (v) {
        if (v.error || v.missing) {
          return generateErrorFromResponse(v);
        } else {
          return v;
        }
      });
    }

    return result;
  }
6897
6898 let setupPromise;
6899
  // Ensure the remote database exists (unless opts.skip_setup). The check is
  // memoized in setupPromise; failures clear it so the next call retries.
  async function setup() {
    if (opts.skip_setup) {
      return Promise.resolve();
    }

    // If there is a setup in process or previous successful setup
    // done then we will use that
    // If previous setups have been rejected we will try again
    if (setupPromise) {
      return setupPromise;
    }

    setupPromise = fetchJSON(dbUrl).catch(function (err) {
      if (err && err.status && err.status === 404) {
        // database does not exist yet: create it
        return fetchJSON(dbUrl, {method: 'PUT'});
      } else {
        return Promise.reject(err);
      }
    }).catch(function (err) {
      // If we try to create a database that already exists, skipped in
      // istanbul since its catching a race condition.
      /* istanbul ignore if */
      if (err && err.status && err.status === 412) {
        return true;
      }
      return Promise.reject(err);
    });

    setupPromise.catch(function () {
      // allow a retry after a failed setup
      setupPromise = null;
    });

    return setupPromise;
  }
6934
  // Signal adapter readiness asynchronously, as PouchDB's adapter
  // contract expects the constructor callback not to fire synchronously.
  nextTick(function () {
    callback(null, api);
  });

  // Mark this instance as a remote (HTTP) backend.
  api._remote = true;

  /* istanbul ignore next */
  api.type = function () {
    return 'http';
  };
6945
6946 api.id = adapterFun$$1('id', async function (callback) {
6947 let result;
6948 try {
6949 const response = await ourFetch(genUrl(host, ''));
6950 result = await response.json();
6951 } catch (err) {
6952 result = {};
6953 }
6954
6955 // Bad response or missing `uuid` should not prevent ID generation.
6956 const uuid$$1 = (result && result.uuid) ? (result.uuid + host.db) : genDBUrl(host, '');
6957 callback(null, uuid$$1);
6958 });
6959
6960 // Sends a POST request to the host calling the couchdb _compact function
6961 // version: The version of CouchDB it is running
6962 api.compact = adapterFun$$1('compact', async function (opts, callback) {
6963 if (typeof opts === 'function') {
6964 callback = opts;
6965 opts = {};
6966 }
6967 opts = clone(opts);
6968
6969 await fetchJSON(genDBUrl(host, '_compact'), {method: 'POST'});
6970
6971 function ping() {
6972 api.info(function (err, res$$1) {
6973 // CouchDB may send a "compact_running:true" if it's
6974 // already compacting. PouchDB Server doesn't.
6975 /* istanbul ignore else */
6976 if (res$$1 && !res$$1.compact_running) {
6977 callback(null, {ok: true});
6978 } else {
6979 setTimeout(ping, opts.interval || 200);
6980 }
6981 });
6982 }
6983 // Ping the http if it's finished compaction
6984 ping();
6985 });
6986
  // Fetch many docs/revisions at once. CouchDB < 2.x (and some proxies)
  // lack the _bulk_get endpoint, so the first call probes for support and
  // caches the answer per database URL in supportsBulkGetMap; unsupported
  // servers fall back to a batched shim built on individual requests.
  api.bulkGet = adapterFun('bulkGet', function (opts, callback) {
    const self = this;

    // Issue a single POST to /<db>/_bulk_get with the requested docs.
    async function doBulkGet(cb) {
      const params = {};
      if (opts.revs) {
        params.revs = true;
      }
      if (opts.attachments) {
        /* istanbul ignore next */
        params.attachments = true;
      }
      if (opts.latest) {
        params.latest = true;
      }
      try {
        const result = await fetchJSON(genDBUrl(host, '_bulk_get' + paramsToStr(params)), {
          method: 'POST',
          body: JSON.stringify({ docs: opts.docs})
        });

        // Convert base64 attachment payloads into Blobs/Buffers on request.
        if (opts.attachments && opts.binary) {
          result.data.results.forEach(function (res$$1) {
            res$$1.docs.forEach(readAttachmentsAsBlobOrBuffer);
          });
        }
        cb(null, result.data);
      } catch (error) {
        cb(error);
      }
    }

    /* istanbul ignore next */
    function doBulkGetShim() {
      // avoid "url too long error" by splitting up into multiple requests
      const batchSize = MAX_SIMULTANEOUS_REVS;
      const numBatches = Math.ceil(opts.docs.length / batchSize);
      let numDone = 0;
      const results = new Array(numBatches);

      // Collect each batch's results in order; fire the callback once
      // every batch has reported.
      function onResult(batchNum) {
        return function (err, res$$1) {
          // err is impossible because shim returns a list of errs in that case
          results[batchNum] = res$$1.results;
          if (++numDone === numBatches) {
            callback(null, {results: results.flat()});
          }
        };
      }

      for (let i = 0; i < numBatches; i++) {
        const subOpts = pick(opts, ['revs', 'attachments', 'binary', 'latest']);
        subOpts.docs = opts.docs.slice(i * batchSize,
          Math.min(opts.docs.length, (i + 1) * batchSize));
        bulkGet(self, subOpts, onResult(i));
      }
    }

    // mark the whole database as either supporting or not supporting _bulk_get
    const dbUrl = genUrl(host, '');
    const supportsBulkGet = supportsBulkGetMap[dbUrl];

    /* istanbul ignore next */
    if (typeof supportsBulkGet !== 'boolean') {
      // check if this database supports _bulk_get
      doBulkGet(function (err, res$$1) {
        if (err) {
          supportsBulkGetMap[dbUrl] = false;
          // NOTE(review): `res` here looks like a logging/explain helper
          // defined elsewhere in the bundle — confirm it is the intended
          // function and not a shadowed response variable.
          res(
            err.status,
            'PouchDB is just detecting if the remote ' +
            'supports the _bulk_get API.'
          );
          doBulkGetShim();
        } else {
          supportsBulkGetMap[dbUrl] = true;
          callback(null, res$$1);
        }
      });
    } else if (supportsBulkGet) {
      doBulkGet(callback);
    } else {
      doBulkGetShim();
    }
  });
7072
7073 // Calls GET on the host, which gets back a JSON string containing
7074 // couchdb: A welcome string
7075 // version: The version of CouchDB it is running
7076 api._info = async function (callback) {
7077 try {
7078 await setup();
7079 const response = await ourFetch(genDBUrl(host, ''));
7080 const info = await response.json();
7081 info.host = genDBUrl(host, '');
7082 callback(null, info);
7083 } catch (err) {
7084 callback(err);
7085 }
7086 };
7087
7088 api.fetch = async function (path$$1, options) {
7089 await setup();
7090 const url = path$$1.substring(0, 1) === '/' ?
7091 genUrl(host, path$$1.substring(1)) :
7092 genDBUrl(host, path$$1);
7093 return ourFetch(url, options);
7094 };
7095
  // Get the document with the given id from the database given by host.
  // The id could be solely the _id in the database, or it may be a
  // _design/ID or _local/ID path
  api.get = adapterFun$$1('get', async function (id, opts, callback) {
    // If no options were given, set the callback to the second parameter
    if (typeof opts === 'function') {
      callback = opts;
      opts = {};
    }
    opts = clone(opts);

    // List of parameters to add to the GET request
    const params = {};

    if (opts.revs) {
      params.revs = true;
    }

    if (opts.revs_info) {
      params.revs_info = true;
    }

    if (opts.latest) {
      params.latest = true;
    }

    if (opts.open_revs) {
      // open_revs is sent as a JSON array unless it is the "all" keyword
      if (opts.open_revs !== "all") {
        opts.open_revs = JSON.stringify(opts.open_revs);
      }
      params.open_revs = opts.open_revs;
    }

    if (opts.rev) {
      params.rev = opts.rev;
    }

    if (opts.conflicts) {
      params.conflicts = opts.conflicts;
    }

    /* istanbul ignore if */
    if (opts.update_seq) {
      params.update_seq = opts.update_seq;
    }

    id = encodeDocId(id);

    // Replace attachment stubs on `doc` with real data, one request per
    // attachment (see comment below for why).
    function fetchAttachments(doc) {
      const atts = doc._attachments;
      const filenames = atts && Object.keys(atts);
      if (!atts || !filenames.length) {
        return;
      }
      // we fetch these manually in separate XHRs, because
      // Sync Gateway would normally send it back as multipart/mixed,
      // which we cannot parse. Also, this is more efficient than
      // receiving attachments as base64-encoded strings.
      async function fetchData(filename) {
        const att = atts[filename];
        const path$$1 = encodeDocId(doc._id) + '/' + encodeAttachmentId(filename) +
          '?rev=' + doc._rev;

        const response = await ourFetch(genDBUrl(host, path$$1));

        let blob;
        // node-fetch responses expose buffer(); browsers use blob().
        if ('buffer' in response) {
          blob = await response.buffer();
        } else {
          /* istanbul ignore next */
          blob = await response.blob();
        }

        let data;
        if (opts.binary) {
          // Stamp the content type onto the binary object, but only when
          // `type` is writable or has a setter on its prototype.
          const typeFieldDescriptor = Object.getOwnPropertyDescriptor(blob.__proto__, 'type');
          if (!typeFieldDescriptor || typeFieldDescriptor.set) {
            blob.type = att.content_type;
          }
          data = blob;
        } else {
          data = await new Promise(function (resolve) {
            blobToBase64(blob, resolve);
          });
        }

        // Turn the stub entry into an inline attachment.
        delete att.stub;
        delete att.length;
        att.data = data;
      }

      const promiseFactories = filenames.map(function (filename) {
        return function () {
          return fetchData(filename);
        };
      });

      // This limits the number of parallel xhr requests to 5 any time
      // to avoid issues with maximum browser request limits
      return pool(promiseFactories, 5);
    }

    // open_revs responses are arrays of {ok: doc} entries; fetch
    // attachments for every successfully retrieved revision.
    function fetchAllAttachments(docOrDocs) {
      if (Array.isArray(docOrDocs)) {
        return Promise.all(docOrDocs.map(function (doc) {
          if (doc.ok) {
            return fetchAttachments(doc.ok);
          }
        }));
      }
      return fetchAttachments(docOrDocs);
    }

    const url = genDBUrl(host, id + paramsToStr(params));
    try {
      const res$$1 = await fetchJSON(url);
      if (opts.attachments) {
        await fetchAllAttachments(res$$1.data);
      }
      callback(null, res$$1.data);
    } catch (error) {
      // Tag the failing id so callers can tell which get failed.
      error.docId = id;
      callback(error);
    }
  });
7221
7222
7223 // Delete the document given by doc from the database given by host.
7224 api.remove = adapterFun$$1('remove', async function (docOrId, optsOrRev, opts, cb) {
7225 let doc;
7226 if (typeof optsOrRev === 'string') {
7227 // id, rev, opts, callback style
7228 doc = {
7229 _id: docOrId,
7230 _rev: optsOrRev
7231 };
7232 if (typeof opts === 'function') {
7233 cb = opts;
7234 opts = {};
7235 }
7236 } else {
7237 // doc, opts, callback style
7238 doc = docOrId;
7239 if (typeof optsOrRev === 'function') {
7240 cb = optsOrRev;
7241 opts = {};
7242 } else {
7243 cb = opts;
7244 opts = optsOrRev;
7245 }
7246 }
7247
7248 const rev$$1 = (doc._rev || opts.rev);
7249 const url = genDBUrl(host, encodeDocId(doc._id)) + '?rev=' + rev$$1;
7250
7251 try {
7252 const result = await fetchJSON(url, {method: 'DELETE'});
7253 cb(null, result.data);
7254 } catch (error) {
7255 cb(error);
7256 }
7257 });
7258
// Percent-encode each path segment of an attachment id while keeping the
// "/" separators intact (attachment names may contain slashes).
function encodeAttachmentId(attachmentId) {
  return attachmentId
    .split('/')
    .map((segment) => encodeURIComponent(segment))
    .join('/');
}
7262
  // Get the attachment
  api.getAttachment = adapterFun$$1('getAttachment', async function (docId, attachmentId,
    opts, callback) {
    if (typeof opts === 'function') {
      callback = opts;
      opts = {};
    }
    // Pin the revision when one was requested.
    const params = opts.rev ? ('?rev=' + opts.rev) : '';
    const url = genDBUrl(host, encodeDocId(docId)) + '/' +
      encodeAttachmentId(attachmentId) + params;
    let contentType;
    try {
      const response = await ourFetch(url, {method: 'GET'});

      // Surface non-2xx responses to the caller as-is.
      if (!response.ok) {
        throw response;
      }

      contentType = response.headers.get('content-type');
      let blob;
      // Node (node-fetch) yields a Buffer; browsers yield a Blob.
      if (typeof process !== 'undefined' && !process.browser && typeof response.buffer === 'function') {
        blob = await response.buffer();
      } else {
        /* istanbul ignore next */
        blob = await response.blob();
      }

      // TODO: also remove
      // Stamp the content type onto the Buffer, but only when `type` is
      // writable or has a setter on its prototype.
      if (typeof process !== 'undefined' && !process.browser) {
        const typeFieldDescriptor = Object.getOwnPropertyDescriptor(blob.__proto__, 'type');
        if (!typeFieldDescriptor || typeFieldDescriptor.set) {
          blob.type = contentType;
        }
      }
      callback(null, blob);
    } catch (err) {
      callback(err);
    }
  });
7302
7303 // Remove the attachment given by the id and rev
7304 api.removeAttachment = adapterFun$$1('removeAttachment', async function (
7305 docId,
7306 attachmentId,
7307 rev$$1,
7308 callback,
7309 ) {
7310 const url = genDBUrl(host, encodeDocId(docId) + '/' + encodeAttachmentId(attachmentId)) + '?rev=' + rev$$1;
7311
7312 try {
7313 const result = await fetchJSON(url, {method: 'DELETE'});
7314 callback(null, result.data);
7315 } catch (error) {
7316 callback(error);
7317 }
7318 });
7319
7320 // Add the attachment given by blob and its contentType property
7321 // to the document with the given id, the revision given by rev, and
7322 // add it to the database given by host.
7323 api.putAttachment = adapterFun$$1('putAttachment', async function (
7324 docId,
7325 attachmentId,
7326 rev$$1,
7327 blob,
7328 type,
7329 callback,
7330 ) {
7331 if (typeof type === 'function') {
7332 callback = type;
7333 type = blob;
7334 blob = rev$$1;
7335 rev$$1 = null;
7336 }
7337 const id = encodeDocId(docId) + '/' + encodeAttachmentId(attachmentId);
7338 let url = genDBUrl(host, id);
7339 if (rev$$1) {
7340 url += '?rev=' + rev$$1;
7341 }
7342
7343 if (typeof blob === 'string') {
7344 // input is assumed to be a base64 string
7345 let binary;
7346 try {
7347 binary = thisAtob(blob);
7348 } catch (err) {
7349 return callback(createError(BAD_ARG,
7350 'Attachment is not a valid base64 string'));
7351 }
7352 blob = binary ? binStringToBluffer(binary, type) : '';
7353 }
7354
7355 try {
7356 // Add the attachment
7357 const result = await fetchJSON(url, {
7358 headers: new Headers({'Content-Type': type}),
7359 method: 'PUT',
7360 body: blob
7361 });
7362 callback(null, result.data);
7363 } catch (error) {
7364 callback(error);
7365 }
7366 });
7367
7368 // Update/create multiple documents given by req in the database
7369 // given by host.
7370 api._bulkDocs = async function (req, opts, callback) {
7371 // If new_edits=false then it prevents the database from creating
7372 // new revision numbers for the documents. Instead it just uses
7373 // the old ones. This is used in database replication.
7374 req.new_edits = opts.new_edits;
7375
7376 try {
7377 await setup();
7378 await Promise.all(req.docs.map(preprocessAttachments$1));
7379
7380 // Update/create the documents
7381 const result = await fetchJSON(genDBUrl(host, '_bulk_docs'), {
7382 method: 'POST',
7383 body: JSON.stringify(req)
7384 });
7385 callback(null, result.data);
7386 } catch (error) {
7387 callback(error);
7388 }
7389 };
7390
7391 // Update/create document
7392 api._put = async function (doc, opts, callback) {
7393 try {
7394 await setup();
7395 await preprocessAttachments$1(doc);
7396
7397 const result = await fetchJSON(genDBUrl(host, encodeDocId(doc._id)), {
7398 method: 'PUT',
7399 body: JSON.stringify(doc)
7400 });
7401 callback(null, result.data);
7402 } catch (error) {
7403 error.docId = doc && doc._id;
7404 callback(error);
7405 }
7406 };
7407
7408
  // Get a listing of the documents in the database given
  // by host and ordered by increasing id.
  api.allDocs = adapterFun$$1('allDocs', async function (opts, callback) {
    if (typeof opts === 'function') {
      callback = opts;
      opts = {};
    }
    opts = clone(opts);

    // List of parameters to add to the GET request
    const params = {};
    let body;
    let method = 'GET';

    if (opts.conflicts) {
      params.conflicts = true;
    }

    /* istanbul ignore if */
    if (opts.update_seq) {
      params.update_seq = true;
    }

    if (opts.descending) {
      params.descending = true;
    }

    if (opts.include_docs) {
      params.include_docs = true;
    }

    // added in CouchDB 1.6.0
    if (opts.attachments) {
      params.attachments = true;
    }

    if (opts.key) {
      params.key = JSON.stringify(opts.key);
    }

    // start_key/end_key are aliases for startkey/endkey.
    if (opts.start_key) {
      opts.startkey = opts.start_key;
    }

    if (opts.startkey) {
      params.startkey = JSON.stringify(opts.startkey);
    }

    if (opts.end_key) {
      opts.endkey = opts.end_key;
    }

    if (opts.endkey) {
      params.endkey = JSON.stringify(opts.endkey);
    }

    if (typeof opts.inclusive_end !== 'undefined') {
      params.inclusive_end = !!opts.inclusive_end;
    }

    if (typeof opts.limit !== 'undefined') {
      params.limit = opts.limit;
    }

    if (typeof opts.skip !== 'undefined') {
      params.skip = opts.skip;
    }

    const paramStr = paramsToStr(params);

    // Explicit key lists go in a POST body, avoiding oversized URLs.
    if (typeof opts.keys !== 'undefined') {
      method = 'POST';
      body = {keys: opts.keys};
    }

    try {
      const result = await fetchJSON(genDBUrl(host, '_all_docs' + paramStr), {
        method,
        body: JSON.stringify(body)
      });
      // Convert inline base64 attachments to Blobs/Buffers on request.
      if (opts.include_docs && opts.attachments && opts.binary) {
        result.data.rows.forEach(readAttachmentsAsBlobOrBuffer);
      }
      callback(null, result.data);
    } catch (error) {
      callback(error);
    }
  });
7497
  // Get a list of changes made to documents in the database given by host.
  // TODO According to the README, there should be two other methods here,
  // api.changes.addListener and api.changes.removeListener.
  api._changes = function (opts) {

    // We internally page the results of a changes request, this means
    // if there is a large set of changes to be returned we can start
    // processing them quicker instead of waiting on the entire
    // set of changes to return and attempting to process them at once
    const batchSize = 'batch_size' in opts ? opts.batch_size : CHANGES_BATCH_SIZE;

    opts = clone(opts);

    if (opts.continuous && !('heartbeat' in opts)) {
      opts.heartbeat = DEFAULT_HEARTBEAT;
    }

    let requestTimeout = ('timeout' in opts) ? opts.timeout : 30 * 1000;

    // ensure CHANGES_TIMEOUT_BUFFER applies
    // NOTE(review): requestTimeout is computed but not referenced below in
    // this build — presumably consumed in other builds; confirm.
    if ('timeout' in opts && opts.timeout &&
      (requestTimeout - opts.timeout) < CHANGES_TIMEOUT_BUFFER) {
      requestTimeout = opts.timeout + CHANGES_TIMEOUT_BUFFER;
    }

    /* istanbul ignore if */
    if ('heartbeat' in opts && opts.heartbeat &&
      (requestTimeout - opts.heartbeat) < CHANGES_TIMEOUT_BUFFER) {
      requestTimeout = opts.heartbeat + CHANGES_TIMEOUT_BUFFER;
    }

    const params = {};
    if ('timeout' in opts && opts.timeout) {
      params.timeout = opts.timeout;
    }

    // false means "no limit"; leftToFetch counts down across pages.
    const limit = (typeof opts.limit !== 'undefined') ? opts.limit : false;
    let leftToFetch = limit;

    if (opts.style) {
      params.style = opts.style;
    }

    // Client-side filter functions need the full doc to filter on.
    if (opts.include_docs || opts.filter && typeof opts.filter === 'function') {
      params.include_docs = true;
    }

    if (opts.attachments) {
      params.attachments = true;
    }

    if (opts.continuous) {
      params.feed = 'longpoll';
    }

    if (opts.seq_interval) {
      params.seq_interval = opts.seq_interval;
    }

    if (opts.conflicts) {
      params.conflicts = true;
    }

    if (opts.descending) {
      params.descending = true;
    }

    /* istanbul ignore if */
    if (opts.update_seq) {
      params.update_seq = true;
    }

    if ('heartbeat' in opts) {
      // If the heartbeat value is false, it disables the default heartbeat
      if (opts.heartbeat) {
        params.heartbeat = opts.heartbeat;
      }
    }

    if (opts.filter && typeof opts.filter === 'string') {
      params.filter = opts.filter;
    }

    if (opts.view && typeof opts.view === 'string') {
      params.filter = '_view';
      params.view = opts.view;
    }

    // If opts.query_params exists, pass it through to the changes request.
    // These parameters may be used by the filter on the source database.
    if (opts.query_params && typeof opts.query_params === 'object') {
      for (const param_name in opts.query_params) {
        /* istanbul ignore else */
        if (Object.prototype.hasOwnProperty.call(opts.query_params, param_name)) {
          params[param_name] = opts.query_params[param_name];
        }
      }
    }

    let method = 'GET';
    let body;

    if (opts.doc_ids) {
      // set this automagically for the user; it's annoying that couchdb
      // requires both a "filter" and a "doc_ids" param.
      params.filter = '_doc_ids';
      method = 'POST';
      body = {doc_ids: opts.doc_ids };
    }
    /* istanbul ignore next */
    else if (opts.selector) {
      // set this automagically for the user, similar to above
      params.filter = '_selector';
      method = 'POST';
      body = {selector: opts.selector };
    }

    // Lets cancel() abort the in-flight HTTP request.
    const controller = new AbortController();
    let lastFetchedSeq;

    // Get all the changes starting with the one immediately after the
    // sequence number given by since.
    const fetchData = async function (since, callback) {
      if (opts.aborted) {
        return;
      }
      params.since = since;
      // "since" can be any kind of json object in Cloudant/CouchDB 2.x
      /* istanbul ignore next */
      if (typeof params.since === "object") {
        params.since = JSON.stringify(params.since);
      }

      if (opts.descending) {
        if (limit) {
          params.limit = leftToFetch;
        }
      } else {
        params.limit = (!limit || leftToFetch > batchSize) ?
          batchSize : leftToFetch;
      }

      // Set the options for the ajax call
      const url = genDBUrl(host, '_changes' + paramsToStr(params));
      const fetchOpts = {
        signal: controller.signal,
        method,
        body: JSON.stringify(body)
      };
      lastFetchedSeq = since;

      /* istanbul ignore if */
      if (opts.aborted) {
        return;
      }

      // Get the changes
      try {
        await setup();
        const result = await fetchJSON(url, fetchOpts);
        callback(null, result.data);
      } catch (error) {
        callback(error);
      }
    };

    // If opts.since exists, get all the changes from the sequence
    // number given by opts.since. Otherwise, get all the changes
    // from the sequence number 0.
    const results = {results: []};

    // Handles one page of changes: filters them, forwards matches to
    // opts.onChange, then either queues the next page or completes.
    const fetched = function (err, res$$1) {
      if (opts.aborted) {
        return;
      }
      let raw_results_length = 0;
      // If the result of the ajax call (res) contains changes (res.results)
      if (res$$1 && res$$1.results) {
        raw_results_length = res$$1.results.length;
        results.last_seq = res$$1.last_seq;
        let pending = null;
        let lastSeq = null;
        // Attach 'pending' property if server supports it (CouchDB 2.0+)
        /* istanbul ignore if */
        if (typeof res$$1.pending === 'number') {
          pending = res$$1.pending;
        }
        if (typeof results.last_seq === 'string' || typeof results.last_seq === 'number') {
          lastSeq = results.last_seq;
        }
        // For each change
        const req = {};
        req.query = opts.query_params;
        res$$1.results = res$$1.results.filter(function (c) {
          leftToFetch--;
          const ret = filterChange(opts)(c);
          if (ret) {
            if (opts.include_docs && opts.attachments && opts.binary) {
              readAttachmentsAsBlobOrBuffer(c);
            }
            if (opts.return_docs) {
              results.results.push(c);
            }
            opts.onChange(c, pending, lastSeq);
          }
          return ret;
        });
      } else if (err) {
        // In case of an error, stop listening for changes and call
        // opts.complete
        opts.aborted = true;
        opts.complete(err);
        return;
      }

      // The changes feed may have timed out with no results
      // if so reuse last update sequence
      if (res$$1 && res$$1.last_seq) {
        lastFetchedSeq = res$$1.last_seq;
      }

      const finished = (limit && leftToFetch <= 0) ||
        (res$$1 && raw_results_length < batchSize) ||
        (opts.descending);

      if ((opts.continuous && !(limit && leftToFetch <= 0)) || !finished) {
        // Queue a call to fetch again with the newest sequence number
        nextTick(function () { fetchData(lastFetchedSeq, fetched); });
      } else {
        // We're done, call the callback
        opts.complete(null, results);
      }
    };

    fetchData(opts.since || 0, fetched);

    // Return a method to cancel this method from processing any more
    return {
      cancel: function () {
        opts.aborted = true;
        controller.abort();
      }
    };
  };
7742
  // Given a set of document/revision IDs (given by req), tests the subset of
  // those that do NOT correspond to revisions stored in the database.
  // See http://wiki.apache.org/couchdb/HttpPostRevsDiff
7746 api.revsDiff = adapterFun$$1('revsDiff', async function (req, opts, callback) {
7747 // If no options were given, set the callback to be the second parameter
7748 if (typeof opts === 'function') {
7749 callback = opts;
7750 opts = {};
7751 }
7752
7753 try {
7754 // Get the missing document/revision IDs
7755 const result = await fetchJSON(genDBUrl(host, '_revs_diff'), {
7756 method: 'POST',
7757 body: JSON.stringify(req)
7758 });
7759 callback(null, result.data);
7760 } catch (error) {
7761 callback(error);
7762 }
7763 });
7764
  // No client-side resources are held for HTTP databases, so closing is
  // a synchronous no-op.
  api._close = function (callback) {
    callback();
  };
7768
7769 api._destroy = async function (options, callback) {
7770 try {
7771 const json = await fetchJSON(genDBUrl(host, ''), {method: 'DELETE'});
7772 callback(null, json);
7773 } catch (error) {
7774 if (error.status === 404) {
7775 callback(null, {ok: true});
7776 } else {
7777 callback(error);
7778 }
7779 }
7780 };
7781}
7782
// The HTTP adapter has no environment requirements, so it is always valid.
HttpPouch.valid = () => true;
7787
// Plugin entry point: register the HTTP adapter under both URL schemes.
// The trailing `false` flag matches the original registration options —
// presumably "do not use for auto-detection"; confirm against
// PouchDB.adapter's documentation.
function HttpPouch$1(PouchDB) {
  for (const scheme of ['http', 'https']) {
    PouchDB.adapter(scheme, HttpPouch, false);
  }
}
7792
// CouchDB-style 400 error for malformed map/reduce or mango queries.
class QueryParseError extends Error {
  constructor(message) {
    super();
    Object.assign(this, {
      status: 400,
      name: 'query_parse_error',
      message,
      error: true,
    });
    try {
      // V8-only; other engines throw and we simply skip the nicer trace.
      Error.captureStackTrace(this, QueryParseError);
    } catch (e) {}
  }
}
7805
// CouchDB-style 404 error for missing documents/views.
class NotFoundError$1 extends Error {
  constructor(message) {
    super();
    Object.assign(this, {
      status: 404,
      name: 'not_found',
      message,
      error: true,
    });
    try {
      // V8-only; other engines throw and we simply skip the nicer trace.
      Error.captureStackTrace(this, NotFoundError$1);
    } catch (e) {}
  }
}
7818
// CouchDB-style 500 error for invalid values fed to builtin reducers.
class BuiltInError extends Error {
  constructor(message) {
    super();
    Object.assign(this, {
      status: 500,
      name: 'invalid_value',
      message,
      error: true,
    });
    try {
      // V8-only; other engines throw and we simply skip the nicer trace.
      Error.captureStackTrace(this, BuiltInError);
    } catch (e) {}
  }
}
7831
// Bridge a promise to an optional node-style callback. The callback is
// deferred by a tick so it never fires synchronously; the original
// promise is returned untouched for promise-style consumers.
function promisedCallback(promise, callback) {
  if (callback) {
    promise.then(
      (result) => nextTick(() => callback(null, result)),
      (reason) => nextTick(() => callback(reason))
    );
  }
  return promise;
}
7846
// Adapt a promise-returning `fun` so it also accepts a trailing
// node-style callback, while still returning the promise.
function callbackify(fun) {
  return function (...args) {
    const cb = args.pop();
    const promise = fun.apply(this, args);
    if (typeof cb === 'function') {
      promisedCallback(promise, cb);
    }
    return promise;
  };
}
7857
// Promise finally util similar to Q.finally: run `finalPromiseFactory`
// after `promise` settles, then re-deliver the original outcome.
function fin(promise, finalPromiseFactory) {
  return promise.then(
    async (result) => {
      await finalPromiseFactory();
      return result;
    },
    async (reason) => {
      await finalPromiseFactory();
      throw reason;
    }
  );
}
7870
// Wrap `promiseFactory` so each invocation is funneled through `queue`,
// preserving the original `this` and arguments.
function sequentialize(queue, promiseFactory) {
  return function (...args) {
    return queue.add(() => promiseFactory.apply(this, args));
  };
}
7880
// uniq an array of strings, order not guaranteed
// similar to underscore/lodash _.uniq
function uniq(arr) {
  // Spreading a Set is the idiomatic dedupe; Set preserves first-insertion
  // order, matching the manual forEach copy this replaces.
  return [...new Set(arr)];
}
7892
// Extract a Map's keys as an array, in insertion order.
function mapToKeysArray(map) {
  // Array.from over the keys iterator replaces the manual forEach copy.
  return Array.from(map.keys());
}
7901
// Build the standard error for a builtin reduce function (`_sum` etc.)
// that was fed non-numeric map values.
function createBuiltInError(name) {
  return new BuiltInError(
    `builtin ${name} function requires map values to be numbers or number arrays`
  );
}
7908
// CouchDB-compatible `_sum` reduce: sums plain numbers, sums number
// arrays element-wise, and mixing the two promotes the scalar running
// total into element 0 of an array total.
function sum(values) {
  let total = 0;
  for (const value of values) {
    if (typeof value === 'number') {
      if (typeof total === 'number') {
        total += value;
      } else {
        // Total already became an array: scalars fold into slot 0.
        total[0] += value;
      }
    } else if (Array.isArray(value)) {
      // lists of numbers are also allowed, sum them separately
      if (typeof total === 'number') {
        total = [total];
      }
      for (let j = 0; j < value.length; j++) {
        const element = value[j];
        if (typeof element !== 'number') {
          throw createBuiltInError('_sum');
        } else if (typeof total[j] === 'undefined') {
          total.push(element);
        } else {
          total[j] += element;
        }
      }
    } else {
      // not array/number
      throw createBuiltInError('_sum');
    }
  }
  return total;
}
7938
// Inside of 'vm' for Node, we need a way to translate a pseudo-error
// back into a real error once it's out of the VM: real Error instances
// can't cross the sandbox boundary, so failures travel as tagged plain
// objects and are rehydrated by convertToTrueError() on the host side.
function createBuiltInErrorInVm(name) {
  return { builtInError: true, name };
}
7947
// Rehydrate a vm pseudo-error (see createBuiltInErrorInVm) into a real
// BuiltInError once it has crossed back out of the sandbox.
function convertToTrueError(err) {
  return createBuiltInError(err.name);
}
7951
// Detect the tagged pseudo-error objects produced inside the vm sandbox
// (see createBuiltInErrorInVm). Deliberately returns the falsy input
// itself (not `false`) for non-objects, like the `&&` form it mirrors.
function isBuiltInError(obj$$1) {
  return obj$$1 ? obj$$1.builtInError : obj$$1;
}
7955
// All of this vm hullaballoo is to be able to run arbitrary code in a sandbox
// for security reasons.
// NOTE(review): Node's `vm` is not a hardened security boundary; treat this
// as isolation from accidental scope capture rather than true sandboxing.
function evalFunctionInVm(func, emit) {
  return function (arg1, arg2, arg3) {
    // Build a self-contained script: helper functions are inlined via
    // toString(), emitted rows collect in __emitteds__, and the user
    // function's return value comes back as __result__.
    var code = '(function() {"use strict";' +
      'var createBuiltInError = ' + createBuiltInErrorInVm.toString() + ';' +
      'var sum = ' + sum.toString() + ';' +
      'var log = function () {};' +
      'var isArray = Array.isArray;' +
      'var toJSON = JSON.parse;' +
      'var __emitteds__ = [];' +
      'var emit = function (key, value) {__emitteds__.push([key, value]);};' +
      'var __result__ = (' +
      func.replace(/;\s*$/, '') + ')' + '(' +
      JSON.stringify(arg1) + ',' +
      JSON.stringify(arg2) + ',' +
      JSON.stringify(arg3) + ');' +
      'return {result: __result__, emitteds: __emitteds__};' +
      '})()';

    var output = vm.runInNewContext(code);

    // Replay the sandboxed emits against the host-side emit function.
    output.emitteds.forEach(function (emitted) {
      emit(emitted[0], emitted[1]);
    });
    // Pseudo-errors created inside the vm become real errors out here.
    if (isBuiltInError(output.result)) {
      output.result = convertToTrueError(output.result);
    }
    return output.result;
  };
}
7987
// Host-side counterparts of the helpers injected into the vm above.
var log = guardedConsole.bind(null, 'log');
var toJSON = JSON.parse;

// The "stringify, then execute in a VM" strategy totally breaks Istanbul due
// to missing __coverage global objects. As a solution, export different
// code during coverage testing and during regular execution.
// Note that this doesn't get shipped to consumers because Rollup replaces it
// with rollup-plugin-replace, so false is replaced with `false`
var evalFunc;
/* istanbul ignore else */
{
  evalFunc = evalFunctionInVm;
}

var evalFunction = evalFunc;
8003
8004/*
8005 * Simple task queue to sequentialize actions. Assumes
8006 * callbacks will eventually fire (once).
8007 */
8008
8009class TaskQueue$1 {
8010 constructor() {
8011 this.promise = Promise.resolve();
8012 }
8013
8014 add(promiseFactory) {
8015 this.promise = this.promise
8016 // just recover
8017 .catch(() => { })
8018 .then(() => promiseFactory());
8019 return this.promise;
8020 }
8021
8022 finish() {
8023 return this.promise;
8024 }
8025}
8026
// Serialize a map/reduce definition for view signatures.
function stringify(input) {
  // Falsy inputs serialize to the literal string 'undefined' for
  // backwards compat with empty reduce definitions.
  if (!input) {
    return 'undefined';
  }
  // Functions and strings keep their source text verbatim (mapreduce
  // compat); everything else — e.g. mango query objects — is JSON.
  const kind = typeof input;
  if (kind === 'function' || kind === 'string') {
    return input.toString();
  }
  return JSON.stringify(input);
}
8045
/* create a string signature for a view so we can cache it and uniq it */
function createViewSignature(mapFun, reduceFun) {
  // The trailing "undefined" keeps signatures identical to those
  // produced by older versions (backwards compatibility).
  return `${stringify(mapFun)}${stringify(reduceFun)}undefined`;
}
8051
// Create (or return the cached promise for) the materialized view backing
// `viewName`: a dependent database whose name is derived from the
// map/reduce signature. Temporary views bypass the cache.
async function createView(sourceDB, viewName, mapFun, reduceFun, temporary, localDocName) {
  const viewSignature = createViewSignature(mapFun, reduceFun);

  let cachedViews;
  if (!temporary) {
    // cache this to ensure we don't try to update the same view twice
    cachedViews = sourceDB._cachedViews = sourceDB._cachedViews || {};
    if (cachedViews[viewSignature]) {
      return cachedViews[viewSignature];
    }
  }

  const promiseForView = sourceDB.info().then(async function (info) {
    // One physical database per view signature (md5 keeps names short).
    const depDbName = info.db_name + '-mrview-' +
      (temporary ? 'temp' : stringMd5(viewSignature));

    // save the view name in the source db so it can be cleaned up if necessary
    // (e.g. when the _design doc is deleted, remove all associated view data)
    function diffFunction(doc) {
      doc.views = doc.views || {};
      let fullViewName = viewName;
      if (fullViewName.indexOf('/') === -1) {
        fullViewName = viewName + '/' + viewName;
      }
      const depDbs = doc.views[fullViewName] = doc.views[fullViewName] || {};
      /* istanbul ignore if */
      if (depDbs[depDbName]) {
        return; // no update necessary
      }
      depDbs[depDbName] = true;
      return doc;
    }
    await upsert(sourceDB, '_local/' + localDocName, diffFunction);
    const res$$1 = await sourceDB.registerDependentDatabase(depDbName);
    const db = res$$1.db;
    db.auto_compaction = true;
    const view = {
      name: depDbName,
      db,
      sourceDB,
      adapter: sourceDB.adapter,
      mapFun,
      reduceFun
    };

    // Resume indexing from the last source sequence already processed;
    // a 404 just means this view has never been built.
    let lastSeqDoc;
    try {
      lastSeqDoc = await view.db.get('_local/lastSeq');
    } catch (err) {
      /* istanbul ignore if */
      if (err.status !== 404) {
        throw err;
      }
    }

    view.seq = lastSeqDoc ? lastSeqDoc.seq : 0;
    if (cachedViews) {
      // Drop the cache entry when the backing view DB is destroyed.
      view.db.once('destroyed', function () {
        delete cachedViews[viewSignature];
      });
    }
    return view;
  });

  if (cachedViews) {
    cachedViews[viewSignature] = promiseForView;
  }
  return promiseForView;
}
8121
// Per-view task queues so a given persistent view is only read/updated serially.
const persistentQueues = {};
// Single queue shared by all temp views (each is created, queried, destroyed).
const tempViewQueue = new TaskQueue$1();
// Default page size when reading the source DB's changes feed during indexing.
const CHANGES_BATCH_SIZE$1 = 50;
8125
// Accepts either 'ddocname/viewname' or a bare 'viewname'
// (in which case the ddoc name is assumed to equal the view name).
function parseViewName(name) {
  if (name.includes('/')) {
    return name.split('/');
  }
  return [name, name];
}
8131
// True when a doc has exactly one leaf revision and that revision is
// generation 1, i.e. this is the very first write of the document.
function isGenOne(changes) {
  if (changes.length !== 1) {
    return false;
  }
  return changes[0].rev.startsWith('1-');
}
8137
// Report a map/reduce error by emitting an 'error' event on the database.
// The emit itself may throw (e.g. when no 'error' listener is attached —
// TODO confirm: db appears to be a Node-style EventEmitter); in that case,
// fall back to logging debugging guidance instead of crashing the indexer.
function emitError(db, e, data) {
  try {
    db.emit('error', e);
  } catch (err) {
    guardedConsole('error',
      'The user\'s map/reduce function threw an uncaught error.\n' +
      'You can debug this error by doing:\n' +
      'myDatabase.on(\'error\', function (err) { debugger; });\n' +
      'Please double-check your map/reduce function.');
    guardedConsole('error', e, data);
  }
}
8150
8151/**
8152 * Returns an "abstract" mapreduce object of the form:
8153 *
8154 * {
8155 * query: queryFun,
8156 * viewCleanup: viewCleanupFun
8157 * }
8158 *
8159 * Arguments are:
8160 *
8161 * localDoc: string
8162 * This is for the local doc that gets saved in order to track the
8163 * "dependent" DBs and clean them up for viewCleanup. It should be
8164 * unique, so that indexer plugins don't collide with each other.
8165 * mapper: function (mapFunDef, emit)
8166 * Returns a map function based on the mapFunDef, which in the case of
8167 * normal map/reduce is just the de-stringified function, but may be
8168 * something else, such as an object in the case of pouchdb-find.
8169 * reducer: function (reduceFunDef)
8170 * Ditto, but for reducing. Modules don't have to support reducing
8171 * (e.g. pouchdb-find).
8172 * ddocValidator: function (ddoc, viewName)
8173 * Throws an error if the ddoc or viewName is not valid.
8174 * This could be a way to communicate to the user that the configuration for the
8175 * indexer is invalid.
8176 */
8177function createAbstractMapReduce(localDocName, mapper, reducer, ddocValidator) {
8178
8179 function tryMap(db, fun, doc) {
8180 // emit an event if there was an error thrown by a map function.
8181 // putting try/catches in a single function also avoids deoptimizations.
8182 try {
8183 fun(doc);
8184 } catch (e) {
8185 emitError(db, e, {fun, doc});
8186 }
8187 }
8188
8189 function tryReduce(db, fun, keys, values, rereduce) {
8190 // same as above, but returning the result or an error. there are two separate
8191 // functions to avoid extra memory allocations since the tryCode() case is used
8192 // for custom map functions (common) vs this function, which is only used for
8193 // custom reduce functions (rare)
8194 try {
8195 return {output : fun(keys, values, rereduce)};
8196 } catch (e) {
8197 emitError(db, e, {fun, keys, values, rereduce});
8198 return {error: e};
8199 }
8200 }
8201
8202 function sortByKeyThenValue(x, y) {
8203 const keyCompare = collate(x.key, y.key);
8204 return keyCompare !== 0 ? keyCompare : collate(x.value, y.value);
8205 }
8206
8207 function sliceResults(results, limit, skip) {
8208 skip = skip || 0;
8209 if (typeof limit === 'number') {
8210 return results.slice(skip, limit + skip);
8211 } else if (skip > 0) {
8212 return results.slice(skip);
8213 }
8214 return results;
8215 }
8216
8217 function rowToDocId(row) {
8218 const val = row.value;
8219 // Users can explicitly specify a joined doc _id, or it
8220 // defaults to the doc _id that emitted the key/value.
8221 const docId = (val && typeof val === 'object' && val._id) || row.id;
8222 return docId;
8223 }
8224
8225 function readAttachmentsAsBlobOrBuffer(res$$1) {
8226 for (const row of res$$1.rows) {
8227 const atts = row.doc && row.doc._attachments;
8228 if (!atts) {
8229 continue;
8230 }
8231 for (const filename of Object.keys(atts)) {
8232 const att = atts[filename];
8233 atts[filename].data = b64ToBluffer(att.data, att.content_type);
8234 }
8235 }
8236 }
8237
8238 function postprocessAttachments(opts) {
8239 return function (res$$1) {
8240 if (opts.include_docs && opts.attachments && opts.binary) {
8241 readAttachmentsAsBlobOrBuffer(res$$1);
8242 }
8243 return res$$1;
8244 };
8245 }
8246
8247 function addHttpParam(paramName, opts, params, asJson) {
8248 // add an http param from opts to params, optionally json-encoded
8249 let val = opts[paramName];
8250 if (typeof val !== 'undefined') {
8251 if (asJson) {
8252 val = encodeURIComponent(JSON.stringify(val));
8253 }
8254 params.push(paramName + '=' + val);
8255 }
8256 }
8257
8258 function coerceInteger(integerCandidate) {
8259 if (typeof integerCandidate !== 'undefined') {
8260 const asNumber = Number(integerCandidate);
8261 // prevents e.g. '1foo' or '1.1' being coerced to 1
8262 if (!isNaN(asNumber) && asNumber === parseInt(integerCandidate, 10)) {
8263 return asNumber;
8264 } else {
8265 return integerCandidate;
8266 }
8267 }
8268 }
8269
8270 function coerceOptions(opts) {
8271 opts.group_level = coerceInteger(opts.group_level);
8272 opts.limit = coerceInteger(opts.limit);
8273 opts.skip = coerceInteger(opts.skip);
8274 return opts;
8275 }
8276
8277 function checkPositiveInteger(number) {
8278 if (number) {
8279 if (typeof number !== 'number') {
8280 return new QueryParseError(`Invalid value for integer: "${number}"`);
8281 }
8282 if (number < 0) {
8283 return new QueryParseError(`Invalid value for positive integer: "${number}"`);
8284 }
8285 }
8286 }
8287
  // Validate query options against the view definition, throwing a
  // QueryParseError for impossible key ranges or invalid reduce options.
  function checkQueryParseError(options, fun) {
    // with descending=true the logical start/end keys are swapped
    const startkeyName = options.descending ? 'endkey' : 'startkey';
    const endkeyName = options.descending ? 'startkey' : 'endkey';

    if (typeof options[startkeyName] !== 'undefined' &&
      typeof options[endkeyName] !== 'undefined' &&
      collate(options[startkeyName], options[endkeyName]) > 0) {
      throw new QueryParseError('No rows can match your key range, ' +
        'reverse your start_key and end_key or set {descending : true}');
    } else if (fun.reduce && options.reduce !== false) {
      if (options.include_docs) {
        throw new QueryParseError('{include_docs:true} is invalid for reduce');
      } else if (options.keys && options.keys.length > 1 &&
        !options.group && !options.group_level) {
        throw new QueryParseError('Multi-key fetches for reduce views must use ' +
          '{group: true}');
      }
    }
    // group_level/limit/skip must be non-negative numbers when present
    for (const optionName of ['group_level', 'limit', 'skip']) {
      const error = checkPositiveInteger(options[optionName]);
      if (error) {
        throw error;
      }
    }
  }
8313
  // Execute a query against a remote CouchDB-compatible server by building
  // the appropriate _design/<ddoc>/_view/<view> (or _temp_view) HTTP
  // request. `fun` is either a 'ddoc/view' string or a {map, reduce}
  // definition object for a temporary view.
  async function httpQuery(db, fun, opts) {
    // List of parameters to add to the PUT request
    let params = [];
    let body;
    let method = 'GET';
    let ok;

    // If opts.reduce exists and is defined, then add it to the list
    // of parameters.
    // If reduce=false then the results are that of only the map function
    // not the final result of map and reduce.
    addHttpParam('reduce', opts, params);
    addHttpParam('include_docs', opts, params);
    addHttpParam('attachments', opts, params);
    addHttpParam('limit', opts, params);
    addHttpParam('descending', opts, params);
    addHttpParam('group', opts, params);
    addHttpParam('group_level', opts, params);
    addHttpParam('skip', opts, params);
    addHttpParam('stale', opts, params);
    addHttpParam('conflicts', opts, params);
    addHttpParam('startkey', opts, params, true);
    addHttpParam('start_key', opts, params, true);
    addHttpParam('endkey', opts, params, true);
    addHttpParam('end_key', opts, params, true);
    addHttpParam('inclusive_end', opts, params);
    addHttpParam('key', opts, params, true);
    addHttpParam('update_seq', opts, params);

    // Format the list of parameters into a valid URI query string
    params = params.join('&');
    params = params === '' ? '' : '?' + params;

    // If keys are supplied, issue a POST to circumvent GET query string limits
    // see http://wiki.apache.org/couchdb/HTTP_view_API#Querying_Options
    if (typeof opts.keys !== 'undefined') {
      const MAX_URL_LENGTH = 2000;
      // according to http://stackoverflow.com/a/417184/680742,
      // the de facto URL length limit is 2000 characters

      const keysAsString = `keys=${encodeURIComponent(JSON.stringify(opts.keys))}`;
      if (keysAsString.length + params.length + 1 <= MAX_URL_LENGTH) {
        // If the keys are short enough, do a GET. we do this to work around
        // Safari not understanding 304s on POSTs (see pouchdb/pouchdb#1239)
        params += (params[0] === '?' ? '&' : '?') + keysAsString;
      } else {
        method = 'POST';
        if (typeof fun === 'string') {
          body = {keys: opts.keys};
        } else { // fun is {map : mapfun}, so append to this
          fun.keys = opts.keys;
        }
      }
    }

    // We are referencing a query defined in the design doc
    if (typeof fun === 'string') {
      const parts = parseViewName(fun);

      const response = await db.fetch('_design/' + parts[0] + '/_view/' + parts[1] + params, {
        headers: new Headers({'Content-Type': 'application/json'}),
        method,
        body: JSON.stringify(body)
      });
      ok = response.ok;
      // status = response.status;
      const result = await response.json();

      if (!ok) {
        result.status = response.status;
        throw generateErrorFromResponse(result);
      }

      // fail the entire request if the result contains an error
      for (const row of result.rows) {
        /* istanbul ignore if */
        if (row.value && row.value.error && row.value.error === "builtin_reduce_error") {
          throw new Error(row.reason);
        }
      }

      // inflate attachments to binary when the caller asked for them
      return new Promise(function (resolve) {
        resolve(result);
      }).then(postprocessAttachments(opts));
    }

    // We are using a temporary view, terrible for performance, good for testing
    body = body || {};
    for (const key of Object.keys(fun)) {
      if (Array.isArray(fun[key])) {
        body[key] = fun[key];
      } else {
        body[key] = fun[key].toString();
      }
    }

    const response = await db.fetch('_temp_view' + params, {
      headers: new Headers({'Content-Type': 'application/json'}),
      method: 'POST',
      body: JSON.stringify(body)
    });

    ok = response.ok;
    // status = response.status;
    const result = await response.json();
    if (!ok) {
      result.status = response.status;
      throw generateErrorFromResponse(result);
    }

    return new Promise(function (resolve) {
      resolve(result);
    }).then(postprocessAttachments(opts));
  }
8428
8429 // custom adapters can define their own api._query
8430 // and override the default behavior
8431 /* istanbul ignore next */
8432 function customQuery(db, fun, opts) {
8433 return new Promise(function (resolve, reject) {
8434 db._query(fun, opts, function (err, res$$1) {
8435 if (err) {
8436 return reject(err);
8437 }
8438 resolve(res$$1);
8439 });
8440 });
8441 }
8442
8443 // custom adapters can define their own api._viewCleanup
8444 // and override the default behavior
8445 /* istanbul ignore next */
8446 function customViewCleanup(db) {
8447 return new Promise(function (resolve, reject) {
8448 db._viewCleanup(function (err, res$$1) {
8449 if (err) {
8450 return reject(err);
8451 }
8452 resolve(res$$1);
8453 });
8454 });
8455 }
8456
8457 function defaultsTo(value) {
8458 return function (reason) {
8459 /* istanbul ignore else */
8460 if (reason.status === 404) {
8461 return value;
8462 } else {
8463 throw reason;
8464 }
8465 };
8466 }
8467
  // returns a promise for a list of docs to update, based on the input docId.
  // the order doesn't matter, because post-3.2.0, bulkDocs
  // is an atomic operation in all three adapters.
  async function getDocsToPersist(docId, view, docIdsToChangesAndEmits) {
    const metaDocId = '_local/doc_' + docId;
    const defaultMetaDoc = {_id: metaDocId, keys: []};
    // [indexableKeysToKeyValues (Map), changes] for this source doc
    const docData = docIdsToChangesAndEmits.get(docId);
    const indexableKeysToKeyValues = docData[0];
    const changes = docData[1];

    // the meta doc tracks which index keys this source doc last emitted
    function getMetaDoc() {
      if (isGenOne(changes)) {
        // generation 1, so we can safely assume initial state
        // for performance reasons (avoids unnecessary GETs)
        return Promise.resolve(defaultMetaDoc);
      }
      return view.db.get(metaDocId).catch(defaultsTo(defaultMetaDoc));
    }

    // fetch the previously-written key/value docs listed in the meta doc
    function getKeyValueDocs(metaDoc) {
      if (!metaDoc.keys.length) {
        // no keys, no need for a lookup
        return Promise.resolve({rows: []});
      }
      return view.db.allDocs({
        keys: metaDoc.keys,
        include_docs: true
      });
    }

    // diff old emitted keys against new ones: mark stale key/value docs
    // deleted, update surviving ones, create docs for newly-emitted keys,
    // and refresh the meta doc's key list
    function processKeyValueDocs(metaDoc, kvDocsRes) {
      const kvDocs = [];
      const oldKeys = new Set();

      for (const row of kvDocsRes.rows) {
        const doc = row.doc;
        if (!doc) { // deleted
          continue;
        }
        kvDocs.push(doc);
        oldKeys.add(doc._id);
        doc._deleted = !indexableKeysToKeyValues.has(doc._id);
        if (!doc._deleted) {
          const keyValue = indexableKeysToKeyValues.get(doc._id);
          if ('value' in keyValue) {
            doc.value = keyValue.value;
          }
        }
      }
      const newKeys = mapToKeysArray(indexableKeysToKeyValues);
      for (const key of newKeys) {
        if (!oldKeys.has(key)) {
          // new doc
          const kvDoc = {
            _id: key
          };
          const keyValue = indexableKeysToKeyValues.get(key);
          if ('value' in keyValue) {
            kvDoc.value = keyValue.value;
          }
          kvDocs.push(kvDoc);
        }
      }
      metaDoc.keys = uniq(newKeys.concat(metaDoc.keys));
      kvDocs.push(metaDoc);

      return kvDocs;
    }

    const metaDoc = await getMetaDoc();
    const keyValueDocs = await getKeyValueDocs(metaDoc);
    return processKeyValueDocs(metaDoc, keyValueDocs);
  }
8541
  // Copy the source DB's latest purgeSeq into the view DB's
  // _local/purgeSeq doc, on the assumption that all pending purges have
  // just been processed. A 404 on the source's _local/purges doc means
  // nothing was ever purged, which is fine and swallowed.
  function updatePurgeSeq(view) {
    // with this approach, we just assume to have processed all missing purges and write the latest
    // purgeSeq into the _local/purgeSeq doc.
    return view.sourceDB.get('_local/purges').then(function (res$$1) {
      const purgeSeq = res$$1.purgeSeq;
      return view.db.get('_local/purgeSeq').then(function (res$$1) {
        return res$$1._rev;
      })
      // first write: no existing _local/purgeSeq doc, so no _rev
      .catch(defaultsTo(undefined))
      .then(function (rev$$1) {
        return view.db.put({
          _id: '_local/purgeSeq',
          _rev: rev$$1,
          purgeSeq,
        });
      });
    }).catch(function (err) {
      if (err.status !== 404) {
        throw err;
      }
    });
  }
8564
  // updates all emitted key/value docs and metaDocs in the mrview database
  // for the given batch of documents from the source database
  function saveKeyValues(view, docIdsToChangesAndEmits, seq) {
    var seqDocId = '_local/lastSeq';
    return view.db.get(seqDocId)
      // first run: no lastSeq doc yet, start from seq 0
      .catch(defaultsTo({_id: seqDocId, seq: 0}))
      .then(function (lastSeqDoc) {
        var docIds = mapToKeysArray(docIdsToChangesAndEmits);
        // compute the doc updates for every changed source doc in parallel
        return Promise.all(docIds.map(function (docId) {
          return getDocsToPersist(docId, view, docIdsToChangesAndEmits);
        })).then(function (listOfDocsToPersist) {
          var docsToPersist = listOfDocsToPersist.flat();
          lastSeqDoc.seq = seq;
          docsToPersist.push(lastSeqDoc);
          // write all docs in a single operation, update the seq once
          return view.db.bulkDocs({docs : docsToPersist});
        })
        // TODO: this should be placed somewhere else, probably? we're querying both docs twice
        // (first time when getting the actual purges).
        .then(() => updatePurgeSeq(view));
      });
  }
8587
8588 function getQueue(view) {
8589 const viewName = typeof view === 'string' ? view : view.name;
8590 let queue = persistentQueues[viewName];
8591 if (!queue) {
8592 queue = persistentQueues[viewName] = new TaskQueue$1();
8593 }
8594 return queue;
8595 }
8596
  // Serialize index updates through the view's queue so only one indexing
  // pass runs at a time for a given view.
  async function updateView(view, opts) {
    return sequentialize(getQueue(view), function () {
      return updateViewInQueue(view, opts);
    })();
  }
8602
  // Perform one full index update for a view: read the source DB's changes
  // feed in batches since the last indexed seq, run the map function over
  // each changed doc (folding in purged docs), and persist the emitted
  // key/values plus the new lastSeq. Must only run inside the view's task
  // queue (see updateView).
  async function updateViewInQueue(view, opts) {
    // bind the emit function once
    let mapResults;
    let doc;
    let taskId;

    // closure-based emit: collects map output for the doc currently in
    // the `doc` variable
    function emit(key, value) {
      const output = {id: doc._id, key: normalizeKey(key)};
      // Don't explicitly store the value unless it's defined and non-null.
      // This saves on storage space, because often people don't use it.
      if (typeof value !== 'undefined' && value !== null) {
        output.value = normalizeKey(value);
      }
      mapResults.push(output);
    }

    const mapFun = mapper(view.mapFun, emit);

    let currentSeq = view.seq || 0;

    // register an active task so callers can observe indexing progress
    function createTask() {
      return view.sourceDB.info().then(function (info) {
        taskId = view.sourceDB.activeTasks.add({
          name: 'view_indexing',
          total_items: info.update_seq - currentSeq,
        });
      });
    }

    // defer persisting a batch until its turn in the write queue
    function processChange(docIdsToChangesAndEmits, seq) {
      return function () {
        return saveKeyValues(view, docIdsToChangesAndEmits, seq);
      };
    }

    let indexed_docs = 0;
    const progress = {
      view: view.name,
      indexed_docs
    };
    view.sourceDB.emit('indexing', progress);

    const queue = new TaskQueue$1();

    // pull the next page of changes plus any recent purges, then process
    async function processNextBatch() {
      const response = await view.sourceDB.changes({
        return_docs: true,
        conflicts: true,
        include_docs: true,
        style: 'all_docs',
        since: currentSeq,
        limit: opts.changes_batch_size
      });
      const purges = await getRecentPurges();
      return processBatch(response, purges);
    }

    // docs purged from the source DB since this view last saw a purge,
    // resolved to {docId, doc} pairs (doc present only if a winning rev
    // still exists after the purge)
    function getRecentPurges() {
      return view.db.get('_local/purgeSeq').then(function (res$$1) {
        return res$$1.purgeSeq;
      })
      .catch(defaultsTo(-1))
      .then(function (purgeSeq) {
        return view.sourceDB.get('_local/purges').then(function (res$$1) {
          const recentPurges = res$$1.purges.filter(function (purge, index) {
            return index > purgeSeq;
          }).map((purge) => purge.docId);

          const uniquePurges = recentPurges.filter(function (docId, index) {
            return recentPurges.indexOf(docId) === index;
          });

          return Promise.all(uniquePurges.map(function (docId) {
            return view.sourceDB.get(docId).then(function (doc) {
              return { docId, doc };
            })
            .catch(defaultsTo({ docId }));
          }));
        })
        .catch(defaultsTo([]));
      });
    }

    function processBatch(response, purges) {
      const results = response.results;
      if (!results.length && !purges.length) {
        return;
      }

      // inject synthetic change entries for purged docs that did not
      // otherwise appear in this batch of changes
      for (const purge of purges) {
        const index = results.findIndex(function (change) {
          return change.id === purge.docId;
        });
        if (index < 0) {
          // mimic a db.remove() on the changes feed
          const entry = {
            _id: purge.docId,
            doc: {
              _id: purge.docId,
              _deleted: 1,
            },
            changes: [],
          };

          if (purge.doc) {
            // update with new winning rev after purge
            entry.doc = purge.doc;
            entry.changes.push({ rev: purge.doc._rev });
          }

          results.push(entry);
        }
      }

      const docIdsToChangesAndEmits = createDocIdsToChangesAndEmits(results);

      queue.add(processChange(docIdsToChangesAndEmits, currentSeq));

      indexed_docs = indexed_docs + results.length;
      const progress = {
        view: view.name,
        last_seq: response.last_seq,
        results_count: results.length,
        indexed_docs
      };
      view.sourceDB.emit('indexing', progress);
      view.sourceDB.activeTasks.update(taskId, {completed_items: indexed_docs});

      // a short batch means the changes feed has been drained
      if (results.length < opts.changes_batch_size) {
        return;
      }
      return processNextBatch();
    }

    // run the map function over each non-design doc in the batch, keyed by
    // doc id -> [emitted key/values, change info]
    function createDocIdsToChangesAndEmits(results) {
      const docIdsToChangesAndEmits = new Map();
      for (const change of results) {
        if (change.doc._id[0] !== '_') {
          mapResults = [];
          doc = change.doc;

          if (!doc._deleted) {
            tryMap(view.sourceDB, mapFun, doc);
          }
          mapResults.sort(sortByKeyThenValue);

          const indexableKeysToKeyValues = createIndexableKeysToKeyValues(mapResults);
          docIdsToChangesAndEmits.set(change.doc._id, [
            indexableKeysToKeyValues,
            change.changes
          ]);
        }
        currentSeq = change.seq;
      }
      return docIdsToChangesAndEmits;
    }

    // turn sorted emits into a Map of indexable-string key -> emitted row,
    // disambiguating repeated (key, id) pairs with the emit index
    function createIndexableKeysToKeyValues(mapResults) {
      const indexableKeysToKeyValues = new Map();
      let lastKey;
      for (let i = 0, len = mapResults.length; i < len; i++) {
        const emittedKeyValue = mapResults[i];
        const complexKey = [emittedKeyValue.key, emittedKeyValue.id];
        if (i > 0 && collate(emittedKeyValue.key, lastKey) === 0) {
          complexKey.push(i); // dup key+id, so make it unique
        }
        indexableKeysToKeyValues.set(toIndexableString(complexKey), emittedKeyValue);
        lastKey = emittedKeyValue.key;
      }
      return indexableKeysToKeyValues;
    }

    try {
      await createTask();
      await processNextBatch();
      await queue.finish();
      view.seq = currentSeq;
      view.sourceDB.activeTasks.remove(taskId);
    } catch (error) {
      // indexing failed: report the error through the active task
      view.sourceDB.activeTasks.remove(taskId, error);
    }
  }
8785
  // Apply the view's reduce function to sorted map results, honoring
  // group/group_level, then slice with limit/skip. Throws only if a
  // built-in reduce errors; a failing custom reduce yields a null value
  // (matching CouchDB semantics).
  function reduceView(view, results, options) {
    if (options.group_level === 0) {
      delete options.group_level;
    }

    const shouldGroup = options.group || options.group_level;
    const reduceFun = reducer(view.reduceFun);
    const groups = [];
    const lvl = isNaN(options.group_level)
      ? Number.POSITIVE_INFINITY
      : options.group_level;

    // bucket consecutive rows whose (possibly truncated) keys collate equal
    for (const result of results) {
      const last = groups[groups.length - 1];
      let groupKey = shouldGroup ? result.key : null;

      // only set group_level for array keys
      if (shouldGroup && Array.isArray(groupKey)) {
        groupKey = groupKey.slice(0, lvl);
      }

      if (last && collate(last.groupKey, groupKey) === 0) {
        last.keys.push([result.key, result.id]);
        last.values.push(result.value);
        continue;
      }
      groups.push({
        keys: [[result.key, result.id]],
        values: [result.value],
        groupKey
      });
    }

    results = [];
    for (const group of groups) {
      const reduceTry = tryReduce(view.sourceDB, reduceFun, group.keys, group.values, false);
      if (reduceTry.error && reduceTry.error instanceof BuiltInError) {
        // CouchDB returns an error if a built-in errors out
        throw reduceTry.error;
      }
      results.push({
        // CouchDB just sets the value to null if a non-built-in errors out
        value: reduceTry.error ? null : reduceTry.output,
        key: group.groupKey
      });
    }
    // no total_rows/offset when reducing
    return { rows: sliceResults(results, options.limit, options.skip) };
  }
8835
  // Serialize view reads through the view's queue so queries never
  // interleave with in-flight index updates.
  function queryView(view, opts) {
    return sequentialize(getQueue(view), function () {
      return queryViewInQueue(view, opts);
    })();
  }
8841
  // Read results out of an (already up-to-date) view's backing database,
  // translating map/reduce query options into allDocs options over the
  // indexable-string keyspace. Must only run inside the view's task queue
  // (see queryView).
  async function queryViewInQueue(view, opts) {
    let totalRows;
    const shouldReduce = view.reduceFun && opts.reduce !== false;
    const skip = opts.skip || 0;
    if (typeof opts.keys !== 'undefined' && !opts.keys.length) {
      // equivalent query
      opts.limit = 0;
      delete opts.keys;
    }

    // fetch raw rows from the mrview DB and convert each stored doc back
    // into a {key, id, value} map row
    async function fetchFromView(viewOpts) {
      viewOpts.include_docs = true;
      const res$$1 = await view.db.allDocs(viewOpts);
      totalRows = res$$1.total_rows;

      return res$$1.rows.map(function (result) {
        // implicit migration - in older versions of PouchDB,
        // we explicitly stored the doc as {id: ..., key: ..., value: ...}
        // this is tested in a migration test
        /* istanbul ignore next */
        if ('value' in result.doc && typeof result.doc.value === 'object' &&
          result.doc.value !== null) {
          const keys = Object.keys(result.doc.value).sort();
          // this detection method is not perfect, but it's unlikely the user
          // emitted a value which was an object with these 3 exact keys
          const expectedKeys = ['id', 'key', 'value'];
          if (!(keys < expectedKeys || keys > expectedKeys)) {
            return result.doc.value;
          }
        }

        const parsedKeyAndDocId = parseIndexableString(result.doc._id);
        return {
          key: parsedKeyAndDocId[0],
          id: parsedKeyAndDocId[1],
          value: ('value' in result.doc ? result.doc.value : null)
        };
      });
    }

    // shape the final response: reduce, attach totals/offset, and join
    // source docs back onto the rows when include_docs is set
    async function onMapResultsReady(rows) {
      let finalResults;
      if (shouldReduce) {
        finalResults = reduceView(view, rows, opts);
      } else if (typeof opts.keys === 'undefined') {
        finalResults = {
          total_rows: totalRows,
          offset: skip,
          rows
        };
      } else {
        // support limit, skip for keys query
        finalResults = {
          total_rows: totalRows,
          offset: skip,
          rows: sliceResults(rows,opts.limit,opts.skip)
        };
      }
      /* istanbul ignore if */
      if (opts.update_seq) {
        finalResults.update_seq = view.seq;
      }
      if (opts.include_docs) {
        // join all source docs in a single allDocs call
        const docIds = uniq(rows.map(rowToDocId));

        const allDocsRes = await view.sourceDB.allDocs({
          keys: docIds,
          include_docs: true,
          conflicts: opts.conflicts,
          attachments: opts.attachments,
          binary: opts.binary
        });
        const docIdsToDocs = new Map();
        for (const row of allDocsRes.rows) {
          docIdsToDocs.set(row.id, row.doc);
        }
        for (const row of rows) {
          const docId = rowToDocId(row);
          const doc = docIdsToDocs.get(docId);
          if (doc) {
            row.doc = doc;
          }
        }
      }
      return finalResults;
    }

    if (typeof opts.keys !== 'undefined') {
      // one ranged fetch per requested key, concatenated in request order
      const keys = opts.keys;
      const fetchPromises = keys.map(function (key) {
        const viewOpts = {
          startkey : toIndexableString([key]),
          endkey : toIndexableString([key, {}])
        };
        /* istanbul ignore if */
        if (opts.update_seq) {
          viewOpts.update_seq = true;
        }
        return fetchFromView(viewOpts);
      });
      const result = await Promise.all(fetchPromises);
      const flattenedResult = result.flat();
      return onMapResultsReady(flattenedResult);
    } else { // normal query, no 'keys'
      const viewOpts = {
        descending : opts.descending
      };
      /* istanbul ignore if */
      if (opts.update_seq) {
        viewOpts.update_seq = true;
      }
      let startkey;
      let endkey;
      // 'startkey'/'endkey' take precedence over 'start_key'/'end_key'
      if ('start_key' in opts) {
        startkey = opts.start_key;
      }
      if ('startkey' in opts) {
        startkey = opts.startkey;
      }
      if ('end_key' in opts) {
        endkey = opts.end_key;
      }
      if ('endkey' in opts) {
        endkey = opts.endkey;
      }
      if (typeof startkey !== 'undefined') {
        viewOpts.startkey = opts.descending ?
          toIndexableString([startkey, {}]) :
          toIndexableString([startkey]);
      }
      if (typeof endkey !== 'undefined') {
        let inclusiveEnd = opts.inclusive_end !== false;
        if (opts.descending) {
          inclusiveEnd = !inclusiveEnd;
        }

        viewOpts.endkey = toIndexableString(
          inclusiveEnd ? [endkey, {}] : [endkey]);
      }
      if (typeof opts.key !== 'undefined') {
        // exact-key query: a [key] .. [key, {}] range over the index
        const keyStart = toIndexableString([opts.key]);
        const keyEnd = toIndexableString([opts.key, {}]);
        if (viewOpts.descending) {
          viewOpts.endkey = keyStart;
          viewOpts.startkey = keyEnd;
        } else {
          viewOpts.startkey = keyStart;
          viewOpts.endkey = keyEnd;
        }
      }
      if (!shouldReduce) {
        if (typeof opts.limit === 'number') {
          viewOpts.limit = opts.limit;
        }
        viewOpts.skip = skip;
      }

      const result = await fetchFromView(viewOpts);
      return onMapResultsReady(result);
    }
  }
9003
9004 async function httpViewCleanup(db) {
9005 const response = await db.fetch('_view_cleanup', {
9006 headers: new Headers({'Content-Type': 'application/json'}),
9007 method: 'POST'
9008 });
9009 return response.json();
9010 }
9011
  // Destroy every dependent mrview database whose design doc or view
  // definition no longer exists, based on the bookkeeping kept in the
  // '_local/<localDocName>' doc. Resolves to {ok: true}.
  async function localViewCleanup(db) {
    try {
      const metaDoc = await db.get('_local/' + localDocName);
      // '_design/ddoc' -> Set of view names recorded under it
      const docsToViews = new Map();

      for (const fullViewName of Object.keys(metaDoc.views)) {
        const parts = parseViewName(fullViewName);
        const designDocName = '_design/' + parts[0];
        const viewName = parts[1];
        let views = docsToViews.get(designDocName);
        if (!views) {
          views = new Set();
          docsToViews.set(designDocName, views);
        }
        views.add(viewName);
      }
      const opts = {
        keys : mapToKeysArray(docsToViews),
        include_docs : true
      };

      const res$$1 = await db.allDocs(opts);
      // viewDBName -> truthy if its view definition still exists
      const viewsToStatus = {};
      for (const row of res$$1.rows) {
        const ddocName = row.key.substring(8); // cuts off '_design/'
        for (const viewName of docsToViews.get(row.key)) {
          let fullViewName = ddocName + '/' + viewName;
          /* istanbul ignore if */
          if (!metaDoc.views[fullViewName]) {
            // new format, without slashes, to support PouchDB 2.2.0
            // migration test in pouchdb's browser.migration.js verifies this
            fullViewName = viewName;
          }
          const viewDBNames = Object.keys(metaDoc.views[fullViewName]);
          // design doc deleted, or view function nonexistent
          const statusIsGood = row.doc && row.doc.views &&
            row.doc.views[viewName];
          for (const viewDBName of viewDBNames) {
            viewsToStatus[viewDBName] = viewsToStatus[viewDBName] || statusIsGood;
          }
        }
      }

      const dbsToDelete = Object.keys(viewsToStatus)
        .filter(function (viewDBName) { return !viewsToStatus[viewDBName]; });

      // destroy each orphaned view DB through its own queue so destruction
      // cannot race an in-flight update/query of the same view
      const destroyPromises = dbsToDelete.map(function (viewDBName) {
        return sequentialize(getQueue(viewDBName), function () {
          return new db.constructor(viewDBName, db.__opts).destroy();
        })();
      });

      return Promise.all(destroyPromises).then(function () {
        return {ok: true};
      });
    } catch (err) {
      // no meta doc means no dependent views were ever created
      if (err.status === 404) {
        return {ok: true};
      } else {
        throw err;
      }
    }
  }
9075
  // Core query dispatcher: custom adapter _query > remote HTTP query >
  // local temp view (built, queried, destroyed) > local persistent view
  // (created/updated per opts.stale, then queried).
  async function queryPromised(db, fun, opts) {
    /* istanbul ignore next */
    if (typeof db._query === 'function') {
      return customQuery(db, fun, opts);
    }
    if (isRemote(db)) {
      return httpQuery(db, fun, opts);
    }

    const updateViewOpts = {
      changes_batch_size: db.__opts.view_update_changes_batch_size || CHANGES_BATCH_SIZE$1
    };

    if (typeof fun !== 'string') {
      // temp_view
      checkQueryParseError(opts, fun);

      tempViewQueue.add(async function () {
        const view = await createView(
          /* sourceDB */ db,
          /* viewName */ 'temp_view/temp_view',
          /* mapFun */ fun.map,
          /* reduceFun */ fun.reduce,
          /* temporary */ true,
          /* localDocName */ localDocName);

        // always destroy the temp view's backing DB, even on failure
        return fin(updateView(view, updateViewOpts).then(
          function () { return queryView(view, opts); }),
          function () { return view.db.destroy(); }
        );
      });
      return tempViewQueue.finish();
    } else {
      // persistent view
      const fullViewName = fun;
      const parts = parseViewName(fullViewName);
      const designDocName = parts[0];
      const viewName = parts[1];

      const doc = await db.get('_design/' + designDocName);
      fun = doc.views && doc.views[viewName];

      if (!fun) {
        // basic validator; it's assumed that every subclass would want this
        throw new NotFoundError$1(`ddoc ${doc._id} has no view named ${viewName}`);
      }

      ddocValidator(doc, viewName);
      checkQueryParseError(opts, fun);

      const view = await createView(
        /* sourceDB */ db,
        /* viewName */ fullViewName,
        /* mapFun */ fun.map,
        /* reduceFun */ fun.reduce,
        /* temporary */ false,
        /* localDocName */ localDocName);

      if (opts.stale === 'ok' || opts.stale === 'update_after') {
        // serve possibly-stale results now; optionally refresh afterwards
        if (opts.stale === 'update_after') {
          nextTick(function () {
            updateView(view, updateViewOpts);
          });
        }
        return queryView(view, opts);
      } else { // stale not ok
        await updateView(view, updateViewOpts);
        return queryView(view, opts);
      }
    }
  }
9147
/**
 * Public `db.query()` entry point. Normalizes arguments, kicks off the
 * promised query, and also supports the node-style callback convention.
 */
function abstractQuery(fun, opts, callback) {
  // Support the (fun, callback) calling convention.
  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }

  const normalizedOpts = opts ? coerceOptions(opts) : {};
  // A bare map function is shorthand for a {map} temporary view.
  const viewFun = typeof fun === 'function' ? { map: fun } : fun;

  const db = this;
  const promise = Promise.resolve().then(
    () => queryPromised(db, viewFun, normalizedOpts));
  promisedCallback(promise, callback);
  return promise;
}
9166
/**
 * Public `db.viewCleanup()` entry point (callbackified). Dispatches to the
 * adapter's custom `_viewCleanup`, the HTTP implementation for remote
 * databases, or the local cleanup routine.
 */
const abstractViewCleanup = callbackify(function () {
  /* istanbul ignore next */
  if (typeof this._viewCleanup === 'function') {
    return customViewCleanup(this);
  }
  return isRemote(this) ? httpViewCleanup(this) : localViewCleanup(this);
});
9178
9179 return {
9180 query: abstractQuery,
9181 viewCleanup: abstractViewCleanup
9182 };
9183}
9184
// CouchDB-compatible built-in reduce implementations.
var builtInReduce = {
  _sum(keys, values) {
    return sum(values);
  },

  _count(keys, values) {
    return values.length;
  },

  // no need to implement rereduce=true, because Pouch
  // will never call it
  _stats(keys, values) {
    let sumsqr = 0;
    for (const num of values) {
      sumsqr += num * num;
    }
    return {
      sum: sum(values),
      min: Math.min.apply(null, values),
      max: Math.max.apply(null, values),
      count: values.length,
      sumsqr
    };
  }
};
9214
/**
 * Dispatch CouchDB's built-in reduce names to their implementations.
 * Returns undefined for user-defined reduce sources; throws for an
 * unrecognized name that still starts with "_".
 */
function getBuiltIn(reduceFunString) {
  if (/^_sum/.test(reduceFunString)) {
    return builtInReduce._sum;
  }
  if (/^_count/.test(reduceFunString)) {
    return builtInReduce._count;
  }
  if (/^_stats/.test(reduceFunString)) {
    return builtInReduce._stats;
  }
  if (/^_/.test(reduceFunString)) {
    throw new Error(reduceFunString + ' is not a supported reduce function.');
  }
}
9226
/**
 * Build the callable map function for a view. A two-argument function is
 * used directly with `emit` injected; anything else is stringified and
 * evaluated with `emit` in scope.
 */
function mapper(mapFun, emit) {
  // for temp_views one can use emit(doc, emit), see #38
  const takesEmit = typeof mapFun === "function" && mapFun.length === 2;
  if (!takesEmit) {
    return evalFunction(mapFun.toString(), emit);
  }
  return function (doc) {
    return mapFun(doc, emit);
  };
}
9238
/**
 * Build the callable reduce function for a view: a built-in ("_sum",
 * "_count", "_stats") when the source names one, otherwise the evaluated
 * user-defined function.
 */
function reducer(reduceFun) {
  const source = reduceFun.toString();
  return getBuiltIn(source) || evalFunction(source);
}
9248
/**
 * Validate that `ddoc` defines `viewName` with a string `map` member
 * (views must be serialized functions).
 *
 * Fix: previously a missing `views` object or missing view produced a
 * raw TypeError (`fun.map` on undefined); now both cases throw the same
 * NotFoundError as a non-string map.
 *
 * @throws {NotFoundError$1} when the view is absent or its map is not a string
 */
function ddocValidator(ddoc, viewName) {
  var fun = ddoc.views && ddoc.views[viewName];
  if (!fun || typeof fun.map !== 'string') {
    throw new NotFoundError$1('ddoc ' + ddoc._id + ' has no string view named ' +
      viewName + ', instead found object of type: ' + typeof (fun && fun.map));
  }
}
9256
// Name of the _local doc used to track map/reduce view metadata.
var localDocName = 'mrviews';
// Shared abstract map/reduce implementation, specialized with the
// eval-based mapper/reducer and the string-map ddoc validator above.
var abstract = createAbstractMapReduce(localDocName, mapper, reducer, ddocValidator);
9259
// Plugin entry point: run a map/reduce query against this database.
// Delegates to the shared abstract implementation with `this` preserved.
function query(fun, opts, callback) {
  return abstract.query.call(this, fun, opts, callback);
}
9263
// Plugin entry point: clean up stale view indexes for this database.
function viewCleanup(callback) {
  return abstract.viewCleanup.call(this, callback);
}
9267
// The map/reduce plugin object mixed into PouchDB instances.
var mapreduce = {
  query,
  viewCleanup
};
9272
/**
 * True when the target's local copy of `filename` is absent or its digest
 * differs from the remote attachment's digest.
 */
function fileHasChanged(localDoc, remoteDoc, filename) {
  const localAtt = localDoc._attachments && localDoc._attachments[filename];
  return !localAtt ||
    localAtt.digest !== remoteDoc._attachments[filename].digest;
}
9278
/**
 * Fetch every attachment of `doc` from `db` (pinned to the doc's rev).
 * Resolves with the attachment bodies in `Object.keys` order.
 */
function getDocAttachments(db, doc) {
  const fetches = Object.keys(doc._attachments).map((filename) =>
    db.getAttachment(doc._id, filename, { rev: doc._rev }));
  return Promise.all(fetches);
}
9285
/**
 * Fetch the attachments for `doc`, preferring copies already present on
 * the target. Only when pulling from a remote source into a local target
 * do we check the target first (per attachment digest); otherwise, and
 * whenever the target lookup 404s, everything is fetched from the source.
 */
function getDocAttachmentsFromTargetOrSource(target, src, doc) {
  const checkTargetFirst = isRemote(src) && !isRemote(target);
  const names = Object.keys(doc._attachments);

  if (!checkTargetFirst) {
    return getDocAttachments(src, doc);
  }

  return target.get(doc._id).then(function (localDoc) {
    const fetches = names.map(function (name) {
      return fileHasChanged(localDoc, doc, name)
        ? src.getAttachment(doc._id, name)
        : target.getAttachment(localDoc._id, name);
    });
    return Promise.all(fetches);
  }).catch(function (error) {
    /* istanbul ignore if */
    if (error.status !== 404) {
      throw error;
    }
    // Target doesn't have the doc at all: fall back to the source.
    return getDocAttachments(src, doc);
  });
}
9311
/**
 * Flatten a revsDiff-style `diffs` map (id -> {missing: [revs]}) into the
 * options object expected by `bulkGet`: one {id, rev} request per missing
 * revision, with full rev histories and latest-leaf resolution enabled.
 */
function createBulkGetOpts(diffs) {
  const requests = Object.keys(diffs).flatMap((id) =>
    diffs[id].missing.map((rev) => ({ id, rev })));

  return {
    docs: requests,
    revs: true,
    latest: true
  };
}
9330
9331//
9332// Fetch all the documents from the src as described in the "diffs",
9333// which is a mapping of docs IDs to revisions. If the state ever
9334// changes to "cancelled", then the returned promise will be rejected.
9335// Else it will be resolved with a list of fetched documents.
9336//
function getDocs(src, target, diffs, state) {
  diffs = clone(diffs); // we do not need to modify this

  // `ok` flips to false if any doc in the batch came back as an error
  // (the caller flags the whole batch); `resultDocs` accumulates the
  // successfully fetched docs.
  var resultDocs = [],
      ok = true;

  function getAllDocs() {

    var bulkGetOpts = createBulkGetOpts(diffs);

    if (!bulkGetOpts.docs.length) { // optimization: skip empty requests
      return;
    }

    return src.bulkGet(bulkGetOpts).then(function (bulkGetResponse) {
      /* istanbul ignore if */
      if (state.cancelled) {
        throw new Error('cancelled');
      }
      // bulkGet returns one result group per requested id, each holding
      // one entry per requested rev; fetch attachments for each doc in
      // parallel.
      return Promise.all(bulkGetResponse.results.map(function (bulkGetInfo) {
        return Promise.all(bulkGetInfo.docs.map(function (doc) {
          var remoteDoc = doc.ok;

          if (doc.error) {
            // when AUTO_COMPACTION is set, docs can be returned which look
            // like this: {"missing":"1-7c3ac256b693c462af8442f992b83696"}
            ok = false;
          }

          if (!remoteDoc || !remoteDoc._attachments) {
            return remoteDoc;
          }

          // Inline each attachment body into the doc, replacing the stub
          // metadata before it is written to the target.
          return getDocAttachmentsFromTargetOrSource(target, src, remoteDoc).then((attachments) => {
            var filenames = Object.keys(remoteDoc._attachments);
            attachments.forEach(function (attachment, i) {
              var att = remoteDoc._attachments[filenames[i]];
              delete att.stub;
              delete att.length;
              att.data = attachment;
            });

            return remoteDoc;
          });
        }));
      }))

      .then(function (results) {
        // Drop error placeholders (undefined entries) and flatten.
        resultDocs = resultDocs.concat(results.flat().filter(Boolean));
      });
    });
  }

  function returnResult() {
    return { ok, docs:resultDocs };
  }

  return Promise.resolve()
    .then(getAllDocs)
    .then(returnResult);
}
9398
// Format version written into checkpoint docs; bump when the format changes.
var CHECKPOINT_VERSION = 1;
// Identifies which replicator implementation wrote a checkpoint doc.
var REPLICATOR = "pouchdb";
// This is an arbitrary number to limit the
// amount of replication history we save in the checkpoint.
// If we save too much, the checkpoint docs will become very big,
// if we save fewer, we'll run a greater risk of having to
// read all the changes from 0 when checkpoint PUTs fail
// CouchDB 2.0 has a more involved history pruning,
// but let's go for the simple version for now.
var CHECKPOINT_HISTORY_SIZE = 5;
// Sequence to fall back to when no usable checkpoint exists.
var LOWEST_SEQ = 0;
9410
/**
 * Read the checkpoint doc `id` from `db` and record `checkpoint` as the
 * latest replicated sequence for `session`. Creates the doc on 404, keeps
 * a bounded per-session history, retries on 409 conflicts (a concurrent
 * replicator writing the same checkpoint), and resolves without writing
 * when `returnValue.cancelled` is set or the checkpoint is unchanged.
 *
 * Fix: removed the dead, empty-bodied conditional
 * `if (db.adapter === 'http' || db.adapter === 'https') ;` in the 404
 * branch — a bundler-stripped debug call that left only a bare `;`.
 */
function updateCheckpoint(db, id, checkpoint, session, returnValue) {
  return db.get(id).catch(function (err) {
    if (err.status === 404) {
      // First checkpoint for this replication: start a fresh doc.
      return {
        session_id: session,
        _id: id,
        history: [],
        replicator: REPLICATOR,
        version: CHECKPOINT_VERSION
      };
    }
    throw err;
  }).then(function (doc) {
    if (returnValue.cancelled) {
      return;
    }

    // if the checkpoint has not changed, do not update
    if (doc.last_seq === checkpoint) {
      return;
    }

    // Filter out current entry for this replication
    doc.history = (doc.history || []).filter(function (item) {
      return item.session_id !== session;
    });

    // Add the latest checkpoint to history
    doc.history.unshift({
      last_seq: checkpoint,
      session_id: session
    });

    // Just take the last pieces in history, to
    // avoid really big checkpoint docs.
    // see comment on history size above
    doc.history = doc.history.slice(0, CHECKPOINT_HISTORY_SIZE);

    doc.version = CHECKPOINT_VERSION;
    doc.replicator = REPLICATOR;

    doc.session_id = session;
    doc.last_seq = checkpoint;

    return db.put(doc).catch(function (err) {
      if (err.status === 409) {
        // retry; someone is trying to write a checkpoint simultaneously
        return updateCheckpoint(db, id, checkpoint, session, returnValue);
      }
      throw err;
    });
  });
}
9465
/**
 * Tracks replication progress by writing checkpoint docs (named by the
 * replication id) to the source and/or target databases, as controlled by
 * opts.writeSourceCheckpoint / opts.writeTargetCheckpoint.
 */
class CheckpointerInternal {
  constructor(src, target, id, returnValue, opts = {
    writeSourceCheckpoint: true,
    writeTargetCheckpoint: true,
  }) {
    this.src = src;
    this.target = target;
    this.id = id;
    this.returnValue = returnValue;
    this.opts = opts;

    // A caller-supplied opts object may omit either flag; default to true.
    if (typeof opts.writeSourceCheckpoint === "undefined") {
      opts.writeSourceCheckpoint = true;
    }

    if (typeof opts.writeTargetCheckpoint === "undefined") {
      opts.writeTargetCheckpoint = true;
    }
  }

  // Persist a checkpoint: target first, then source.
  writeCheckpoint(checkpoint, session) {
    var self = this;
    return this.updateTarget(checkpoint, session).then(function () {
      return self.updateSource(checkpoint, session);
    });
  }

  updateTarget(checkpoint, session) {
    if (this.opts.writeTargetCheckpoint) {
      return updateCheckpoint(this.target, this.id, checkpoint,
        session, this.returnValue);
    } else {
      return Promise.resolve(true);
    }
  }

  updateSource(checkpoint, session) {
    if (this.opts.writeSourceCheckpoint) {
      var self = this;
      return updateCheckpoint(this.src, this.id, checkpoint,
        session, this.returnValue)
        .catch(function (err) {
          // A 4xx (e.g. read-only source) permanently disables source
          // checkpointing instead of failing the replication.
          if (isForbiddenError(err)) {
            self.opts.writeSourceCheckpoint = false;
            return true;
          }
          throw err;
        });
    } else {
      return Promise.resolve(true);
    }
  }

  // Determine the sequence to resume replication from by reconciling the
  // checkpoint docs stored on source and target.
  getCheckpoint() {
    var self = this;

    // Checkpointing disabled on both sides: always start from scratch.
    if (!self.opts.writeSourceCheckpoint && !self.opts.writeTargetCheckpoint) {
      return Promise.resolve(LOWEST_SEQ);
    }

    // Source-only checkpointing: trust the source doc (404 -> start over).
    if (self.opts && self.opts.writeSourceCheckpoint && !self.opts.writeTargetCheckpoint) {
      return self.src.get(self.id).then(function (sourceDoc) {
        return sourceDoc.last_seq || LOWEST_SEQ;
      }).catch(function (err) {
        /* istanbul ignore if */
        if (err.status !== 404) {
          throw err;
        }
        return LOWEST_SEQ;
      });
    }

    return self.target.get(self.id).then(function (targetDoc) {
      // Target-only checkpointing: trust the target doc.
      if (self.opts && self.opts.writeTargetCheckpoint && !self.opts.writeSourceCheckpoint) {
        return targetDoc.last_seq || LOWEST_SEQ;
      }

      return self.src.get(self.id).then(function (sourceDoc) {
        // Since we can't migrate an old version doc to a new one
        // (no session id), we just go with the lowest seq in this case
        /* istanbul ignore if */
        if (targetDoc.version !== sourceDoc.version) {
          return LOWEST_SEQ;
        }

        var version;
        if (targetDoc.version) {
          version = targetDoc.version.toString();
        } else {
          version = "undefined";
        }

        // Pick the comparison strategy matching the doc format version.
        if (version in comparisons) {
          return comparisons[version](targetDoc, sourceDoc);
        }
        /* istanbul ignore next */
        return LOWEST_SEQ;
      }, function (err) {
        // Source doc missing while the target has progress: seed the
        // source checkpoint and restart from the beginning.
        if (err.status === 404 && targetDoc.last_seq) {
          return self.src.put({
            _id: self.id,
            last_seq: LOWEST_SEQ
          }).then(function () {
            return LOWEST_SEQ;
          }, function (err) {
            if (isForbiddenError(err)) {
              self.opts.writeSourceCheckpoint = false;
              return targetDoc.last_seq;
            }
            /* istanbul ignore next */
            return LOWEST_SEQ;
          });
        }
        throw err;
      });
    }).catch(function (err) {
      if (err.status !== 404) {
        throw err;
      }
      return LOWEST_SEQ;
    });
  }
}
9589
// Checkpoint-doc comparison strategies keyed by doc format version.
var comparisons = {
  "undefined": function (targetDoc, sourceDoc) {
    // This is the previous (pre-versioned) comparison function: only
    // trust the seq when both sides agree exactly.
    if (collate(targetDoc.last_seq, sourceDoc.last_seq) !== 0) {
      /* istanbul ignore next */
      return 0;
    }
    return sourceDoc.last_seq;
  },
  "1": function (targetDoc, sourceDoc) {
    // This is the comparison function ported from CouchDB
    return compareReplicationLogs(sourceDoc, targetDoc).last_seq;
  }
};
9604
9605// This checkpoint comparison is ported from CouchDBs source
9606// they come from here:
9607// https://github.com/apache/couchdb-couch-replicator/blob/master/src/couch_replicator.erl#L863-L906
9608
/**
 * Compare two replication checkpoint logs. Matching session ids mean both
 * sides agree, so the source log is reused as-is; otherwise walk both
 * histories to find common ground.
 */
function compareReplicationLogs(srcDoc, tgtDoc) {
  if (srcDoc.session_id !== tgtDoc.session_id) {
    return compareReplicationHistory(srcDoc.history, tgtDoc.history);
  }
  return {
    last_seq: srcDoc.last_seq,
    history: srcDoc.history
  };
}
9619
/**
 * Walk both session histories (newest first) looking for a shared session;
 * resolves to the seq/history to resume from, or LOWEST_SEQ when the
 * histories have nothing in common.
 */
function compareReplicationHistory(sourceHistory, targetHistory) {
  // the erlang loop via function arguments is not so easy to repeat in JS
  // therefore, doing this as recursion
  const [S, ...sourceRest] = sourceHistory;
  const [T, ...targetRest] = targetHistory;

  if (!S || targetHistory.length === 0) {
    return {
      last_seq: LOWEST_SEQ,
      history: []
    };
  }

  /* istanbul ignore if */
  if (hasSessionId(S.session_id, targetHistory)) {
    return {
      last_seq: S.last_seq,
      history: sourceHistory
    };
  }

  if (hasSessionId(T.session_id, sourceRest)) {
    return {
      last_seq: T.last_seq,
      history: targetRest
    };
  }

  return compareReplicationHistory(sourceRest, targetRest);
}
9654
/**
 * True when some entry in `history` carries `sessionId`. A falsy
 * sessionId never matches.
 */
function hasSessionId(sessionId, history) {
  if (!sessionId) {
    return false;
  }
  for (const entry of history) {
    if (entry.session_id === sessionId) {
      return true;
    }
  }
  return false;
}
9669
/**
 * Treat any numeric 4xx status as "forbidden" for checkpoint-writing
 * purposes (read-only databases, auth failures, missing endpoints, ...).
 */
function isForbiddenError(err) {
  const status = err.status;
  return typeof status === 'number' && status >= 400 && status < 500;
}
9673
/**
 * Factory wrapper for CheckpointerInternal.
 * NOTE(review): the `this instanceof CheckpointerInternal` guard is
 * effectively always false (neither `Checkpointer(...)` nor
 * `new Checkpointer(...)` makes `this` a CheckpointerInternal), so in
 * practice this always returns a fresh CheckpointerInternal.
 */
function Checkpointer(src, target, id, returnValue, opts) {
  if (!(this instanceof CheckpointerInternal)) {
    return new CheckpointerInternal(src, target, id, returnValue, opts);
  }
  // Unreachable in normal use (see note above).
  return Checkpointer;
}
9680
// Initial back-off delay (ms) before retrying a failed replication.
var STARTING_BACK_OFF = 0;
9682
/**
 * Handle a replication failure: emit events, move the replicator to the
 * 'stopped' state, and schedule `callback` (a retry) after a delay computed
 * by the (possibly user-supplied) back_off_function. With opts.retry ===
 * false the error is fatal and no retry is scheduled.
 */
function backOff(opts, returnValue, error, callback) {
  if (opts.retry === false) {
    returnValue.emit('error', error);
    returnValue.removeAllListeners();
    return;
  }
  /* istanbul ignore if */
  if (typeof opts.back_off_function !== 'function') {
    opts.back_off_function = defaultBackOff;
  }
  returnValue.emit('requestError', error);
  if (returnValue.state === 'active' || returnValue.state === 'pending') {
    returnValue.emit('paused', error);
    returnValue.state = 'stopped';
    // Reset the back-off delay once the replication becomes active again;
    // if it pauses again first, stop waiting for that activation.
    var backOffSet = function backoffTimeSet() {
      opts.current_back_off = STARTING_BACK_OFF;
    };
    var removeBackOffSetter = function removeBackOffTimeSet() {
      returnValue.removeListener('active', backOffSet);
    };
    returnValue.once('paused', removeBackOffSetter);
    returnValue.once('active', backOffSet);
  }

  // Grow the delay via the back-off function and schedule the retry.
  opts.current_back_off = opts.current_back_off || STARTING_BACK_OFF;
  opts.current_back_off = opts.back_off_function(opts.current_back_off);
  setTimeout(callback, opts.current_back_off);
}
9711
/**
 * Return a copy of `queryParams` whose keys are inserted in collation
 * order, so JSON.stringify produces a stable string for hashing.
 */
function sortObjectPropertiesByKey(queryParams) {
  const sorted = {};
  for (const key of Object.keys(queryParams).sort(collate)) {
    sorted[key] = queryParams[key];
  }
  return sorted;
}
9718
9719// Generate a unique id particular to this replication.
9720// Not guaranteed to align perfectly with CouchDB's rep ids.
function generateReplicationId(src, target, opts) {
  var docIds = opts.doc_ids ? opts.doc_ids.sort(collate) : '';
  var filterFun = opts.filter ? opts.filter.toString() : '';
  var queryParams = '';
  var filterViewName = '';
  var selector = '';

  // possibility for checkpoints to be lost here as behaviour of
  // JSON.stringify is not stable (see #6226)
  /* istanbul ignore if */
  if (opts.selector) {
    selector = JSON.stringify(opts.selector);
  }

  if (opts.filter && opts.query_params) {
    queryParams = JSON.stringify(sortObjectPropertiesByKey(opts.query_params));
  }

  if (opts.filter && opts.filter === '_view') {
    filterViewName = opts.view.toString();
  }

  // Hash both database ids plus everything that affects which docs get
  // replicated (filter, view, params, doc_ids, selector); any change in
  // these yields a different id and therefore fresh checkpoints.
  return Promise.all([src.id(), target.id()]).then(function (res) {
    var queryData = res[0] + res[1] + filterFun + filterViewName +
      queryParams + docIds + selector;
    return new Promise(function (resolve) {
      binaryMd5(queryData, resolve);
    });
  }).then(function (md5sum) {
    // can't use straight-up md5 alphabet, because
    // the char '/' is interpreted as being for attachments,
    // and + is also not url-safe
    md5sum = md5sum.replace(/\//g, '.').replace(/\+/g, '_');
    return '_local/' + md5sum;
  });
}
9757
9758function replicate(src, target, opts, returnValue, result) {
9759 var batches = []; // list of batches to be processed
9760 var currentBatch; // the batch currently being processed
9761 var pendingBatch = {
9762 seq: 0,
9763 changes: [],
9764 docs: []
9765 }; // next batch, not yet ready to be processed
9766 var writingCheckpoint = false; // true while checkpoint is being written
9767 var changesCompleted = false; // true when all changes received
9768 var replicationCompleted = false; // true when replication has completed
9769 // initial_last_seq is the state of the source db before
9770 // replication started, and it is _not_ updated during
9771 // replication or used anywhere else, as opposed to last_seq
9772 var initial_last_seq = 0;
9773 var last_seq = 0;
9774 var continuous = opts.continuous || opts.live || false;
9775 var batch_size = opts.batch_size || 100;
9776 var batches_limit = opts.batches_limit || 10;
9777 var style = opts.style || 'all_docs';
9778 var changesPending = false; // true while src.changes is running
9779 var doc_ids = opts.doc_ids;
9780 var selector = opts.selector;
9781 var repId;
9782 var checkpointer;
9783 var changedDocs = [];
9784 // Like couchdb, every replication gets a unique session id
9785 var session = uuid();
9786 var taskId;
9787
9788 result = result || {
9789 ok: true,
9790 start_time: new Date().toISOString(),
9791 docs_read: 0,
9792 docs_written: 0,
9793 doc_write_failures: 0,
9794 errors: []
9795 };
9796
9797 var changesOpts = {};
9798 returnValue.ready(src, target);
9799
  // Lazily create the Checkpointer once the replication id is computed.
  // Honors opts.checkpoint: false | 'source' | 'target' | default (both).
  function initCheckpointer() {
    if (checkpointer) {
      return Promise.resolve();
    }
    return generateReplicationId(src, target, opts).then(function (res$$1) {
      repId = res$$1;

      var checkpointOpts = {};
      if (opts.checkpoint === false) {
        checkpointOpts = { writeSourceCheckpoint: false, writeTargetCheckpoint: false };
      } else if (opts.checkpoint === 'source') {
        checkpointOpts = { writeSourceCheckpoint: true, writeTargetCheckpoint: false };
      } else if (opts.checkpoint === 'target') {
        checkpointOpts = { writeSourceCheckpoint: false, writeTargetCheckpoint: true };
      } else {
        checkpointOpts = { writeSourceCheckpoint: true, writeTargetCheckpoint: true };
      }

      checkpointer = new Checkpointer(src, target, repId, returnValue, checkpointOpts);
    });
  }
9821
  // Write the current batch's docs to the target with new_edits:false
  // (replication preserves revisions). Tracks written/failed counts,
  // emits 'denied' for auth errors, and collects the successfully
  // written docs into changedDocs for the 'change' event.
  function writeDocs() {
    changedDocs = [];

    if (currentBatch.docs.length === 0) {
      return;
    }
    var docs = currentBatch.docs;
    var bulkOpts = {timeout: opts.timeout};
    return target.bulkDocs({docs, new_edits: false}, bulkOpts).then(function (res$$1) {
      /* istanbul ignore if */
      if (returnValue.cancelled) {
        completeReplication();
        throw new Error('cancelled');
      }

      // `res` doesn't include full documents (which live in `docs`), so we create a map of
      // (id -> error), and check for errors while iterating over `docs`
      var errorsById = Object.create(null);
      res$$1.forEach(function (res$$1) {
        if (res$$1.error) {
          errorsById[res$$1.id] = res$$1;
        }
      });

      var errorsNo = Object.keys(errorsById).length;
      result.doc_write_failures += errorsNo;
      result.docs_written += docs.length - errorsNo;

      docs.forEach(function (doc) {
        var error = errorsById[doc._id];
        if (error) {
          result.errors.push(error);
          // Normalize error name. i.e. 'Unauthorized' -> 'unauthorized' (eg Sync Gateway)
          var errorName = (error.name || '').toLowerCase();
          if (errorName === 'unauthorized' || errorName === 'forbidden') {
            returnValue.emit('denied', clone(error));
          } else {
            // Any other write error aborts the batch.
            throw error;
          }
        } else {
          changedDocs.push(doc);
        }
      });

    }, function (err) {
      result.doc_write_failures += docs.length;
      throw err;
    });
  }
9871
  // Finish the current batch: emit the 'change' event, update the
  // active-tasks progress (fire-and-forget), write the checkpoint, then
  // clear currentBatch and pull more changes.
  function finishBatch() {
    if (currentBatch.error) {
      throw new Error('There was a problem getting docs.');
    }
    result.last_seq = last_seq = currentBatch.seq;
    var outResult = clone(result);
    if (changedDocs.length) {
      outResult.docs = changedDocs;
      // Attach 'pending' property if server supports it (CouchDB 2.0+)
      /* istanbul ignore if */
      if (typeof currentBatch.pending === 'number') {
        outResult.pending = currentBatch.pending;
        delete currentBatch.pending;
      }
      returnValue.emit('change', outResult);
    }
    writingCheckpoint = true;

    // Best-effort active-tasks progress update; not awaited.
    src.info().then(function (info) {
      var task = src.activeTasks.get(taskId);
      if (!currentBatch || !task) {
        return;
      }

      var completed = task.completed_items || 0;
      var total_items = parseInt(info.update_seq, 10) - parseInt(initial_last_seq, 10);
      src.activeTasks.update(taskId, {
        completed_items: completed + currentBatch.changes.length,
        total_items
      });
    });

    return checkpointer.writeCheckpoint(currentBatch.seq,
        session).then(function () {
      returnValue.emit('checkpoint', { 'checkpoint': currentBatch.seq });
      writingCheckpoint = false;
      /* istanbul ignore if */
      if (returnValue.cancelled) {
        completeReplication();
        throw new Error('cancelled');
      }
      currentBatch = undefined;
      getChanges();
    }).catch(function (err) {
      onCheckpointError(err);
      throw err;
    });
  }
9920
  // Ask the target which revisions from the current batch it is missing
  // (revsDiff); the answer is stored on currentBatch.diffs.
  function getDiffs() {
    var diff = {};
    currentBatch.changes.forEach(function (change) {
      returnValue.emit('checkpoint', { 'revs_diff': change });
      // Couchbase Sync Gateway emits these, but we can ignore them
      /* istanbul ignore if */
      if (change.id === "_user/") {
        return;
      }
      diff[change.id] = change.changes.map(function (x) {
        return x.rev;
      });
    });
    return target.revsDiff(diff).then(function (diffs) {
      /* istanbul ignore if */
      if (returnValue.cancelled) {
        completeReplication();
        throw new Error('cancelled');
      }
      // currentBatch.diffs elements are deleted as the documents are written
      currentBatch.diffs = diffs;
    });
  }
9944
  // Fetch the missing docs for the current batch from the source and
  // accumulate them on currentBatch.docs; a partial failure marks the
  // whole batch as errored (finishBatch will then throw).
  function getBatchDocs() {
    return getDocs(src, target, currentBatch.diffs, returnValue).then(function (got) {
      currentBatch.error = !got.ok;
      got.docs.forEach(function (doc) {
        delete currentBatch.diffs[doc._id];
        result.docs_read++;
        currentBatch.docs.push(doc);
      });
    });
  }
9955
  // Pull the next batch off the queue and run the pipeline:
  // revs_diff -> fetch docs -> write docs -> checkpoint -> recurse.
  // No-op while cancelled or while another batch is in flight.
  function startNextBatch() {
    if (returnValue.cancelled || currentBatch) {
      return;
    }
    if (batches.length === 0) {
      processPendingBatch(true);
      return;
    }
    currentBatch = batches.shift();
    returnValue.emit('checkpoint', { 'start_next_batch': currentBatch.seq });
    getDiffs()
      .then(getBatchDocs)
      .then(writeDocs)
      .then(finishBatch)
      .then(startNextBatch)
      .catch(function (err) {
        abortReplication('batch processing terminated with error', err);
      });
  }
9975
9976
  // Promote pendingBatch onto the batches queue when it is full
  // (batch_size), the changes feed has completed, or `immediate` is set.
  // When everything is drained, emits 'paused' and/or completes the
  // replication as appropriate.
  function processPendingBatch(immediate) {
    if (pendingBatch.changes.length === 0) {
      if (batches.length === 0 && !currentBatch) {
        if ((continuous && changesOpts.live) || changesCompleted) {
          returnValue.state = 'pending';
          returnValue.emit('paused');
        }
        if (changesCompleted) {
          completeReplication();
        }
      }
      return;
    }
    if (
      immediate ||
      changesCompleted ||
      pendingBatch.changes.length >= batch_size
    ) {
      batches.push(pendingBatch);
      pendingBatch = {
        seq: 0,
        changes: [],
        docs: []
      };
      if (returnValue.state === 'pending' || returnValue.state === 'stopped') {
        returnValue.state = 'active';
        returnValue.emit('active');
      }
      startNextBatch();
    }
  }
10008
10009
  // Abort with an error: discard all queued work and finish the
  // replication with `err` as the fatal error. No-op once completed.
  function abortReplication(reason, err) {
    if (replicationCompleted) {
      return;
    }
    if (!err.message) {
      err.message = reason;
    }
    result.ok = false;
    result.status = 'aborting';
    batches = [];
    pendingBatch = {
      seq: 0,
      changes: [],
      docs: []
    };
    completeReplication(err);
  }
10027
10028
  // Finish the replication, successfully or with `fatalError`. Emits
  // 'complete' on success; on a fatal error either emits 'error'
  // (auth failures) or schedules a retry via backOff.
  function completeReplication(fatalError) {
    if (replicationCompleted) {
      return;
    }
    /* istanbul ignore if */
    if (returnValue.cancelled) {
      result.status = 'cancelled';
      // Defer completion until the in-flight checkpoint write settles.
      if (writingCheckpoint) {
        return;
      }
    }
    result.status = result.status || 'complete';
    result.end_time = new Date().toISOString();
    result.last_seq = last_seq;
    replicationCompleted = true;

    src.activeTasks.remove(taskId, fatalError);

    if (fatalError) {
      // need to extend the error because Firefox considers ".result" read-only
      fatalError = createError(fatalError);
      fatalError.result = result;

      // Normalize error name. i.e. 'Unauthorized' -> 'unauthorized' (eg Sync Gateway)
      var errorName = (fatalError.name || '').toLowerCase();
      if (errorName === 'unauthorized' || errorName === 'forbidden') {
        returnValue.emit('error', fatalError);
        returnValue.removeAllListeners();
      } else {
        backOff(opts, returnValue, fatalError, function () {
          replicate(src, target, opts, returnValue);
        });
      }
    } else {
      returnValue.emit('complete', result);
      returnValue.removeAllListeners();
    }
  }
10067
  // Handle one row from the changes feed: record 'pending' (CouchDB 2.0+),
  // apply the client-side filter, and queue the change into pendingBatch;
  // batching is re-evaluated on the next tick.
  function onChange(change, pending, lastSeq) {
    /* istanbul ignore if */
    if (returnValue.cancelled) {
      return completeReplication();
    }
    // Attach 'pending' property if server supports it (CouchDB 2.0+)
    /* istanbul ignore if */
    if (typeof pending === 'number') {
      pendingBatch.pending = pending;
    }

    var filter = filterChange(opts)(change);
    if (!filter) {
      // update processed items count by 1
      var task = src.activeTasks.get(taskId);
      if (task) {
        // we can assume that task exists here? shouldn't be deleted by here.
        var completed = task.completed_items || 0;
        src.activeTasks.update(taskId, {completed_items: ++completed});
      }
      return;
    }
    pendingBatch.seq = change.seq || lastSeq;
    pendingBatch.changes.push(change);
    returnValue.emit('checkpoint', { 'pending_batch': pendingBatch.seq });
    nextTick(function () {
      processPendingBatch(batches.length === 0 && changesOpts.live);
    });
  }
10097
10098
  // A changes() request finished. If it returned rows, advance `since`
  // and fetch more; otherwise either switch to live mode (continuous) or
  // mark the feed completed, checkpointing the final seq when no batch
  // is in flight.
  function onChangesComplete(changes) {
    changesPending = false;
    /* istanbul ignore if */
    if (returnValue.cancelled) {
      return completeReplication();
    }

    // if no results were returned then we're done,
    // else fetch more
    if (changes.results.length > 0) {
      changesOpts.since = changes.results[changes.results.length - 1].seq;
      getChanges();
      processPendingBatch(true);
    } else {

      var complete = function () {
        if (continuous) {
          changesOpts.live = true;
          getChanges();
        } else {
          changesCompleted = true;
        }
        processPendingBatch(true);
      };

      // update the checkpoint so we start from the right seq next time
      if (!currentBatch && changes.results.length === 0) {
        writingCheckpoint = true;
        checkpointer.writeCheckpoint(changes.last_seq,
            session).then(function () {
          writingCheckpoint = false;
          result.last_seq = last_seq = changes.last_seq;
          if (returnValue.cancelled) {
            completeReplication();
            throw new Error('cancelled');
          } else {
            complete();
          }
        })
        .catch(onCheckpointError);
      } else {
        complete();
      }
    }
  }
10144
10145
  // A rejected changes feed aborts the replication (unless cancelled).
  function onChangesError(err) {
    changesPending = false;
    /* istanbul ignore if */
    if (returnValue.cancelled) {
      return completeReplication();
    }
    abortReplication('changes rejected', err);
  }
10154
10155
  // Start (or restart) a changes() request against the source, unless one
  // is already pending, the feed has completed, or the batch queue is at
  // capacity. Wires up cancellation so an in-flight request is aborted
  // when the replication is cancelled.
  function getChanges() {
    if (!(
      !changesPending &&
      !changesCompleted &&
      batches.length < batches_limit
      )) {
      return;
    }
    changesPending = true;
    function abortChanges() {
      changes.cancel();
    }
    function removeListener() {
      returnValue.removeListener('cancel', abortChanges);
    }

    if (returnValue._changes) { // remove old changes() and listeners
      returnValue.removeListener('cancel', returnValue._abortChanges);
      returnValue._changes.cancel();
    }
    returnValue.once('cancel', abortChanges);

    var changes = src.changes(changesOpts)
      .on('change', onChange);
    changes.then(removeListener, removeListener);
    changes.then(onChangesComplete)
      .catch(onChangesError);

    if (opts.retry) {
      // save for later so we can cancel if necessary
      returnValue._changes = changes;
      returnValue._abortChanges = abortChanges;
    }
  }
10190
10191 function createTask(checkpoint) {
10192 return src.info().then(function (info) {
10193 var total_items = typeof opts.since === 'undefined' ?
10194 parseInt(info.update_seq, 10) - parseInt(checkpoint, 10) :
10195 parseInt(info.update_seq, 10);
10196
10197 taskId = src.activeTasks.add({
10198 name: `${continuous ? 'continuous ' : ''}replication from ${info.db_name}` ,
10199 total_items,
10200 });
10201
10202 return checkpoint;
10203 });
10204 }
10205
10206 function startChanges() {
10207 initCheckpointer().then(function () {
10208 /* istanbul ignore if */
10209 if (returnValue.cancelled) {
10210 completeReplication();
10211 return;
10212 }
10213 return checkpointer.getCheckpoint().then(createTask).then(function (checkpoint) {
10214 last_seq = checkpoint;
10215 initial_last_seq = checkpoint;
10216 changesOpts = {
10217 since: last_seq,
10218 limit: batch_size,
10219 batch_size,
10220 style,
10221 doc_ids,
10222 selector,
10223 return_docs: true // required so we know when we're done
10224 };
10225 if (opts.filter) {
10226 if (typeof opts.filter !== 'string') {
10227 // required for the client-side filter in onChange
10228 changesOpts.include_docs = true;
10229 } else { // ddoc filter
10230 changesOpts.filter = opts.filter;
10231 }
10232 }
10233 if ('heartbeat' in opts) {
10234 changesOpts.heartbeat = opts.heartbeat;
10235 }
10236 if ('timeout' in opts) {
10237 changesOpts.timeout = opts.timeout;
10238 }
10239 if (opts.query_params) {
10240 changesOpts.query_params = opts.query_params;
10241 }
10242 if (opts.view) {
10243 changesOpts.view = opts.view;
10244 }
10245 getChanges();
10246 });
10247 }).catch(function (err) {
10248 abortReplication('getCheckpoint rejected with ', err);
10249 });
10250 }
10251
  /* istanbul ignore next */
  // Abort the replication when persisting a checkpoint fails; clear the
  // writingCheckpoint flag first so shutdown is not blocked on it.
  function onCheckpointError(err) {
    writingCheckpoint = false;
    abortReplication('writeCheckpoint completed with error', err);
  }
10257
  /* istanbul ignore if */
  if (returnValue.cancelled) { // cancelled immediately
    completeReplication();
    return;
  }

  // Attach the one-shot lifecycle listeners only once, even if this function
  // is re-entered (e.g. on a retry with the same returnValue).
  if (!returnValue._addedListeners) {
    returnValue.once('cancel', completeReplication);

    // Bridge the event API to the node-style opts.complete callback.
    if (typeof opts.complete === 'function') {
      returnValue.once('error', opts.complete);
      returnValue.once('complete', function (result) {
        opts.complete(null, result);
      });
    }
    returnValue._addedListeners = true;
  }

  // With an explicit opts.since, persist it as the starting checkpoint before
  // listening to changes; otherwise resume from the stored checkpoint.
  if (typeof opts.since === 'undefined') {
    startChanges();
  } else {
    initCheckpointer().then(function () {
      writingCheckpoint = true;
      return checkpointer.writeCheckpoint(opts.since, session);
    }).then(function () {
      writingCheckpoint = false;
      /* istanbul ignore if */
      if (returnValue.cancelled) {
        completeReplication();
        return;
      }
      last_seq = opts.since;
      startChanges();
    }).catch(onCheckpointError);
  }
}
10294
10295// We create a basic promise so the caller can cancel the replication possibly
10296// before we have actually started listening to changes etc
/**
 * Thenable EventEmitter handed back to replication callers. It lets them
 * cancel the replication (possibly before we have actually started listening
 * to changes) and await the final 'complete'/'error' outcome as a promise.
 */
class Replication extends EE {
  constructor() {
    super();
    this.cancelled = false;
    this.state = 'pending';
    const outcome = new Promise((fulfill, reject) => {
      this.once('complete', fulfill);
      this.once('error', reject);
    });
    this.then = (onFulfilled, onRejected) => outcome.then(onFulfilled, onRejected);
    this.catch = (onRejected) => outcome.catch(onRejected);
    // As we allow error handling via the "error" event as well, put a stub
    // in here so that rejecting never throws UnhandledError.
    this.catch(() => {});
  }

  // Mark the replication as cancelled and notify listeners.
  cancel() {
    this.cancelled = true;
    this.state = 'cancelled';
    this.emit('cancel');
  }

  // Couple this replication to its endpoints: destroying either database
  // cancels the replication; finishing detaches the destroy listeners.
  ready(src, target) {
    if (this._readyCalled) {
      return;
    }
    this._readyCalled = true;

    const onDestroy = () => this.cancel();
    src.once('destroyed', onDestroy);
    target.once('destroyed', onDestroy);
    const cleanup = () => {
      src.removeListener('destroyed', onDestroy);
      target.removeListener('destroyed', onDestroy);
    };
    this.once('complete', cleanup);
    this.once('error', cleanup);
  }
}
10342
/**
 * Resolve a replication endpoint to a database instance: string names are
 * opened through opts.PouchConstructor, anything else is assumed to already
 * be a database object and returned untouched.
 */
function toPouch(db, opts) {
  const PouchConstructor = opts.PouchConstructor;
  return typeof db === 'string' ? new PouchConstructor(db, opts) : db;
}
10351
/**
 * Entry point for PouchDB.replicate(src, target, opts, callback).
 * Normalizes the (opts, callback) argument shuffle, validates doc_ids,
 * fills in defaults, and starts the replication.
 *
 * Returns a Replication (thenable EventEmitter) the caller can cancel/await.
 * Throws BAD_REQUEST if opts.doc_ids is present but not an array.
 */
function replicateWrapper(src, target, opts, callback) {

  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }
  if (typeof opts === 'undefined') {
    opts = {};
  }

  if (opts.doc_ids && !Array.isArray(opts.doc_ids)) {
    throw createError(BAD_REQUEST,
                      "`doc_ids` filter parameter is not a list.");
  }

  // Clone BEFORE attaching internal fields so the caller's opts object is
  // never mutated (the original assigned opts.complete before cloning,
  // leaking an internal property onto caller-owned state).
  opts = clone(opts);
  opts.complete = callback;
  opts.continuous = opts.continuous || opts.live;
  opts.retry = ('retry' in opts) ? opts.retry : false;
  opts.PouchConstructor = opts.PouchConstructor || this;
  var replicateRet = new Replication(opts);
  var srcPouch = toPouch(src, opts);
  var targetPouch = toPouch(target, opts);
  replicate(srcPouch, targetPouch, opts, replicateRet);
  return replicateRet;
}
10378
/**
 * Entry point for PouchDB.sync(): two-way replication between src and
 * target, returning a Sync instance that manages the push and pull halves.
 */
function sync(src, target, opts, callback) {
  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }
  if (typeof opts === 'undefined') {
    opts = {};
  }
  opts = clone(opts);
  opts.PouchConstructor = opts.PouchConstructor || this;
  return new Sync(toPouch(src, opts), toPouch(target, opts), opts, callback);
}
10393
// Two-way replication: runs a push (src -> target) and a pull (target -> src)
// replication in parallel and re-emits their events tagged with a
// `direction` field. Also behaves as a thenable that settles when both
// directions have settled.
class Sync extends EE {
  constructor(src, target, opts, callback) {
    super();
    this.canceled = false;

    // Per-direction options: opts.push / opts.pull override the shared opts.
    const optsPush = opts.push ? Object.assign({}, opts, opts.push) : opts;
    const optsPull = opts.pull ? Object.assign({}, opts, opts.pull) : opts;

    this.push = replicateWrapper(src, target, optsPush);
    this.pull = replicateWrapper(target, src, optsPull);

    this.pushPaused = true;
    this.pullPaused = true;

    // Forwarders that re-emit each side's events with their direction.
    const pullChange = (change) => {
      this.emit('change', {
        direction: 'pull',
        change
      });
    };
    const pushChange = (change) => {
      this.emit('change', {
        direction: 'push',
        change
      });
    };
    const pushDenied = (doc) => {
      this.emit('denied', {
        direction: 'push',
        doc
      });
    };
    const pullDenied = (doc) => {
      this.emit('denied', {
        direction: 'pull',
        doc
      });
    };
    // 'paused' is only emitted once BOTH directions are paused;
    // 'active' is emitted when one direction wakes while the other is paused.
    const pushPaused = () => {
      this.pushPaused = true;
      /* istanbul ignore if */
      if (this.pullPaused) {
        this.emit('paused');
      }
    };
    const pullPaused = () => {
      this.pullPaused = true;
      /* istanbul ignore if */
      if (this.pushPaused) {
        this.emit('paused');
      }
    };
    const pushActive = () => {
      this.pushPaused = false;
      /* istanbul ignore if */
      if (this.pullPaused) {
        this.emit('active', {
          direction: 'push'
        });
      }
    };
    const pullActive = () => {
      this.pullPaused = false;
      /* istanbul ignore if */
      if (this.pushPaused) {
        this.emit('active', {
          direction: 'pull'
        });
      }
    };

    // Tracks, per event name, which directions have dropped their forwarder;
    // once both have, our own listeners for that event are removed too.
    let removed = {};

    const removeAll = (type) => { // type is 'push' or 'pull'
      return (event, func) => {
        const isChange = event === 'change' &&
          (func === pullChange || func === pushChange);
        const isDenied = event === 'denied' &&
          (func === pullDenied || func === pushDenied);
        const isPaused = event === 'paused' &&
          (func === pullPaused || func === pushPaused);
        const isActive = event === 'active' &&
          (func === pullActive || func === pushActive);

        if (isChange || isDenied || isPaused || isActive) {
          if (!(event in removed)) {
            removed[event] = {};
          }
          removed[event][type] = true;
          if (Object.keys(removed[event]).length === 2) {
            // both push and pull have asked to be removed
            this.removeAllListeners(event);
          }
        }
      };
    };

    // In live mode, completing one direction cancels the other.
    if (opts.live) {
      this.push.on('complete', this.pull.cancel.bind(this.pull));
      this.pull.on('complete', this.push.cancel.bind(this.push));
    }

    // Attach listener only if it is not already registered on ee.
    function addOneListener(ee, event, listener) {
      if (ee.listeners(event).indexOf(listener) == -1) {
        ee.on(event, listener);
      }
    }

    // Lazily wire the per-direction forwarders the first time a consumer
    // subscribes to one of the forwarded events on this Sync object.
    this.on('newListener', function (event) {
      if (event === 'change') {
        addOneListener(this.pull, 'change', pullChange);
        addOneListener(this.push, 'change', pushChange);
      } else if (event === 'denied') {
        addOneListener(this.pull, 'denied', pullDenied);
        addOneListener(this.push, 'denied', pushDenied);
      } else if (event === 'active') {
        addOneListener(this.pull, 'active', pullActive);
        addOneListener(this.push, 'active', pushActive);
      } else if (event === 'paused') {
        addOneListener(this.pull, 'paused', pullPaused);
        addOneListener(this.push, 'paused', pushPaused);
      }
    });

    // Mirror unsubscription: dropping a forwarded event on this Sync object
    // detaches the forwarders from both underlying replications.
    this.on('removeListener', function (event) {
      if (event === 'change') {
        this.pull.removeListener('change', pullChange);
        this.push.removeListener('change', pushChange);
      } else if (event === 'denied') {
        this.pull.removeListener('denied', pullDenied);
        this.push.removeListener('denied', pushDenied);
      } else if (event === 'active') {
        this.pull.removeListener('active', pullActive);
        this.push.removeListener('active', pushActive);
      } else if (event === 'paused') {
        this.pull.removeListener('paused', pullPaused);
        this.push.removeListener('paused', pushPaused);
      }
    });

    this.pull.on('removeListener', removeAll('pull'));
    this.push.on('removeListener', removeAll('push'));

    // Settle once both directions settle; success yields {push, pull}.
    const promise = Promise.all([
      this.push,
      this.pull
    ]).then((resp) => {
      const out = {
        push: resp[0],
        pull: resp[1]
      };
      this.emit('complete', out);
      if (callback) {
        callback(null, out);
      }
      this.removeAllListeners();
      return out;
    }, (err) => {
      this.cancel();
      if (callback) {
        // if there's a callback, then the callback can receive
        // the error event
        callback(err);
      } else {
        // if there's no callback, then we're safe to emit an error
        // event, which would otherwise throw an unhandled error
        // due to 'error' being a special event in EventEmitters
        this.emit('error', err);
      }
      this.removeAllListeners();
      if (callback) {
        // no sense throwing if we're already emitting an 'error' event
        throw err;
      }
    });

    this.then = function (success, err) {
      return promise.then(success, err);
    };

    this.catch = function (err) {
      return promise.catch(err);
    };
  }

  // Cancel both directions exactly once.
  cancel() {
    if (!this.canceled) {
      this.canceled = true;
      this.push.cancel();
      this.pull.cancel();
    }
  }
}
10587
/**
 * PouchDB plugin that installs the replication API: the static
 * PouchDB.replicate / PouchDB.sync entry points, the lazily-created
 * db.replicate.{to,from} accessor pair, and the db.sync() method.
 */
function replication(PouchDB) {
  PouchDB.replicate = replicateWrapper;
  PouchDB.sync = sync;

  Object.defineProperty(PouchDB.prototype, 'replicate', {
    get() {
      const db = this;
      if (typeof this.replicateMethods === 'undefined') {
        // Cache the pair so repeated accesses hand back the same functions.
        this.replicateMethods = {
          from: function (other, opts, callback) {
            return db.constructor.replicate(other, db, opts, callback);
          },
          to: function (other, opts, callback) {
            return db.constructor.replicate(db, other, opts, callback);
          }
        };
      }
      return this.replicateMethods;
    }
  });

  PouchDB.prototype.sync = function (dbName, opts, callback) {
    return this.constructor.sync(this, dbName, opts, callback);
  };
}
10613
// Assemble the default Node build of PouchDB from its constituent plugins:
// level-based local storage, the HTTP adapter, map/reduce views, and the
// replication/sync API defined above.
PouchDB.plugin(LevelPouch$1)
  .plugin(HttpPouch$1)
  .plugin(mapreduce)
  .plugin(replication);

// Pull from src because pouchdb-node/pouchdb-browser themselves

export default PouchDB;
10622
\No newline at end of file