/**
 * This file contains the full Bottleneck library (MIT) compiled to ES5.
 * https://github.com/SGrondin/bottleneck
 * It also contains the regenerator-runtime (MIT), necessary for Babel-generated ES5 code to execute promise and async/await code.
 * See the following link for Copyright and License information:
 * https://github.com/facebook/regenerator/blob/master/packages/regenerator-runtime/runtime.js
 */
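/*
 * Illustrative usage sketch, not part of the original bundle. When this file is
 * loaded via a <script> tag, the UMD wrapper below exposes the library as
 * `window.Bottleneck`; under CommonJS it is the module's export. The option names
 * and the `schedule` method used here are taken from identifiers that appear later
 * in this bundle; treat the exact values as an assumption.
 *
 *   var limiter = new Bottleneck({
 *     maxConcurrent: 1, // at most one job executing at a time
 *     minTime: 250      // at least 250 ms between job starts
 *   });
 *
 *   limiter.schedule(function () {
 *     return fetch("https://example.com/api"); // any promise-returning task
 *   }).then(function (res) {
 *     console.log("done", res.status);
 *   });
 */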
8(function (global, factory) {
9 typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
10 typeof define === 'function' && define.amd ? define(factory) :
11 (global.Bottleneck = factory());
12}(this, (function () { 'use strict';
13
14 var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
15
16 function createCommonjsModule(fn, module) {
17 return module = { exports: {} }, fn(module, module.exports), module.exports;
18 }
19
20 function getCjsExportFromNamespace (n) {
21 return n && n['default'] || n;
22 }
23
24 var runtime = createCommonjsModule(function (module) {
25 /**
26 * Copyright (c) 2014-present, Facebook, Inc.
27 *
28 * This source code is licensed under the MIT license found in the
29 * LICENSE file in the root directory of this source tree.
30 */
31
32 !(function(global) {
33
34 var Op = Object.prototype;
35 var hasOwn = Op.hasOwnProperty;
36 var undefined; // More compressible than void 0.
37 var $Symbol = typeof Symbol === "function" ? Symbol : {};
38 var iteratorSymbol = $Symbol.iterator || "@@iterator";
39 var asyncIteratorSymbol = $Symbol.asyncIterator || "@@asyncIterator";
40 var toStringTagSymbol = $Symbol.toStringTag || "@@toStringTag";
41 var runtime = global.regeneratorRuntime;
42 if (runtime) {
43 {
44 // If regeneratorRuntime is defined globally and we're in a module,
45 // make the exports object identical to regeneratorRuntime.
46 module.exports = runtime;
47 }
48 // Don't bother evaluating the rest of this file if the runtime was
49 // already defined globally.
50 return;
51 }
52
53 // Define the runtime globally (as expected by generated code) as either
54 // module.exports (if we're in a module) or a new, empty object.
55 runtime = global.regeneratorRuntime = module.exports;
56
57 function wrap(innerFn, outerFn, self, tryLocsList) {
58 // If outerFn provided and outerFn.prototype is a Generator, then outerFn.prototype instanceof Generator.
59 var protoGenerator = outerFn && outerFn.prototype instanceof Generator ? outerFn : Generator;
60 var generator = Object.create(protoGenerator.prototype);
61 var context = new Context(tryLocsList || []);
62
63 // The ._invoke method unifies the implementations of the .next,
64 // .throw, and .return methods.
65 generator._invoke = makeInvokeMethod(innerFn, self, context);
66
67 return generator;
68 }
69 runtime.wrap = wrap;
70
71 // Try/catch helper to minimize deoptimizations. Returns a completion
72 // record like context.tryEntries[i].completion. This interface could
73 // have been (and was previously) designed to take a closure to be
74 // invoked without arguments, but in all the cases we care about we
75 // already have an existing method we want to call, so there's no need
76 // to create a new function object. We can even get away with assuming
77 // the method takes exactly one argument, since that happens to be true
78 // in every case, so we don't have to touch the arguments object. The
79 // only additional allocation required is the completion record, which
80 // has a stable shape and so hopefully should be cheap to allocate.
81 function tryCatch(fn, obj, arg) {
82 try {
83 return { type: "normal", arg: fn.call(obj, arg) };
84 } catch (err) {
85 return { type: "throw", arg: err };
86 }
87 }
88
89 var GenStateSuspendedStart = "suspendedStart";
90 var GenStateSuspendedYield = "suspendedYield";
91 var GenStateExecuting = "executing";
92 var GenStateCompleted = "completed";
93
94 // Returning this object from the innerFn has the same effect as
95 // breaking out of the dispatch switch statement.
96 var ContinueSentinel = {};
97
98 // Dummy constructor functions that we use as the .constructor and
99 // .constructor.prototype properties for functions that return Generator
100 // objects. For full spec compliance, you may wish to configure your
101 // minifier not to mangle the names of these two functions.
102 function Generator() {}
103 function GeneratorFunction() {}
104 function GeneratorFunctionPrototype() {}
105
106 // This is a polyfill for %IteratorPrototype% for environments that
107 // don't natively support it.
108 var IteratorPrototype = {};
109 IteratorPrototype[iteratorSymbol] = function () {
110 return this;
111 };
112
113 var getProto = Object.getPrototypeOf;
114 var NativeIteratorPrototype = getProto && getProto(getProto(values([])));
115 if (NativeIteratorPrototype &&
116 NativeIteratorPrototype !== Op &&
117 hasOwn.call(NativeIteratorPrototype, iteratorSymbol)) {
118 // This environment has a native %IteratorPrototype%; use it instead
119 // of the polyfill.
120 IteratorPrototype = NativeIteratorPrototype;
121 }
122
123 var Gp = GeneratorFunctionPrototype.prototype =
124 Generator.prototype = Object.create(IteratorPrototype);
125 GeneratorFunction.prototype = Gp.constructor = GeneratorFunctionPrototype;
126 GeneratorFunctionPrototype.constructor = GeneratorFunction;
127 GeneratorFunctionPrototype[toStringTagSymbol] =
128 GeneratorFunction.displayName = "GeneratorFunction";
129
130 // Helper for defining the .next, .throw, and .return methods of the
131 // Iterator interface in terms of a single ._invoke method.
132 function defineIteratorMethods(prototype) {
133 ["next", "throw", "return"].forEach(function(method) {
134 prototype[method] = function(arg) {
135 return this._invoke(method, arg);
136 };
137 });
138 }
139
140 runtime.isGeneratorFunction = function(genFun) {
141 var ctor = typeof genFun === "function" && genFun.constructor;
142 return ctor
143 ? ctor === GeneratorFunction ||
144 // For the native GeneratorFunction constructor, the best we can
145 // do is to check its .name property.
146 (ctor.displayName || ctor.name) === "GeneratorFunction"
147 : false;
148 };
149
150 runtime.mark = function(genFun) {
151 if (Object.setPrototypeOf) {
152 Object.setPrototypeOf(genFun, GeneratorFunctionPrototype);
153 } else {
154 genFun.__proto__ = GeneratorFunctionPrototype;
155 if (!(toStringTagSymbol in genFun)) {
156 genFun[toStringTagSymbol] = "GeneratorFunction";
157 }
158 }
159 genFun.prototype = Object.create(Gp);
160 return genFun;
161 };
162
163 // Within the body of any async function, `await x` is transformed to
164 // `yield regeneratorRuntime.awrap(x)`, so that the runtime can test
165 // `hasOwn.call(value, "__await")` to determine if the yielded value is
166 // meant to be awaited.
167 runtime.awrap = function(arg) {
168 return { __await: arg };
169 };
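 // Illustrative sketch, not part of the original bundle: as the comment above
 // describes, inside a compiled async body a statement such as
 //
 //   var data = await fetchData();
 //
 // is emitted roughly as
 //
 //   var data = yield regeneratorRuntime.awrap(fetchData());
 //
 // AsyncIterator.invoke (below) then detects the `__await` marker on the yielded
 // object and waits for the wrapped promise before resuming the generator.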
170
171 function AsyncIterator(generator) {
172 function invoke(method, arg, resolve, reject) {
173 var record = tryCatch(generator[method], generator, arg);
174 if (record.type === "throw") {
175 reject(record.arg);
176 } else {
177 var result = record.arg;
178 var value = result.value;
179 if (value &&
180 typeof value === "object" &&
181 hasOwn.call(value, "__await")) {
182 return Promise.resolve(value.__await).then(function(value) {
183 invoke("next", value, resolve, reject);
184 }, function(err) {
185 invoke("throw", err, resolve, reject);
186 });
187 }
188
189 return Promise.resolve(value).then(function(unwrapped) {
190 // When a yielded Promise is resolved, its final value becomes
191 // the .value of the Promise<{value,done}> result for the
192 // current iteration.
193 result.value = unwrapped;
194 resolve(result);
195 }, function(error) {
196 // If a rejected Promise was yielded, throw the rejection back
197 // into the async generator function so it can be handled there.
198 return invoke("throw", error, resolve, reject);
199 });
200 }
201 }
202
203 var previousPromise;
204
205 function enqueue(method, arg) {
206 function callInvokeWithMethodAndArg() {
207 return new Promise(function(resolve, reject) {
208 invoke(method, arg, resolve, reject);
209 });
210 }
211
212 return previousPromise =
213 // If enqueue has been called before, then we want to wait until
214 // all previous Promises have been resolved before calling invoke,
215 // so that results are always delivered in the correct order. If
216 // enqueue has not been called before, then it is important to
217 // call invoke immediately, without waiting on a callback to fire,
218 // so that the async generator function has the opportunity to do
219 // any necessary setup in a predictable way. This predictability
220 // is why the Promise constructor synchronously invokes its
221 // executor callback, and why async functions synchronously
222 // execute code before the first await. Since we implement simple
223 // async functions in terms of async generators, it is especially
224 // important to get this right, even though it requires care.
225 previousPromise ? previousPromise.then(
226 callInvokeWithMethodAndArg,
227 // Avoid propagating failures to Promises returned by later
228 // invocations of the iterator.
229 callInvokeWithMethodAndArg
230 ) : callInvokeWithMethodAndArg();
231 }
232
233 // Define the unified helper method that is used to implement .next,
234 // .throw, and .return (see defineIteratorMethods).
235 this._invoke = enqueue;
236 }
237
238 defineIteratorMethods(AsyncIterator.prototype);
239 AsyncIterator.prototype[asyncIteratorSymbol] = function () {
240 return this;
241 };
242 runtime.AsyncIterator = AsyncIterator;
243
244 // Note that simple async functions are implemented on top of
245 // AsyncIterator objects; they just return a Promise for the value of
246 // the final result produced by the iterator.
247 runtime.async = function(innerFn, outerFn, self, tryLocsList) {
248 var iter = new AsyncIterator(
249 wrap(innerFn, outerFn, self, tryLocsList)
250 );
251
252 return runtime.isGeneratorFunction(outerFn)
253 ? iter // If outerFn is a generator, return the full iterator.
254 : iter.next().then(function(result) {
255 return result.done ? result.value : iter.next();
256 });
257 };
258
259 function makeInvokeMethod(innerFn, self, context) {
260 var state = GenStateSuspendedStart;
261
262 return function invoke(method, arg) {
263 if (state === GenStateExecuting) {
264 throw new Error("Generator is already running");
265 }
266
267 if (state === GenStateCompleted) {
268 if (method === "throw") {
269 throw arg;
270 }
271
272 // Be forgiving, per 25.3.3.3.3 of the spec:
273 // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-generatorresume
274 return doneResult();
275 }
276
277 context.method = method;
278 context.arg = arg;
279
280 while (true) {
281 var delegate = context.delegate;
282 if (delegate) {
283 var delegateResult = maybeInvokeDelegate(delegate, context);
284 if (delegateResult) {
285 if (delegateResult === ContinueSentinel) continue;
286 return delegateResult;
287 }
288 }
289
290 if (context.method === "next") {
291 // Setting context._sent for legacy support of Babel's
292 // function.sent implementation.
293 context.sent = context._sent = context.arg;
294
295 } else if (context.method === "throw") {
296 if (state === GenStateSuspendedStart) {
297 state = GenStateCompleted;
298 throw context.arg;
299 }
300
301 context.dispatchException(context.arg);
302
303 } else if (context.method === "return") {
304 context.abrupt("return", context.arg);
305 }
306
307 state = GenStateExecuting;
308
309 var record = tryCatch(innerFn, self, context);
310 if (record.type === "normal") {
311 // If an exception is thrown from innerFn, we leave state ===
312 // GenStateExecuting and loop back for another invocation.
313 state = context.done
314 ? GenStateCompleted
315 : GenStateSuspendedYield;
316
317 if (record.arg === ContinueSentinel) {
318 continue;
319 }
320
321 return {
322 value: record.arg,
323 done: context.done
324 };
325
326 } else if (record.type === "throw") {
327 state = GenStateCompleted;
328 // Dispatch the exception by looping back around to the
329 // context.dispatchException(context.arg) call above.
330 context.method = "throw";
331 context.arg = record.arg;
332 }
333 }
334 };
335 }
336
337 // Call delegate.iterator[context.method](context.arg) and handle the
338 // result, either by returning a { value, done } result from the
339 // delegate iterator, or by modifying context.method and context.arg,
340 // setting context.delegate to null, and returning the ContinueSentinel.
341 function maybeInvokeDelegate(delegate, context) {
342 var method = delegate.iterator[context.method];
343 if (method === undefined) {
344 // A .throw or .return when the delegate iterator has no .throw
345 // method always terminates the yield* loop.
346 context.delegate = null;
347
348 if (context.method === "throw") {
349 if (delegate.iterator.return) {
350 // If the delegate iterator has a return method, give it a
351 // chance to clean up.
352 context.method = "return";
353 context.arg = undefined;
354 maybeInvokeDelegate(delegate, context);
355
356 if (context.method === "throw") {
357 // If maybeInvokeDelegate(context) changed context.method from
358 // "return" to "throw", let that override the TypeError below.
359 return ContinueSentinel;
360 }
361 }
362
363 context.method = "throw";
364 context.arg = new TypeError(
365 "The iterator does not provide a 'throw' method");
366 }
367
368 return ContinueSentinel;
369 }
370
371 var record = tryCatch(method, delegate.iterator, context.arg);
372
373 if (record.type === "throw") {
374 context.method = "throw";
375 context.arg = record.arg;
376 context.delegate = null;
377 return ContinueSentinel;
378 }
379
380 var info = record.arg;
381
382 if (! info) {
383 context.method = "throw";
384 context.arg = new TypeError("iterator result is not an object");
385 context.delegate = null;
386 return ContinueSentinel;
387 }
388
389 if (info.done) {
390 // Assign the result of the finished delegate to the temporary
391 // variable specified by delegate.resultName (see delegateYield).
392 context[delegate.resultName] = info.value;
393
394 // Resume execution at the desired location (see delegateYield).
395 context.next = delegate.nextLoc;
396
397 // If context.method was "throw" but the delegate handled the
398 // exception, let the outer generator proceed normally. If
399 // context.method was "next", forget context.arg since it has been
400 // "consumed" by the delegate iterator. If context.method was
401 // "return", allow the original .return call to continue in the
402 // outer generator.
403 if (context.method !== "return") {
404 context.method = "next";
405 context.arg = undefined;
406 }
407
408 } else {
409 // Re-yield the result returned by the delegate method.
410 return info;
411 }
412
413 // The delegate iterator is finished, so forget it and continue with
414 // the outer generator.
415 context.delegate = null;
416 return ContinueSentinel;
417 }
418
419 // Define Generator.prototype.{next,throw,return} in terms of the
420 // unified ._invoke helper method.
421 defineIteratorMethods(Gp);
422
423 Gp[toStringTagSymbol] = "Generator";
424
425 // A Generator should always return itself as the iterator object when the
426 // @@iterator function is called on it. Some browsers' implementations of the
427 // iterator prototype chain incorrectly implement this, causing the Generator
428 // object to not be returned from this call. This ensures that doesn't happen.
429 // See https://github.com/facebook/regenerator/issues/274 for more details.
430 Gp[iteratorSymbol] = function() {
431 return this;
432 };
433
434 Gp.toString = function() {
435 return "[object Generator]";
436 };
437
438 function pushTryEntry(locs) {
439 var entry = { tryLoc: locs[0] };
440
441 if (1 in locs) {
442 entry.catchLoc = locs[1];
443 }
444
445 if (2 in locs) {
446 entry.finallyLoc = locs[2];
447 entry.afterLoc = locs[3];
448 }
449
450 this.tryEntries.push(entry);
451 }
452
453 function resetTryEntry(entry) {
454 var record = entry.completion || {};
455 record.type = "normal";
456 delete record.arg;
457 entry.completion = record;
458 }
459
460 function Context(tryLocsList) {
461 // The root entry object (effectively a try statement without a catch
462 // or a finally block) gives us a place to store values thrown from
463 // locations where there is no enclosing try statement.
464 this.tryEntries = [{ tryLoc: "root" }];
465 tryLocsList.forEach(pushTryEntry, this);
466 this.reset(true);
467 }
468
469 runtime.keys = function(object) {
470 var keys = [];
471 for (var key in object) {
472 keys.push(key);
473 }
474 keys.reverse();
475
476 // Rather than returning an object with a next method, we keep
477 // things simple and return the next function itself.
478 return function next() {
479 while (keys.length) {
480 var key = keys.pop();
481 if (key in object) {
482 next.value = key;
483 next.done = false;
484 return next;
485 }
486 }
487
488 // To avoid creating an additional object, we just hang the .value
489 // and .done properties off the next function object itself. This
490 // also ensures that the minifier will not anonymize the function.
491 next.done = true;
492 return next;
493 };
494 };
495
496 function values(iterable) {
497 if (iterable) {
498 var iteratorMethod = iterable[iteratorSymbol];
499 if (iteratorMethod) {
500 return iteratorMethod.call(iterable);
501 }
502
503 if (typeof iterable.next === "function") {
504 return iterable;
505 }
506
507 if (!isNaN(iterable.length)) {
508 var i = -1, next = function next() {
509 while (++i < iterable.length) {
510 if (hasOwn.call(iterable, i)) {
511 next.value = iterable[i];
512 next.done = false;
513 return next;
514 }
515 }
516
517 next.value = undefined;
518 next.done = true;
519
520 return next;
521 };
522
523 return next.next = next;
524 }
525 }
526
527 // Return an iterator with no values.
528 return { next: doneResult };
529 }
530 runtime.values = values;
531
532 function doneResult() {
533 return { value: undefined, done: true };
534 }
535
536 Context.prototype = {
537 constructor: Context,
538
539 reset: function(skipTempReset) {
540 this.prev = 0;
541 this.next = 0;
542 // Resetting context._sent for legacy support of Babel's
543 // function.sent implementation.
544 this.sent = this._sent = undefined;
545 this.done = false;
546 this.delegate = null;
547
548 this.method = "next";
549 this.arg = undefined;
550
551 this.tryEntries.forEach(resetTryEntry);
552
553 if (!skipTempReset) {
554 for (var name in this) {
555 // Not sure about the optimal order of these conditions:
556 if (name.charAt(0) === "t" &&
557 hasOwn.call(this, name) &&
558 !isNaN(+name.slice(1))) {
559 this[name] = undefined;
560 }
561 }
562 }
563 },
564
565 stop: function() {
566 this.done = true;
567
568 var rootEntry = this.tryEntries[0];
569 var rootRecord = rootEntry.completion;
570 if (rootRecord.type === "throw") {
571 throw rootRecord.arg;
572 }
573
574 return this.rval;
575 },
576
577 dispatchException: function(exception) {
578 if (this.done) {
579 throw exception;
580 }
581
582 var context = this;
583 function handle(loc, caught) {
584 record.type = "throw";
585 record.arg = exception;
586 context.next = loc;
587
588 if (caught) {
589 // If the dispatched exception was caught by a catch block,
590 // then let that catch block handle the exception normally.
591 context.method = "next";
592 context.arg = undefined;
593 }
594
595 return !! caught;
596 }
597
598 for (var i = this.tryEntries.length - 1; i >= 0; --i) {
599 var entry = this.tryEntries[i];
600 var record = entry.completion;
601
602 if (entry.tryLoc === "root") {
603 // Exception thrown outside of any try block that could handle
604 // it, so set the completion value of the entire function to
605 // throw the exception.
606 return handle("end");
607 }
608
609 if (entry.tryLoc <= this.prev) {
610 var hasCatch = hasOwn.call(entry, "catchLoc");
611 var hasFinally = hasOwn.call(entry, "finallyLoc");
612
613 if (hasCatch && hasFinally) {
614 if (this.prev < entry.catchLoc) {
615 return handle(entry.catchLoc, true);
616 } else if (this.prev < entry.finallyLoc) {
617 return handle(entry.finallyLoc);
618 }
619
620 } else if (hasCatch) {
621 if (this.prev < entry.catchLoc) {
622 return handle(entry.catchLoc, true);
623 }
624
625 } else if (hasFinally) {
626 if (this.prev < entry.finallyLoc) {
627 return handle(entry.finallyLoc);
628 }
629
630 } else {
631 throw new Error("try statement without catch or finally");
632 }
633 }
634 }
635 },
636
637 abrupt: function(type, arg) {
638 for (var i = this.tryEntries.length - 1; i >= 0; --i) {
639 var entry = this.tryEntries[i];
640 if (entry.tryLoc <= this.prev &&
641 hasOwn.call(entry, "finallyLoc") &&
642 this.prev < entry.finallyLoc) {
643 var finallyEntry = entry;
644 break;
645 }
646 }
647
648 if (finallyEntry &&
649 (type === "break" ||
650 type === "continue") &&
651 finallyEntry.tryLoc <= arg &&
652 arg <= finallyEntry.finallyLoc) {
653 // Ignore the finally entry if control is not jumping to a
654 // location outside the try/catch block.
655 finallyEntry = null;
656 }
657
658 var record = finallyEntry ? finallyEntry.completion : {};
659 record.type = type;
660 record.arg = arg;
661
662 if (finallyEntry) {
663 this.method = "next";
664 this.next = finallyEntry.finallyLoc;
665 return ContinueSentinel;
666 }
667
668 return this.complete(record);
669 },
670
671 complete: function(record, afterLoc) {
672 if (record.type === "throw") {
673 throw record.arg;
674 }
675
676 if (record.type === "break" ||
677 record.type === "continue") {
678 this.next = record.arg;
679 } else if (record.type === "return") {
680 this.rval = this.arg = record.arg;
681 this.method = "return";
682 this.next = "end";
683 } else if (record.type === "normal" && afterLoc) {
684 this.next = afterLoc;
685 }
686
687 return ContinueSentinel;
688 },
689
690 finish: function(finallyLoc) {
691 for (var i = this.tryEntries.length - 1; i >= 0; --i) {
692 var entry = this.tryEntries[i];
693 if (entry.finallyLoc === finallyLoc) {
694 this.complete(entry.completion, entry.afterLoc);
695 resetTryEntry(entry);
696 return ContinueSentinel;
697 }
698 }
699 },
700
701 "catch": function(tryLoc) {
702 for (var i = this.tryEntries.length - 1; i >= 0; --i) {
703 var entry = this.tryEntries[i];
704 if (entry.tryLoc === tryLoc) {
705 var record = entry.completion;
706 if (record.type === "throw") {
707 var thrown = record.arg;
708 resetTryEntry(entry);
709 }
710 return thrown;
711 }
712 }
713
714 // The context.catch method must only be called with a location
715 // argument that corresponds to a known catch block.
716 throw new Error("illegal catch attempt");
717 },
718
719 delegateYield: function(iterable, resultName, nextLoc) {
720 this.delegate = {
721 iterator: values(iterable),
722 resultName: resultName,
723 nextLoc: nextLoc
724 };
725
726 if (this.method === "next") {
727 // Deliberately forget the last sent value so that we don't
728 // accidentally pass it on to the delegate.
729 this.arg = undefined;
730 }
731
732 return ContinueSentinel;
733 }
734 };
735 })(
736 // In sloppy mode, unbound `this` refers to the global object, fallback to
737 // Function constructor if we're in global strict mode. That is sadly a form
738 // of indirect eval which violates Content Security Policy.
739 (function() {
740 return this || (typeof self === "object" && self);
741 })() || Function("return this")()
742 );
743 });
744
745 function _typeof(obj) {
746 if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") {
747 _typeof = function (obj) {
748 return typeof obj;
749 };
750 } else {
751 _typeof = function (obj) {
752 return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj;
753 };
754 }
755
756 return _typeof(obj);
757 }
758
759 function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) {
760 try {
761 var info = gen[key](arg);
762 var value = info.value;
763 } catch (error) {
764 reject(error);
765 return;
766 }
767
768 if (info.done) {
769 resolve(value);
770 } else {
771 Promise.resolve(value).then(_next, _throw);
772 }
773 }
774
775 function _asyncToGenerator(fn) {
776 return function () {
777 var self = this,
778 args = arguments;
779 return new Promise(function (resolve, reject) {
780 var gen = fn.apply(self, args);
781
782 function _next(value) {
783 asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value);
784 }
785
786 function _throw(err) {
787 asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err);
788 }
789
790 _next(undefined);
791 });
792 };
793 }
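 // Illustrative sketch, not part of the original bundle: the two helpers above are
 // how Babel lowers `async function` syntax in the rest of this file. An async
 // function written as
 //
 //   async function getValue() {
 //     var v = await Promise.resolve(42);
 //     return v + 1;
 //   }
 //
 // is emitted, roughly, as the following ES5 (the `_callee` / `_context` names
 // mirror the generated code that appears later in this bundle; the exact case
 // numbering is cosmetic):
 //
 //   var getValue = _asyncToGenerator(regeneratorRuntime.mark(function _callee() {
 //     var v;
 //     return regeneratorRuntime.wrap(function _callee$(_context) {
 //       while (1) {
 //         switch (_context.prev = _context.next) {
 //           case 0:
 //             _context.next = 2;
 //             return Promise.resolve(42);
 //           case 2:
 //             v = _context.sent;
 //             return _context.abrupt("return", v + 1);
 //           case 4:
 //           case "end":
 //             return _context.stop();
 //         }
 //       }
 //     }, _callee);
 //   }));
 //
 //   getValue().then(function (n) { console.log(n); }); // logs 43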
794
795 function _classCallCheck(instance, Constructor) {
796 if (!(instance instanceof Constructor)) {
797 throw new TypeError("Cannot call a class as a function");
798 }
799 }
800
801 function _defineProperties(target, props) {
802 for (var i = 0; i < props.length; i++) {
803 var descriptor = props[i];
804 descriptor.enumerable = descriptor.enumerable || false;
805 descriptor.configurable = true;
806 if ("value" in descriptor) descriptor.writable = true;
807 Object.defineProperty(target, descriptor.key, descriptor);
808 }
809 }
810
811 function _createClass(Constructor, protoProps, staticProps) {
812 if (protoProps) _defineProperties(Constructor.prototype, protoProps);
813 if (staticProps) _defineProperties(Constructor, staticProps);
814 return Constructor;
815 }
816
817 function _inherits(subClass, superClass) {
818 if (typeof superClass !== "function" && superClass !== null) {
819 throw new TypeError("Super expression must either be null or a function");
820 }
821
822 subClass.prototype = Object.create(superClass && superClass.prototype, {
823 constructor: {
824 value: subClass,
825 writable: true,
826 configurable: true
827 }
828 });
829 if (superClass) _setPrototypeOf(subClass, superClass);
830 }
831
832 function _getPrototypeOf(o) {
833 _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) {
834 return o.__proto__ || Object.getPrototypeOf(o);
835 };
836 return _getPrototypeOf(o);
837 }
838
839 function _setPrototypeOf(o, p) {
840 _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
841 o.__proto__ = p;
842 return o;
843 };
844
845 return _setPrototypeOf(o, p);
846 }
847
848 function isNativeReflectConstruct() {
849 if (typeof Reflect === "undefined" || !Reflect.construct) return false;
850 if (Reflect.construct.sham) return false;
851 if (typeof Proxy === "function") return true;
852
853 try {
854 Date.prototype.toString.call(Reflect.construct(Date, [], function () {}));
855 return true;
856 } catch (e) {
857 return false;
858 }
859 }
860
861 function _construct(Parent, args, Class) {
862 if (isNativeReflectConstruct()) {
863 _construct = Reflect.construct;
864 } else {
865 _construct = function _construct(Parent, args, Class) {
866 var a = [null];
867 a.push.apply(a, args);
868 var Constructor = Function.bind.apply(Parent, a);
869 var instance = new Constructor();
870 if (Class) _setPrototypeOf(instance, Class.prototype);
871 return instance;
872 };
873 }
874
875 return _construct.apply(null, arguments);
876 }
877
878 function _isNativeFunction(fn) {
879 return Function.toString.call(fn).indexOf("[native code]") !== -1;
880 }
881
882 function _wrapNativeSuper(Class) {
883 var _cache = typeof Map === "function" ? new Map() : undefined;
884
885 _wrapNativeSuper = function _wrapNativeSuper(Class) {
886 if (Class === null || !_isNativeFunction(Class)) return Class;
887
888 if (typeof Class !== "function") {
889 throw new TypeError("Super expression must either be null or a function");
890 }
891
892 if (typeof _cache !== "undefined") {
893 if (_cache.has(Class)) return _cache.get(Class);
894
895 _cache.set(Class, Wrapper);
896 }
897
898 function Wrapper() {
899 return _construct(Class, arguments, _getPrototypeOf(this).constructor);
900 }
901
902 Wrapper.prototype = Object.create(Class.prototype, {
903 constructor: {
904 value: Wrapper,
905 enumerable: false,
906 writable: true,
907 configurable: true
908 }
909 });
910 return _setPrototypeOf(Wrapper, Class);
911 };
912
913 return _wrapNativeSuper(Class);
914 }
915
916 function _assertThisInitialized(self) {
917 if (self === void 0) {
918 throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
919 }
920
921 return self;
922 }
923
924 function _possibleConstructorReturn(self, call) {
925 if (call && (typeof call === "object" || typeof call === "function")) {
926 return call;
927 }
928
929 return _assertThisInitialized(self);
930 }
931
932 function _slicedToArray(arr, i) {
933 return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _nonIterableRest();
934 }
935
936 function _toArray(arr) {
937 return _arrayWithHoles(arr) || _iterableToArray(arr) || _nonIterableRest();
938 }
939
940 function _toConsumableArray(arr) {
941 return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _nonIterableSpread();
942 }
943
944 function _arrayWithoutHoles(arr) {
945 if (Array.isArray(arr)) {
946 for (var i = 0, arr2 = new Array(arr.length); i < arr.length; i++) arr2[i] = arr[i];
947
948 return arr2;
949 }
950 }
951
952 function _arrayWithHoles(arr) {
953 if (Array.isArray(arr)) return arr;
954 }
955
956 function _iterableToArray(iter) {
957 if (Symbol.iterator in Object(iter) || Object.prototype.toString.call(iter) === "[object Arguments]") return Array.from(iter);
958 }
959
960 function _iterableToArrayLimit(arr, i) {
961 var _arr = [];
962 var _n = true;
963 var _d = false;
964 var _e = undefined;
965
966 try {
967 for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) {
968 _arr.push(_s.value);
969
970 if (i && _arr.length === i) break;
971 }
972 } catch (err) {
973 _d = true;
974 _e = err;
975 } finally {
976 try {
977 if (!_n && _i["return"] != null) _i["return"]();
978 } finally {
979 if (_d) throw _e;
980 }
981 }
982
983 return _arr;
984 }
985
986 function _nonIterableSpread() {
987 throw new TypeError("Invalid attempt to spread non-iterable instance");
988 }
989
990 function _nonIterableRest() {
991 throw new TypeError("Invalid attempt to destructure non-iterable instance");
992 }
993
994 var load = function load(received, defaults) {
995 var onto = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
996 var k, ref, v;
997
998 for (k in defaults) {
999 v = defaults[k];
1000 onto[k] = (ref = received[k]) != null ? ref : v;
1001 }
1002
1003 return onto;
1004 };
1005
1006 var overwrite = function overwrite(received, defaults) {
1007 var onto = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
1008 var k, v;
1009
1010 for (k in received) {
1011 v = received[k];
1012
1013 if (defaults[k] !== void 0) {
1014 onto[k] = v;
1015 }
1016 }
1017
1018 return onto;
1019 };
1020
1021 var parser = {
1022 load: load,
1023 overwrite: overwrite
1024 };
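 // Illustrative sketch, not part of the original bundle: `parser.load` fills in
 // defaults for any option the caller left null/undefined, while `parser.overwrite`
 // copies only the keys that also exist in `defaults`. For example:
 //
 //   parser.load({ minTime: 100 }, { minTime: 0, maxConcurrent: null });
 //   // -> { minTime: 100, maxConcurrent: null }
 //
 //   parser.overwrite({ minTime: 100, bogus: true }, { minTime: 0, maxConcurrent: null });
 //   // -> { minTime: 100 }   (unknown keys such as `bogus` are dropped)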
1025
1026 var DLList;
1027
1028 DLList =
1029 /*#__PURE__*/
1030 function () {
1031 function DLList(incr, decr) {
1032 _classCallCheck(this, DLList);
1033
1034 this.incr = incr;
1035 this.decr = decr;
1036 this._first = null;
1037 this._last = null;
1038 this.length = 0;
1039 }
1040
1041 _createClass(DLList, [{
1042 key: "push",
1043 value: function push(value) {
1044 var node;
1045 this.length++;
1046
1047 if (typeof this.incr === "function") {
1048 this.incr();
1049 }
1050
1051 node = {
1052 value: value,
1053 prev: this._last,
1054 next: null
1055 };
1056
1057 if (this._last != null) {
1058 this._last.next = node;
1059 this._last = node;
1060 } else {
1061 this._first = this._last = node;
1062 }
1063
1064 return void 0;
1065 }
1066 }, {
1067 key: "shift",
1068 value: function shift() {
1069 var value;
1070
1071 if (this._first == null) {
1072 return;
1073 } else {
1074 this.length--;
1075
1076 if (typeof this.decr === "function") {
1077 this.decr();
1078 }
1079 }
1080
1081 value = this._first.value;
1082
1083 if ((this._first = this._first.next) != null) {
1084 this._first.prev = null;
1085 } else {
1086 this._last = null;
1087 }
1088
1089 return value;
1090 }
1091 }, {
1092 key: "first",
1093 value: function first() {
1094 if (this._first != null) {
1095 return this._first.value;
1096 }
1097 }
1098 }, {
1099 key: "getArray",
1100 value: function getArray() {
1101 var node, ref, results;
1102 node = this._first;
1103 results = [];
1104
1105 while (node != null) {
1106 results.push((ref = node, node = node.next, ref.value));
1107 }
1108
1109 return results;
1110 }
1111 }, {
1112 key: "forEachShift",
1113 value: function forEachShift(cb) {
1114 var node;
1115 node = this.shift();
1116
1117 while (node != null) {
1118 cb(node), node = this.shift();
1119 }
1120
1121 return void 0;
1122 }
1123 }, {
1124 key: "debug",
1125 value: function debug() {
1126 var node, ref, ref1, ref2, results;
1127 node = this._first;
1128 results = [];
1129
1130 while (node != null) {
1131 results.push((ref = node, node = node.next, {
1132 value: ref.value,
1133 prev: (ref1 = ref.prev) != null ? ref1.value : void 0,
1134 next: (ref2 = ref.next) != null ? ref2.value : void 0
1135 }));
1136 }
1137
1138 return results;
1139 }
1140 }]);
1141
1142 return DLList;
1143 }();
1144
1145 var DLList_1 = DLList;
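 // Illustrative sketch, not part of the original bundle: DLList is a minimal
 // doubly-linked queue used internally to store jobs. The optional incr/decr
 // callbacks let a parent (Queues, below) maintain an aggregate length. For example:
 //
 //   var list = new DLList_1(null, null);
 //   list.push("a");
 //   list.push("b");
 //   list.first();    // -> "a"
 //   list.shift();    // -> "a"
 //   list.getArray(); // -> ["b"]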
1146
1147 var Events;
1148
1149 Events =
1150 /*#__PURE__*/
1151 function () {
1152 function Events(instance) {
1153 var _this = this;
1154
1155 _classCallCheck(this, Events);
1156
1157 this.instance = instance;
1158 this._events = {};
1159
1160 if (this.instance.on != null || this.instance.once != null || this.instance.removeAllListeners != null) {
1161 throw new Error("An Emitter already exists for this object");
1162 }
1163
1164 this.instance.on = function (name, cb) {
1165 return _this._addListener(name, "many", cb);
1166 };
1167
1168 this.instance.once = function (name, cb) {
1169 return _this._addListener(name, "once", cb);
1170 };
1171
1172 this.instance.removeAllListeners = function () {
1173 var name = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
1174
1175 if (name != null) {
1176 return delete _this._events[name];
1177 } else {
1178 return _this._events = {};
1179 }
1180 };
1181 }
1182
1183 _createClass(Events, [{
1184 key: "_addListener",
1185 value: function _addListener(name, status, cb) {
1186 var base;
1187
1188 if ((base = this._events)[name] == null) {
1189 base[name] = [];
1190 }
1191
1192 this._events[name].push({
1193 cb: cb,
1194 status: status
1195 });
1196
1197 return this.instance;
1198 }
1199 }, {
1200 key: "listenerCount",
1201 value: function listenerCount(name) {
1202 if (this._events[name] != null) {
1203 return this._events[name].length;
1204 } else {
1205 return 0;
1206 }
1207 }
1208 }, {
1209 key: "trigger",
1210 value: function () {
1211 var _trigger = _asyncToGenerator(
1212 /*#__PURE__*/
1213 regeneratorRuntime.mark(function _callee2(name) {
1214 var _this2 = this;
1215
1216 var _len,
1217 args,
1218 _key,
1219 e,
1220 promises,
1221 _args2 = arguments;
1222
1223 return regeneratorRuntime.wrap(function _callee2$(_context2) {
1224 while (1) {
1225 switch (_context2.prev = _context2.next) {
1226 case 0:
1227 for (_len = _args2.length, args = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
1228 args[_key - 1] = _args2[_key];
1229 }
1230
1231 _context2.prev = 1;
1232
1233 if (name !== "debug") {
1234 this.trigger("debug", "Event triggered: ".concat(name), args);
1235 }
1236
1237 if (!(this._events[name] == null)) {
1238 _context2.next = 5;
1239 break;
1240 }
1241
1242 return _context2.abrupt("return");
1243
1244 case 5:
1245 this._events[name] = this._events[name].filter(function (listener) {
1246 return listener.status !== "none";
1247 });
1248 promises = this._events[name].map(
1249 /*#__PURE__*/
1250 function () {
1251 var _ref = _asyncToGenerator(
1252 /*#__PURE__*/
1253 regeneratorRuntime.mark(function _callee(listener) {
1254 var e, returned;
1255 return regeneratorRuntime.wrap(function _callee$(_context) {
1256 while (1) {
1257 switch (_context.prev = _context.next) {
1258 case 0:
1259 if (!(listener.status === "none")) {
1260 _context.next = 2;
1261 break;
1262 }
1263
1264 return _context.abrupt("return");
1265
1266 case 2:
1267 if (listener.status === "once") {
1268 listener.status = "none";
1269 }
1270
1271 _context.prev = 3;
1272 returned = typeof listener.cb === "function" ? listener.cb.apply(listener, args) : void 0;
1273
1274 if (!(typeof (returned != null ? returned.then : void 0) === "function")) {
1275 _context.next = 11;
1276 break;
1277 }
1278
1279 _context.next = 8;
1280 return returned;
1281
1282 case 8:
1283 return _context.abrupt("return", _context.sent);
1284
1285 case 11:
1286 return _context.abrupt("return", returned);
1287
1288 case 12:
1289 _context.next = 19;
1290 break;
1291
1292 case 14:
1293 _context.prev = 14;
1294 _context.t0 = _context["catch"](3);
1295 e = _context.t0;
1296
1297 {
1298 _this2.trigger("error", e);
1299 }
1300
1301 return _context.abrupt("return", null);
1302
1303 case 19:
1304 case "end":
1305 return _context.stop();
1306 }
1307 }
1308 }, _callee, null, [[3, 14]]);
1309 }));
1310
1311 return function (_x2) {
1312 return _ref.apply(this, arguments);
1313 };
1314 }());
1315 _context2.next = 9;
1316 return Promise.all(promises);
1317
1318 case 9:
1319 _context2.t0 = function (x) {
1320 return x != null;
1321 };
1322
1323 return _context2.abrupt("return", _context2.sent.find(_context2.t0));
1324
1325 case 13:
1326 _context2.prev = 13;
1327 _context2.t1 = _context2["catch"](1);
1328 e = _context2.t1;
1329
1330 {
1331 this.trigger("error", e);
1332 }
1333
1334 return _context2.abrupt("return", null);
1335
1336 case 18:
1337 case "end":
1338 return _context2.stop();
1339 }
1340 }
1341 }, _callee2, this, [[1, 13]]);
1342 }));
1343
1344 function trigger(_x) {
1345 return _trigger.apply(this, arguments);
1346 }
1347
1348 return trigger;
1349 }()
1350 }]);
1351
1352 return Events;
1353 }();
1354
1355 var Events_1 = Events;
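 // Illustrative sketch, not part of the original bundle: Events decorates a host
 // object with `on`, `once` and `removeAllListeners`, while `trigger` (normally
 // called internally) runs every listener and awaits any returned promise:
 //
 //   var host = {};
 //   var events = new Events_1(host);
 //   host.on("message", function (msg) { console.log("got", msg); });
 //   events.trigger("message", "hello"); // -> Promise; logs "got hello"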
1356
1357 var DLList$1, Events$1, Queues;
1358 DLList$1 = DLList_1;
1359 Events$1 = Events_1;
1360
1361 Queues =
1362 /*#__PURE__*/
1363 function () {
1364 function Queues(num_priorities) {
1365 _classCallCheck(this, Queues);
1366
1367 var i;
1368 this.Events = new Events$1(this);
1369 this._length = 0;
1370
1371 this._lists = function () {
1372 var _this = this;
1373
1374 var j, ref, results;
1375 results = [];
1376
1377 for (i = j = 1, ref = num_priorities; 1 <= ref ? j <= ref : j >= ref; i = 1 <= ref ? ++j : --j) {
1378 results.push(new DLList$1(function () {
1379 return _this.incr();
1380 }, function () {
1381 return _this.decr();
1382 }));
1383 }
1384
1385 return results;
1386 }.call(this);
1387 }
1388
1389 _createClass(Queues, [{
1390 key: "incr",
1391 value: function incr() {
1392 if (this._length++ === 0) {
1393 return this.Events.trigger("leftzero");
1394 }
1395 }
1396 }, {
1397 key: "decr",
1398 value: function decr() {
1399 if (--this._length === 0) {
1400 return this.Events.trigger("zero");
1401 }
1402 }
1403 }, {
1404 key: "push",
1405 value: function push(job) {
1406 return this._lists[job.options.priority].push(job);
1407 }
1408 }, {
1409 key: "queued",
1410 value: function queued(priority) {
1411 if (priority != null) {
1412 return this._lists[priority].length;
1413 } else {
1414 return this._length;
1415 }
1416 }
1417 }, {
1418 key: "shiftAll",
1419 value: function shiftAll(fn) {
1420 return this._lists.forEach(function (list) {
1421 return list.forEachShift(fn);
1422 });
1423 }
1424 }, {
1425 key: "getFirst",
1426 value: function getFirst() {
1427 var arr = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : this._lists;
1428 var j, len, list;
1429
1430 for (j = 0, len = arr.length; j < len; j++) {
1431 list = arr[j];
1432
1433 if (list.length > 0) {
1434 return list;
1435 }
1436 }
1437
1438 return [];
1439 }
1440 }, {
1441 key: "shiftLastFrom",
1442 value: function shiftLastFrom(priority) {
1443 return this.getFirst(this._lists.slice(priority).reverse()).shift();
1444 }
1445 }]);
1446
1447 return Queues;
1448 }();
1449
1450 var Queues_1 = Queues;
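 // Illustrative sketch, not part of the original bundle: Queues keeps one DLList
 // per priority level (index 0 is served first). `push` files a job under
 // job.options.priority, and `getFirst` returns the highest-priority non-empty
 // list, so `shiftAll` drains jobs in priority order. For example:
 //
 //   var queues = new Queues_1(10);
 //   queues.push({ options: { priority: 5 } });
 //   queues.queued();  // -> 1
 //   queues.queued(5); // -> 1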
1451
1452 var BottleneckError;
1453
1454 BottleneckError =
1455 /*#__PURE__*/
1456 function (_Error) {
1457 _inherits(BottleneckError, _Error);
1458
1459 function BottleneckError() {
1460 _classCallCheck(this, BottleneckError);
1461
1462 return _possibleConstructorReturn(this, _getPrototypeOf(BottleneckError).apply(this, arguments));
1463 }
1464
1465 return BottleneckError;
1466 }(_wrapNativeSuper(Error));
1467
1468 var BottleneckError_1 = BottleneckError;
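 // Illustrative sketch, not part of the original bundle: BottleneckError is the
 // error type used for dropped and timed-out jobs (see Job.doDrop and Job.doExpire
 // below). Assuming the class is exposed on the Bottleneck export, callers can
 // distinguish limiter errors from task errors for a `limiter` like the one
 // sketched near the top of this file:
 //
 //   limiter.schedule(task).catch(function (err) {
 //     if (err instanceof Bottleneck.BottleneckError) {
 //       // the job was dropped or expired by the limiter
 //     } else {
 //       // the task itself rejected
 //     }
 //   });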
1469
1470 var BottleneckError$1, DEFAULT_PRIORITY, Job, NUM_PRIORITIES, parser$1;
1471 NUM_PRIORITIES = 10;
1472 DEFAULT_PRIORITY = 5;
1473 parser$1 = parser;
1474 BottleneckError$1 = BottleneckError_1;
1475
1476 Job =
1477 /*#__PURE__*/
1478 function () {
1479 function Job(task, args, options, jobDefaults, rejectOnDrop, Events, _states, Promise) {
1480 var _this = this;
1481
1482 _classCallCheck(this, Job);
1483
1484 this.task = task;
1485 this.args = args;
1486 this.rejectOnDrop = rejectOnDrop;
1487 this.Events = Events;
1488 this._states = _states;
1489 this.Promise = Promise;
1490 this.options = parser$1.load(options, jobDefaults);
1491 this.options.priority = this._sanitizePriority(this.options.priority);
1492
1493 if (this.options.id === jobDefaults.id) {
1494 this.options.id = "".concat(this.options.id, "-").concat(this._randomIndex());
1495 }
1496
1497 this.promise = new this.Promise(function (_resolve, _reject) {
1498 _this._resolve = _resolve;
1499 _this._reject = _reject;
1500 });
1501 this.retryCount = 0;
1502 }
1503
1504 _createClass(Job, [{
1505 key: "_sanitizePriority",
1506 value: function _sanitizePriority(priority) {
1507 var sProperty;
1508 sProperty = ~~priority !== priority ? DEFAULT_PRIORITY : priority;
1509
1510 if (sProperty < 0) {
1511 return 0;
1512 } else if (sProperty > NUM_PRIORITIES - 1) {
1513 return NUM_PRIORITIES - 1;
1514 } else {
1515 return sProperty;
1516 }
1517 }
1518 }, {
1519 key: "_randomIndex",
1520 value: function _randomIndex() {
1521 return Math.random().toString(36).slice(2);
1522 }
1523 }, {
1524 key: "doDrop",
1525 value: function doDrop() {
1526 var _ref = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {},
1527 error = _ref.error,
1528 _ref$message = _ref.message,
1529 message = _ref$message === void 0 ? "This job has been dropped by Bottleneck" : _ref$message;
1530
1531 if (this._states.remove(this.options.id)) {
1532 if (this.rejectOnDrop) {
1533 this._reject(error != null ? error : new BottleneckError$1(message));
1534 }
1535
1536 this.Events.trigger("dropped", {
1537 args: this.args,
1538 options: this.options,
1539 task: this.task,
1540 promise: this.promise
1541 });
1542 return true;
1543 } else {
1544 return false;
1545 }
1546 }
1547 }, {
1548 key: "_assertStatus",
1549 value: function _assertStatus(expected) {
1550 var status;
1551 status = this._states.jobStatus(this.options.id);
1552
1553 if (!(status === expected || expected === "DONE" && status === null)) {
1554 throw new BottleneckError$1("Invalid job status ".concat(status, ", expected ").concat(expected, ". Please open an issue at https://github.com/SGrondin/bottleneck/issues"));
1555 }
1556 }
1557 }, {
1558 key: "doReceive",
1559 value: function doReceive() {
1560 this._states.start(this.options.id);
1561
1562 return this.Events.trigger("received", {
1563 args: this.args,
1564 options: this.options
1565 });
1566 }
1567 }, {
1568 key: "doQueue",
1569 value: function doQueue(reachedHWM, blocked) {
1570 this._assertStatus("RECEIVED");
1571
1572 this._states.next(this.options.id);
1573
1574 return this.Events.trigger("queued", {
1575 args: this.args,
1576 options: this.options,
1577 reachedHWM: reachedHWM,
1578 blocked: blocked
1579 });
1580 }
1581 }, {
1582 key: "doRun",
1583 value: function doRun() {
1584 if (this.retryCount === 0) {
1585 this._assertStatus("QUEUED");
1586
1587 this._states.next(this.options.id);
1588 } else {
1589 this._assertStatus("EXECUTING");
1590 }
1591
1592 return this.Events.trigger("scheduled", {
1593 args: this.args,
1594 options: this.options
1595 });
1596 }
1597 }, {
1598 key: "doExecute",
1599 value: function () {
1600 var _doExecute = _asyncToGenerator(
1601 /*#__PURE__*/
1602 regeneratorRuntime.mark(function _callee(chained, clearGlobalState, run, free) {
1603 var error, eventInfo, passed;
1604 return regeneratorRuntime.wrap(function _callee$(_context) {
1605 while (1) {
1606 switch (_context.prev = _context.next) {
1607 case 0:
1608 if (this.retryCount === 0) {
1609 this._assertStatus("RUNNING");
1610
1611 this._states.next(this.options.id);
1612 } else {
1613 this._assertStatus("EXECUTING");
1614 }
1615
1616 eventInfo = {
1617 args: this.args,
1618 options: this.options,
1619 retryCount: this.retryCount
1620 };
1621 this.Events.trigger("executing", eventInfo);
1622 _context.prev = 3;
1623 _context.next = 6;
1624 return chained != null ? chained.schedule.apply(chained, [this.options, this.task].concat(_toConsumableArray(this.args))) : this.task.apply(this, _toConsumableArray(this.args));
1625
1626 case 6:
1627 passed = _context.sent;
1628
1629 if (!clearGlobalState()) {
1630 _context.next = 13;
1631 break;
1632 }
1633
1634 this.doDone(eventInfo);
1635 _context.next = 11;
1636 return free(this.options, eventInfo);
1637
1638 case 11:
1639 this._assertStatus("DONE");
1640
1641 return _context.abrupt("return", this._resolve(passed));
1642
1643 case 13:
1644 _context.next = 19;
1645 break;
1646
1647 case 15:
1648 _context.prev = 15;
1649 _context.t0 = _context["catch"](3);
1650 error = _context.t0;
1651 return _context.abrupt("return", this._onFailure(error, eventInfo, clearGlobalState, run, free));
1652
1653 case 19:
1654 case "end":
1655 return _context.stop();
1656 }
1657 }
1658 }, _callee, this, [[3, 15]]);
1659 }));
1660
1661 function doExecute(_x, _x2, _x3, _x4) {
1662 return _doExecute.apply(this, arguments);
1663 }
1664
1665 return doExecute;
1666 }()
1667 }, {
1668 key: "doExpire",
1669 value: function doExpire(clearGlobalState, run, free) {
1670 var error, eventInfo;
1671
1672 this._assertStatus("EXECUTING");
1673
1674 eventInfo = {
1675 args: this.args,
1676 options: this.options,
1677 retryCount: this.retryCount
1678 };
1679 error = new BottleneckError$1("This job timed out after ".concat(this.options.expiration, " ms."));
1680 return this._onFailure(error, eventInfo, clearGlobalState, run, free);
1681 }
1682 }, {
1683 key: "_onFailure",
1684 value: function () {
1685 var _onFailure2 = _asyncToGenerator(
1686 /*#__PURE__*/
1687 regeneratorRuntime.mark(function _callee2(error, eventInfo, clearGlobalState, run, free) {
1688 var retry, retryAfter;
1689 return regeneratorRuntime.wrap(function _callee2$(_context2) {
1690 while (1) {
1691 switch (_context2.prev = _context2.next) {
1692 case 0:
1693 if (!clearGlobalState()) {
1694 _context2.next = 16;
1695 break;
1696 }
1697
1698 _context2.next = 3;
1699 return this.Events.trigger("failed", error, eventInfo);
1700
1701 case 3:
1702 retry = _context2.sent;
1703
1704 if (!(retry != null)) {
1705 _context2.next = 11;
1706 break;
1707 }
1708
1709 retryAfter = ~~retry;
1710 this.Events.trigger("retry", "Retrying ".concat(this.options.id, " after ").concat(retryAfter, " ms"), eventInfo);
1711 this.retryCount++;
1712 return _context2.abrupt("return", run(retryAfter));
1713
1714 case 11:
1715 this.doDone(eventInfo);
1716 _context2.next = 14;
1717 return free(this.options, eventInfo);
1718
1719 case 14:
1720 this._assertStatus("DONE");
1721
1722 return _context2.abrupt("return", this._reject(error));
1723
1724 case 16:
1725 case "end":
1726 return _context2.stop();
1727 }
1728 }
1729 }, _callee2, this);
1730 }));
1731
1732 function _onFailure(_x5, _x6, _x7, _x8, _x9) {
1733 return _onFailure2.apply(this, arguments);
1734 }
1735
1736 return _onFailure;
1737 }()
1738 }, {
1739 key: "doDone",
1740 value: function doDone(eventInfo) {
1741 this._assertStatus("EXECUTING");
1742
1743 this._states.next(this.options.id);
1744
1745 return this.Events.trigger("done", eventInfo);
1746 }
1747 }]);
1748
1749 return Job;
1750 }();
1751
1752 var Job_1 = Job;
1753
1754 var BottleneckError$2, LocalDatastore, parser$2;
1755 parser$2 = parser;
1756 BottleneckError$2 = BottleneckError_1;
1757
1758 LocalDatastore =
1759 /*#__PURE__*/
1760 function () {
1761 function LocalDatastore(instance, storeOptions, storeInstanceOptions) {
1762 _classCallCheck(this, LocalDatastore);
1763
1764 this.instance = instance;
1765 this.storeOptions = storeOptions;
1766 this.clientId = this.instance._randomIndex();
1767 parser$2.load(storeInstanceOptions, storeInstanceOptions, this);
1768 this._nextRequest = this._lastReservoirRefresh = this._lastReservoirIncrease = Date.now();
1769 this._running = 0;
1770 this._done = 0;
1771 this._unblockTime = 0;
1772 this.ready = this.Promise.resolve();
1773 this.clients = {};
1774
1775 this._startHeartbeat();
1776 }
1777
1778 _createClass(LocalDatastore, [{
1779 key: "_startHeartbeat",
1780 value: function _startHeartbeat() {
1781 var _this = this;
1782
1783 var base;
1784
1785 if (this.heartbeat == null && (this.storeOptions.reservoirRefreshInterval != null && this.storeOptions.reservoirRefreshAmount != null || this.storeOptions.reservoirIncreaseInterval != null && this.storeOptions.reservoirIncreaseAmount != null)) {
1786 return typeof (base = this.heartbeat = setInterval(function () {
1787 var amount, incr, maximum, now, reservoir;
1788 now = Date.now();
1789
1790 if (_this.storeOptions.reservoirRefreshInterval != null && now >= _this._lastReservoirRefresh + _this.storeOptions.reservoirRefreshInterval) {
1791 _this._lastReservoirRefresh = now;
1792 _this.storeOptions.reservoir = _this.storeOptions.reservoirRefreshAmount;
1793
1794 _this.instance._drainAll(_this.computeCapacity());
1795 }
1796
1797 if (_this.storeOptions.reservoirIncreaseInterval != null && now >= _this._lastReservoirIncrease + _this.storeOptions.reservoirIncreaseInterval) {
1798 var _this$storeOptions = _this.storeOptions;
1799 amount = _this$storeOptions.reservoirIncreaseAmount;
1800 maximum = _this$storeOptions.reservoirIncreaseMaximum;
1801 reservoir = _this$storeOptions.reservoir;
1802 _this._lastReservoirIncrease = now;
1803 incr = maximum != null ? Math.min(amount, maximum - reservoir) : amount;
1804
1805 if (incr > 0) {
1806 _this.storeOptions.reservoir += incr;
1807 return _this.instance._drainAll(_this.computeCapacity());
1808 }
1809 }
1810 }, this.heartbeatInterval)).unref === "function" ? base.unref() : void 0;
1811 } else {
1812 return clearInterval(this.heartbeat);
1813 }
1814 }
1815 }, {
1816 key: "__publish__",
1817 value: function () {
1818 var _publish__ = _asyncToGenerator(
1819 /*#__PURE__*/
1820 regeneratorRuntime.mark(function _callee(message) {
1821 return regeneratorRuntime.wrap(function _callee$(_context) {
1822 while (1) {
1823 switch (_context.prev = _context.next) {
1824 case 0:
1825 _context.next = 2;
1826 return this.yieldLoop();
1827
1828 case 2:
1829 return _context.abrupt("return", this.instance.Events.trigger("message", message.toString()));
1830
1831 case 3:
1832 case "end":
1833 return _context.stop();
1834 }
1835 }
1836 }, _callee, this);
1837 }));
1838
1839 function __publish__(_x) {
1840 return _publish__.apply(this, arguments);
1841 }
1842
1843 return __publish__;
1844 }()
1845 }, {
1846 key: "__disconnect__",
1847 value: function () {
1848 var _disconnect__ = _asyncToGenerator(
1849 /*#__PURE__*/
1850 regeneratorRuntime.mark(function _callee2(flush) {
1851 return regeneratorRuntime.wrap(function _callee2$(_context2) {
1852 while (1) {
1853 switch (_context2.prev = _context2.next) {
1854 case 0:
1855 _context2.next = 2;
1856 return this.yieldLoop();
1857
1858 case 2:
1859 clearInterval(this.heartbeat);
1860 return _context2.abrupt("return", this.Promise.resolve());
1861
1862 case 4:
1863 case "end":
1864 return _context2.stop();
1865 }
1866 }
1867 }, _callee2, this);
1868 }));
1869
1870 function __disconnect__(_x2) {
1871 return _disconnect__.apply(this, arguments);
1872 }
1873
1874 return __disconnect__;
1875 }()
1876 }, {
1877 key: "yieldLoop",
1878 value: function yieldLoop() {
1879 var t = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 0;
1880 return new this.Promise(function (resolve, reject) {
1881 return setTimeout(resolve, t);
1882 });
1883 }
1884 }, {
1885 key: "computePenalty",
1886 value: function computePenalty() {
1887 var ref;
1888 return (ref = this.storeOptions.penalty) != null ? ref : 15 * this.storeOptions.minTime || 5000;
1889 }
1890 }, {
1891 key: "__updateSettings__",
1892 value: function () {
1893 var _updateSettings__ = _asyncToGenerator(
1894 /*#__PURE__*/
1895 regeneratorRuntime.mark(function _callee3(options) {
1896 return regeneratorRuntime.wrap(function _callee3$(_context3) {
1897 while (1) {
1898 switch (_context3.prev = _context3.next) {
1899 case 0:
1900 _context3.next = 2;
1901 return this.yieldLoop();
1902
1903 case 2:
1904 parser$2.overwrite(options, options, this.storeOptions);
1905
1906 this._startHeartbeat();
1907
1908 this.instance._drainAll(this.computeCapacity());
1909
1910 return _context3.abrupt("return", true);
1911
1912 case 6:
1913 case "end":
1914 return _context3.stop();
1915 }
1916 }
1917 }, _callee3, this);
1918 }));
1919
1920 function __updateSettings__(_x3) {
1921 return _updateSettings__.apply(this, arguments);
1922 }
1923
1924 return __updateSettings__;
1925 }()
1926 }, {
1927 key: "__running__",
1928 value: function () {
1929 var _running__ = _asyncToGenerator(
1930 /*#__PURE__*/
1931 regeneratorRuntime.mark(function _callee4() {
1932 return regeneratorRuntime.wrap(function _callee4$(_context4) {
1933 while (1) {
1934 switch (_context4.prev = _context4.next) {
1935 case 0:
1936 _context4.next = 2;
1937 return this.yieldLoop();
1938
1939 case 2:
1940 return _context4.abrupt("return", this._running);
1941
1942 case 3:
1943 case "end":
1944 return _context4.stop();
1945 }
1946 }
1947 }, _callee4, this);
1948 }));
1949
1950 function __running__() {
1951 return _running__.apply(this, arguments);
1952 }
1953
1954 return __running__;
1955 }()
1956 }, {
1957 key: "__queued__",
1958 value: function () {
1959 var _queued__ = _asyncToGenerator(
1960 /*#__PURE__*/
1961 regeneratorRuntime.mark(function _callee5() {
1962 return regeneratorRuntime.wrap(function _callee5$(_context5) {
1963 while (1) {
1964 switch (_context5.prev = _context5.next) {
1965 case 0:
1966 _context5.next = 2;
1967 return this.yieldLoop();
1968
1969 case 2:
1970 return _context5.abrupt("return", this.instance.queued());
1971
1972 case 3:
1973 case "end":
1974 return _context5.stop();
1975 }
1976 }
1977 }, _callee5, this);
1978 }));
1979
1980 function __queued__() {
1981 return _queued__.apply(this, arguments);
1982 }
1983
1984 return __queued__;
1985 }()
1986 }, {
1987 key: "__done__",
1988 value: function () {
1989 var _done__ = _asyncToGenerator(
1990 /*#__PURE__*/
1991 regeneratorRuntime.mark(function _callee6() {
1992 return regeneratorRuntime.wrap(function _callee6$(_context6) {
1993 while (1) {
1994 switch (_context6.prev = _context6.next) {
1995 case 0:
1996 _context6.next = 2;
1997 return this.yieldLoop();
1998
1999 case 2:
2000 return _context6.abrupt("return", this._done);
2001
2002 case 3:
2003 case "end":
2004 return _context6.stop();
2005 }
2006 }
2007 }, _callee6, this);
2008 }));
2009
2010 function __done__() {
2011 return _done__.apply(this, arguments);
2012 }
2013
2014 return __done__;
2015 }()
2016 }, {
2017 key: "__groupCheck__",
2018 value: function () {
2019 var _groupCheck__ = _asyncToGenerator(
2020 /*#__PURE__*/
2021 regeneratorRuntime.mark(function _callee7(time) {
2022 return regeneratorRuntime.wrap(function _callee7$(_context7) {
2023 while (1) {
2024 switch (_context7.prev = _context7.next) {
2025 case 0:
2026 _context7.next = 2;
2027 return this.yieldLoop();
2028
2029 case 2:
2030 return _context7.abrupt("return", this._nextRequest + this.timeout < time);
2031
2032 case 3:
2033 case "end":
2034 return _context7.stop();
2035 }
2036 }
2037 }, _callee7, this);
2038 }));
2039
2040 function __groupCheck__(_x4) {
2041 return _groupCheck__.apply(this, arguments);
2042 }
2043
2044 return __groupCheck__;
2045 }()
2046 }, {
2047 key: "computeCapacity",
2048 value: function computeCapacity() {
2049 var maxConcurrent, reservoir;
2050 var _this$storeOptions2 = this.storeOptions;
2051 maxConcurrent = _this$storeOptions2.maxConcurrent;
2052 reservoir = _this$storeOptions2.reservoir;
2053
2054 if (maxConcurrent != null && reservoir != null) {
2055 return Math.min(maxConcurrent - this._running, reservoir);
2056 } else if (maxConcurrent != null) {
2057 return maxConcurrent - this._running;
2058 } else if (reservoir != null) {
2059 return reservoir;
2060 } else {
2061 return null;
2062 }
2063 }
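        // Editor's note (added): computeCapacity derives how many weight units may start right
        // now from the two optional limits above. A quick worked example, following the branches:
        //   maxConcurrent = 5, _running = 2, reservoir = 10  ->  min(5 - 2, 10) = 3
        //   maxConcurrent = null, reservoir = 4              ->  4
        //   both null                                        ->  null (capacity is unlimited)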
2064 }, {
2065 key: "conditionsCheck",
2066 value: function conditionsCheck(weight) {
2067 var capacity;
2068 capacity = this.computeCapacity();
2069 return capacity == null || weight <= capacity;
2070 }
2071 }, {
2072 key: "__incrementReservoir__",
2073 value: function () {
2074 var _incrementReservoir__ = _asyncToGenerator(
2075 /*#__PURE__*/
2076 regeneratorRuntime.mark(function _callee8(incr) {
2077 var reservoir;
2078 return regeneratorRuntime.wrap(function _callee8$(_context8) {
2079 while (1) {
2080 switch (_context8.prev = _context8.next) {
2081 case 0:
2082 _context8.next = 2;
2083 return this.yieldLoop();
2084
2085 case 2:
2086 reservoir = this.storeOptions.reservoir += incr;
2087
2088 this.instance._drainAll(this.computeCapacity());
2089
2090 return _context8.abrupt("return", reservoir);
2091
2092 case 5:
2093 case "end":
2094 return _context8.stop();
2095 }
2096 }
2097 }, _callee8, this);
2098 }));
2099
2100 function __incrementReservoir__(_x5) {
2101 return _incrementReservoir__.apply(this, arguments);
2102 }
2103
2104 return __incrementReservoir__;
2105 }()
2106 }, {
2107 key: "__currentReservoir__",
2108 value: function () {
2109 var _currentReservoir__ = _asyncToGenerator(
2110 /*#__PURE__*/
2111 regeneratorRuntime.mark(function _callee9() {
2112 return regeneratorRuntime.wrap(function _callee9$(_context9) {
2113 while (1) {
2114 switch (_context9.prev = _context9.next) {
2115 case 0:
2116 _context9.next = 2;
2117 return this.yieldLoop();
2118
2119 case 2:
2120 return _context9.abrupt("return", this.storeOptions.reservoir);
2121
2122 case 3:
2123 case "end":
2124 return _context9.stop();
2125 }
2126 }
2127 }, _callee9, this);
2128 }));
2129
2130 function __currentReservoir__() {
2131 return _currentReservoir__.apply(this, arguments);
2132 }
2133
2134 return __currentReservoir__;
2135 }()
2136 }, {
2137 key: "isBlocked",
2138 value: function isBlocked(now) {
2139 return this._unblockTime >= now;
2140 }
2141 }, {
2142 key: "check",
2143 value: function check(weight, now) {
2144 return this.conditionsCheck(weight) && this._nextRequest - now <= 0;
2145 }
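        // Editor's note (added): a job of `weight` may start immediately only when both parts
        // hold: conditionsCheck(weight) says it fits the current capacity, and the rate-limit
        // timestamp has already been reached (this._nextRequest - now <= 0).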
2146 }, {
2147 key: "__check__",
2148 value: function () {
2149 var _check__ = _asyncToGenerator(
2150 /*#__PURE__*/
2151 regeneratorRuntime.mark(function _callee10(weight) {
2152 var now;
2153 return regeneratorRuntime.wrap(function _callee10$(_context10) {
2154 while (1) {
2155 switch (_context10.prev = _context10.next) {
2156 case 0:
2157 _context10.next = 2;
2158 return this.yieldLoop();
2159
2160 case 2:
2161 now = Date.now();
2162 return _context10.abrupt("return", this.check(weight, now));
2163
2164 case 4:
2165 case "end":
2166 return _context10.stop();
2167 }
2168 }
2169 }, _callee10, this);
2170 }));
2171
2172 function __check__(_x6) {
2173 return _check__.apply(this, arguments);
2174 }
2175
2176 return __check__;
2177 }()
2178 }, {
2179 key: "__register__",
2180 value: function () {
2181 var _register__ = _asyncToGenerator(
2182 /*#__PURE__*/
2183 regeneratorRuntime.mark(function _callee11(index, weight, expiration) {
2184 var now, wait;
2185 return regeneratorRuntime.wrap(function _callee11$(_context11) {
2186 while (1) {
2187 switch (_context11.prev = _context11.next) {
2188 case 0:
2189 _context11.next = 2;
2190 return this.yieldLoop();
2191
2192 case 2:
2193 now = Date.now();
2194
2195 if (!this.conditionsCheck(weight)) {
2196 _context11.next = 11;
2197 break;
2198 }
2199
2200 this._running += weight;
2201
2202 if (this.storeOptions.reservoir != null) {
2203 this.storeOptions.reservoir -= weight;
2204 }
2205
2206 wait = Math.max(this._nextRequest - now, 0);
2207 this._nextRequest = now + wait + this.storeOptions.minTime;
2208 return _context11.abrupt("return", {
2209 success: true,
2210 wait: wait,
2211 reservoir: this.storeOptions.reservoir
2212 });
2213
2214 case 11:
2215 return _context11.abrupt("return", {
2216 success: false
2217 });
2218
2219 case 12:
2220 case "end":
2221 return _context11.stop();
2222 }
2223 }
2224 }, _callee11, this);
2225 }));
2226
2227 function __register__(_x7, _x8, _x9) {
2228 return _register__.apply(this, arguments);
2229 }
2230
2231 return __register__;
2232 }()
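        // Editor's note (added): on success, __register__ reserves the job's weight against the
        // local counters: _running grows by `weight`, the reservoir (if any) shrinks by `weight`,
        // and the caller is told to wait max(_nextRequest - now, 0) ms before starting. The next
        // request is then scheduled at now + wait + minTime, which is what enforces the minTime
        // spacing between jobs.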
2233 }, {
2234 key: "strategyIsBlock",
2235 value: function strategyIsBlock() {
2236 return this.storeOptions.strategy === 3;
2237 }
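        // Editor's note (added): the literal 3 here corresponds to Bottleneck.strategy.BLOCK.
        // Per the Bottleneck documentation the strategies are LEAK (1), OVERFLOW (2), BLOCK (3)
        // and OVERFLOW_PRIORITY (4); only the BLOCK value is visible in this file, so treat the
        // other numbers as an editorial annotation rather than something asserted by this code.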
2238 }, {
2239 key: "__submit__",
2240 value: function () {
2241 var _submit__ = _asyncToGenerator(
2242 /*#__PURE__*/
2243 regeneratorRuntime.mark(function _callee12(queueLength, weight) {
2244 var blocked, now, reachedHWM;
2245 return regeneratorRuntime.wrap(function _callee12$(_context12) {
2246 while (1) {
2247 switch (_context12.prev = _context12.next) {
2248 case 0:
2249 _context12.next = 2;
2250 return this.yieldLoop();
2251
2252 case 2:
2253 if (!(this.storeOptions.maxConcurrent != null && weight > this.storeOptions.maxConcurrent)) {
2254 _context12.next = 4;
2255 break;
2256 }
2257
2258 throw new BottleneckError$2("Impossible to add a job having a weight of ".concat(weight, " to a limiter having a maxConcurrent setting of ").concat(this.storeOptions.maxConcurrent));
2259
2260 case 4:
2261 now = Date.now();
2262 reachedHWM = this.storeOptions.highWater != null && queueLength === this.storeOptions.highWater && !this.check(weight, now);
2263 blocked = this.strategyIsBlock() && (reachedHWM || this.isBlocked(now));
2264
2265 if (blocked) {
2266 this._unblockTime = now + this.computePenalty();
2267 this._nextRequest = this._unblockTime + this.storeOptions.minTime;
2268
2269 this.instance._dropAllQueued();
2270 }
2271
2272 return _context12.abrupt("return", {
2273 reachedHWM: reachedHWM,
2274 blocked: blocked,
2275 strategy: this.storeOptions.strategy
2276 });
2277
2278 case 9:
2279 case "end":
2280 return _context12.stop();
2281 }
2282 }
2283 }, _callee12, this);
2284 }));
2285
2286 function __submit__(_x10, _x11) {
2287 return _submit__.apply(this, arguments);
2288 }
2289
2290 return __submit__;
2291 }()
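        // Editor's note (added): __submit__ mirrors submit.lua further down. reachedHWM is true
        // when the queue already sits at highWater and the new job could not start immediately;
        // with the BLOCK strategy that (or an already-active block) drops every queued job and
        // pushes _unblockTime forward by this.computePenalty(). computePenalty is defined earlier
        // in this class; judging from submit.lua it presumably defaults to 15 * minTime, or
        // 5000 ms when minTime is 0, but that default is an inference, not shown at this point.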
2292 }, {
2293 key: "__free__",
2294 value: function () {
2295 var _free__ = _asyncToGenerator(
2296 /*#__PURE__*/
2297 regeneratorRuntime.mark(function _callee13(index, weight) {
2298 return regeneratorRuntime.wrap(function _callee13$(_context13) {
2299 while (1) {
2300 switch (_context13.prev = _context13.next) {
2301 case 0:
2302 _context13.next = 2;
2303 return this.yieldLoop();
2304
2305 case 2:
2306 this._running -= weight;
2307 this._done += weight;
2308
2309 this.instance._drainAll(this.computeCapacity());
2310
2311 return _context13.abrupt("return", {
2312 running: this._running
2313 });
2314
2315 case 6:
2316 case "end":
2317 return _context13.stop();
2318 }
2319 }
2320 }, _callee13, this);
2321 }));
2322
2323 function __free__(_x12, _x13) {
2324 return _free__.apply(this, arguments);
2325 }
2326
2327 return __free__;
2328 }()
2329 }]);
2330
2331 return LocalDatastore;
2332 }();
2333
2334 var LocalDatastore_1 = LocalDatastore;
2335
2336 var lua = {
2337 "blacklist_client.lua": "local blacklist = ARGV[num_static_argv + 1]\n\nif redis.call('zscore', client_last_seen_key, blacklist) then\n redis.call('zadd', client_last_seen_key, 0, blacklist)\nend\n\n\nreturn {}\n",
2338 "check.lua": "local weight = tonumber(ARGV[num_static_argv + 1])\n\nlocal capacity = process_tick(now, false)['capacity']\nlocal nextRequest = tonumber(redis.call('hget', settings_key, 'nextRequest'))\n\nreturn conditions_check(capacity, weight) and nextRequest - now <= 0\n",
2339 "conditions_check.lua": "local conditions_check = function (capacity, weight)\n return capacity == nil or weight <= capacity\nend\n",
2340 "current_reservoir.lua": "return process_tick(now, false)['reservoir']\n",
2341 "done.lua": "process_tick(now, false)\n\nreturn tonumber(redis.call('hget', settings_key, 'done'))\n",
2342 "free.lua": "local index = ARGV[num_static_argv + 1]\n\nredis.call('zadd', job_expirations_key, 0, index)\n\nreturn process_tick(now, false)['running']\n",
2343 "get_time.lua": "redis.replicate_commands()\n\nlocal get_time = function ()\n local time = redis.call('time')\n\n return tonumber(time[1]..string.sub(time[2], 1, 3))\nend\n",
2344 "group_check.lua": "return not (redis.call('exists', settings_key) == 1)\n",
2345 "heartbeat.lua": "process_tick(now, true)\n",
2346 "increment_reservoir.lua": "local incr = tonumber(ARGV[num_static_argv + 1])\n\nredis.call('hincrby', settings_key, 'reservoir', incr)\n\nlocal reservoir = process_tick(now, true)['reservoir']\n\nlocal groupTimeout = tonumber(redis.call('hget', settings_key, 'groupTimeout'))\nrefresh_expiration(0, 0, groupTimeout)\n\nreturn reservoir\n",
2347 "init.lua": "local clear = tonumber(ARGV[num_static_argv + 1])\nlocal limiter_version = ARGV[num_static_argv + 2]\nlocal num_local_argv = num_static_argv + 2\n\nif clear == 1 then\n redis.call('del', unpack(KEYS))\nend\n\nif redis.call('exists', settings_key) == 0 then\n -- Create\n local args = {'hmset', settings_key}\n\n for i = num_local_argv + 1, #ARGV do\n table.insert(args, ARGV[i])\n end\n\n redis.call(unpack(args))\n redis.call('hmset', settings_key,\n 'nextRequest', now,\n 'lastReservoirRefresh', now,\n 'lastReservoirIncrease', now,\n 'running', 0,\n 'done', 0,\n 'unblockTime', 0,\n 'capacityPriorityCounter', 0\n )\n\nelse\n -- Apply migrations\n local settings = redis.call('hmget', settings_key,\n 'id',\n 'version'\n )\n local id = settings[1]\n local current_version = settings[2]\n\n if current_version ~= limiter_version then\n local version_digits = {}\n for k, v in string.gmatch(current_version, \"([^.]+)\") do\n table.insert(version_digits, tonumber(k))\n end\n\n -- 2.10.0\n if version_digits[2] < 10 then\n redis.call('hsetnx', settings_key, 'reservoirRefreshInterval', '')\n redis.call('hsetnx', settings_key, 'reservoirRefreshAmount', '')\n redis.call('hsetnx', settings_key, 'lastReservoirRefresh', '')\n redis.call('hsetnx', settings_key, 'done', 0)\n redis.call('hset', settings_key, 'version', '2.10.0')\n end\n\n -- 2.11.1\n if version_digits[2] < 11 or (version_digits[2] == 11 and version_digits[3] < 1) then\n if redis.call('hstrlen', settings_key, 'lastReservoirRefresh') == 0 then\n redis.call('hmset', settings_key,\n 'lastReservoirRefresh', now,\n 'version', '2.11.1'\n )\n end\n end\n\n -- 2.14.0\n if version_digits[2] < 14 then\n local old_running_key = 'b_'..id..'_running'\n local old_executing_key = 'b_'..id..'_executing'\n\n if redis.call('exists', old_running_key) == 1 then\n redis.call('rename', old_running_key, job_weights_key)\n end\n if redis.call('exists', old_executing_key) == 1 then\n redis.call('rename', old_executing_key, job_expirations_key)\n end\n redis.call('hset', settings_key, 'version', '2.14.0')\n end\n\n -- 2.15.2\n if version_digits[2] < 15 or (version_digits[2] == 15 and version_digits[3] < 2) then\n redis.call('hsetnx', settings_key, 'capacityPriorityCounter', 0)\n redis.call('hset', settings_key, 'version', '2.15.2')\n end\n\n -- 2.17.0\n if version_digits[2] < 17 then\n redis.call('hsetnx', settings_key, 'clientTimeout', 10000)\n redis.call('hset', settings_key, 'version', '2.17.0')\n end\n\n -- 2.18.0\n if version_digits[2] < 18 then\n redis.call('hsetnx', settings_key, 'reservoirIncreaseInterval', '')\n redis.call('hsetnx', settings_key, 'reservoirIncreaseAmount', '')\n redis.call('hsetnx', settings_key, 'reservoirIncreaseMaximum', '')\n redis.call('hsetnx', settings_key, 'lastReservoirIncrease', now)\n redis.call('hset', settings_key, 'version', '2.18.0')\n end\n\n end\n\n process_tick(now, false)\nend\n\nlocal groupTimeout = tonumber(redis.call('hget', settings_key, 'groupTimeout'))\nrefresh_expiration(0, 0, groupTimeout)\n\nreturn {}\n",
2348 "process_tick.lua": "local process_tick = function (now, always_publish)\n\n local compute_capacity = function (maxConcurrent, running, reservoir)\n if maxConcurrent ~= nil and reservoir ~= nil then\n return math.min((maxConcurrent - running), reservoir)\n elseif maxConcurrent ~= nil then\n return maxConcurrent - running\n elseif reservoir ~= nil then\n return reservoir\n else\n return nil\n end\n end\n\n local settings = redis.call('hmget', settings_key,\n 'id',\n 'maxConcurrent',\n 'running',\n 'reservoir',\n 'reservoirRefreshInterval',\n 'reservoirRefreshAmount',\n 'lastReservoirRefresh',\n 'reservoirIncreaseInterval',\n 'reservoirIncreaseAmount',\n 'reservoirIncreaseMaximum',\n 'lastReservoirIncrease',\n 'capacityPriorityCounter',\n 'clientTimeout'\n )\n local id = settings[1]\n local maxConcurrent = tonumber(settings[2])\n local running = tonumber(settings[3])\n local reservoir = tonumber(settings[4])\n local reservoirRefreshInterval = tonumber(settings[5])\n local reservoirRefreshAmount = tonumber(settings[6])\n local lastReservoirRefresh = tonumber(settings[7])\n local reservoirIncreaseInterval = tonumber(settings[8])\n local reservoirIncreaseAmount = tonumber(settings[9])\n local reservoirIncreaseMaximum = tonumber(settings[10])\n local lastReservoirIncrease = tonumber(settings[11])\n local capacityPriorityCounter = tonumber(settings[12])\n local clientTimeout = tonumber(settings[13])\n\n local initial_capacity = compute_capacity(maxConcurrent, running, reservoir)\n\n --\n -- Process 'running' changes\n --\n local expired = redis.call('zrangebyscore', job_expirations_key, '-inf', '('..now)\n\n if #expired > 0 then\n redis.call('zremrangebyscore', job_expirations_key, '-inf', '('..now)\n\n local flush_batch = function (batch, acc)\n local weights = redis.call('hmget', job_weights_key, unpack(batch))\n redis.call('hdel', job_weights_key, unpack(batch))\n local clients = redis.call('hmget', job_clients_key, unpack(batch))\n redis.call('hdel', job_clients_key, unpack(batch))\n\n -- Calculate sum of removed weights\n for i = 1, #weights do\n acc['total'] = acc['total'] + (tonumber(weights[i]) or 0)\n end\n\n -- Calculate sum of removed weights by client\n local client_weights = {}\n for i = 1, #clients do\n local removed = tonumber(weights[i]) or 0\n if removed > 0 then\n acc['client_weights'][clients[i]] = (acc['client_weights'][clients[i]] or 0) + removed\n end\n end\n end\n\n local acc = {\n ['total'] = 0,\n ['client_weights'] = {}\n }\n local batch_size = 1000\n\n -- Compute changes to Zsets and apply changes to Hashes\n for i = 1, #expired, batch_size do\n local batch = {}\n for j = i, math.min(i + batch_size - 1, #expired) do\n table.insert(batch, expired[j])\n end\n\n flush_batch(batch, acc)\n end\n\n -- Apply changes to Zsets\n if acc['total'] > 0 then\n redis.call('hincrby', settings_key, 'done', acc['total'])\n running = tonumber(redis.call('hincrby', settings_key, 'running', -acc['total']))\n end\n\n for client, weight in pairs(acc['client_weights']) do\n redis.call('zincrby', client_running_key, -weight, client)\n end\n end\n\n --\n -- Process 'reservoir' changes\n --\n local reservoirRefreshActive = reservoirRefreshInterval ~= nil and reservoirRefreshAmount ~= nil\n if reservoirRefreshActive and now >= lastReservoirRefresh + reservoirRefreshInterval then\n reservoir = reservoirRefreshAmount\n redis.call('hmset', settings_key,\n 'reservoir', reservoir,\n 'lastReservoirRefresh', now\n )\n end\n\n local reservoirIncreaseActive = reservoirIncreaseInterval ~= nil and 
reservoirIncreaseAmount ~= nil\n if reservoirIncreaseActive and now >= lastReservoirIncrease + reservoirIncreaseInterval then\n local num_intervals = math.floor((now - lastReservoirIncrease) / reservoirIncreaseInterval)\n local incr = reservoirIncreaseAmount * num_intervals\n if reservoirIncreaseMaximum ~= nil then\n incr = math.min(incr, reservoirIncreaseMaximum - (reservoir or 0))\n end\n if incr > 0 then\n reservoir = (reservoir or 0) + incr\n end\n redis.call('hmset', settings_key,\n 'reservoir', reservoir,\n 'lastReservoirIncrease', lastReservoirIncrease + (num_intervals * reservoirIncreaseInterval)\n )\n end\n\n --\n -- Clear unresponsive clients\n --\n local unresponsive = redis.call('zrangebyscore', client_last_seen_key, '-inf', (now - clientTimeout))\n local unresponsive_lookup = {}\n local terminated_clients = {}\n for i = 1, #unresponsive do\n unresponsive_lookup[unresponsive[i]] = true\n if tonumber(redis.call('zscore', client_running_key, unresponsive[i])) == 0 then\n table.insert(terminated_clients, unresponsive[i])\n end\n end\n if #terminated_clients > 0 then\n redis.call('zrem', client_running_key, unpack(terminated_clients))\n redis.call('hdel', client_num_queued_key, unpack(terminated_clients))\n redis.call('zrem', client_last_registered_key, unpack(terminated_clients))\n redis.call('zrem', client_last_seen_key, unpack(terminated_clients))\n end\n\n --\n -- Broadcast capacity changes\n --\n local final_capacity = compute_capacity(maxConcurrent, running, reservoir)\n\n if always_publish or (initial_capacity ~= nil and final_capacity == nil) then\n -- always_publish or was not unlimited, now unlimited\n redis.call('publish', 'b_'..id, 'capacity:'..(final_capacity or ''))\n\n elseif initial_capacity ~= nil and final_capacity ~= nil and final_capacity > initial_capacity then\n -- capacity was increased\n -- send the capacity message to the limiter having the lowest number of running jobs\n -- the tiebreaker is the limiter having not registered a job in the longest time\n\n local lowest_concurrency_value = nil\n local lowest_concurrency_clients = {}\n local lowest_concurrency_last_registered = {}\n local client_concurrencies = redis.call('zrange', client_running_key, 0, -1, 'withscores')\n\n for i = 1, #client_concurrencies, 2 do\n local client = client_concurrencies[i]\n local concurrency = tonumber(client_concurrencies[i+1])\n\n if (\n lowest_concurrency_value == nil or lowest_concurrency_value == concurrency\n ) and (\n not unresponsive_lookup[client]\n ) and (\n tonumber(redis.call('hget', client_num_queued_key, client)) > 0\n ) then\n lowest_concurrency_value = concurrency\n table.insert(lowest_concurrency_clients, client)\n local last_registered = tonumber(redis.call('zscore', client_last_registered_key, client))\n table.insert(lowest_concurrency_last_registered, last_registered)\n end\n end\n\n if #lowest_concurrency_clients > 0 then\n local position = 1\n local earliest = lowest_concurrency_last_registered[1]\n\n for i,v in ipairs(lowest_concurrency_last_registered) do\n if v < earliest then\n position = i\n earliest = v\n end\n end\n\n local next_client = lowest_concurrency_clients[position]\n redis.call('publish', 'b_'..id,\n 'capacity-priority:'..(final_capacity or '')..\n ':'..next_client..\n ':'..capacityPriorityCounter\n )\n redis.call('hincrby', settings_key, 'capacityPriorityCounter', '1')\n else\n redis.call('publish', 'b_'..id, 'capacity:'..(final_capacity or ''))\n end\n end\n\n return {\n ['capacity'] = final_capacity,\n ['running'] = running,\n 
['reservoir'] = reservoir\n }\nend\n",
2349 "queued.lua": "local clientTimeout = tonumber(redis.call('hget', settings_key, 'clientTimeout'))\nlocal valid_clients = redis.call('zrangebyscore', client_last_seen_key, (now - clientTimeout), 'inf')\nlocal client_queued = redis.call('hmget', client_num_queued_key, unpack(valid_clients))\n\nlocal sum = 0\nfor i = 1, #client_queued do\n sum = sum + tonumber(client_queued[i])\nend\n\nreturn sum\n",
2350 "refresh_expiration.lua": "local refresh_expiration = function (now, nextRequest, groupTimeout)\n\n if groupTimeout ~= nil then\n local ttl = (nextRequest + groupTimeout) - now\n\n for i = 1, #KEYS do\n redis.call('pexpire', KEYS[i], ttl)\n end\n end\n\nend\n",
2351 "refs.lua": "local settings_key = KEYS[1]\nlocal job_weights_key = KEYS[2]\nlocal job_expirations_key = KEYS[3]\nlocal job_clients_key = KEYS[4]\nlocal client_running_key = KEYS[5]\nlocal client_num_queued_key = KEYS[6]\nlocal client_last_registered_key = KEYS[7]\nlocal client_last_seen_key = KEYS[8]\n\nlocal now = tonumber(ARGV[1])\nlocal client = ARGV[2]\n\nlocal num_static_argv = 2\n",
2352 "register.lua": "local index = ARGV[num_static_argv + 1]\nlocal weight = tonumber(ARGV[num_static_argv + 2])\nlocal expiration = tonumber(ARGV[num_static_argv + 3])\n\nlocal state = process_tick(now, false)\nlocal capacity = state['capacity']\nlocal reservoir = state['reservoir']\n\nlocal settings = redis.call('hmget', settings_key,\n 'nextRequest',\n 'minTime',\n 'groupTimeout'\n)\nlocal nextRequest = tonumber(settings[1])\nlocal minTime = tonumber(settings[2])\nlocal groupTimeout = tonumber(settings[3])\n\nif conditions_check(capacity, weight) then\n\n redis.call('hincrby', settings_key, 'running', weight)\n redis.call('hset', job_weights_key, index, weight)\n if expiration ~= nil then\n redis.call('zadd', job_expirations_key, now + expiration, index)\n end\n redis.call('hset', job_clients_key, index, client)\n redis.call('zincrby', client_running_key, weight, client)\n redis.call('hincrby', client_num_queued_key, client, -1)\n redis.call('zadd', client_last_registered_key, now, client)\n\n local wait = math.max(nextRequest - now, 0)\n local newNextRequest = now + wait + minTime\n\n if reservoir == nil then\n redis.call('hset', settings_key,\n 'nextRequest', newNextRequest\n )\n else\n reservoir = reservoir - weight\n redis.call('hmset', settings_key,\n 'reservoir', reservoir,\n 'nextRequest', newNextRequest\n )\n end\n\n refresh_expiration(now, newNextRequest, groupTimeout)\n\n return {true, wait, reservoir}\n\nelse\n return {false}\nend\n",
2353 "register_client.lua": "local queued = tonumber(ARGV[num_static_argv + 1])\n\n-- Could have been re-registered concurrently\nif not redis.call('zscore', client_last_seen_key, client) then\n redis.call('zadd', client_running_key, 0, client)\n redis.call('hset', client_num_queued_key, client, queued)\n redis.call('zadd', client_last_registered_key, 0, client)\nend\n\nredis.call('zadd', client_last_seen_key, now, client)\n\nreturn {}\n",
2354 "running.lua": "return process_tick(now, false)['running']\n",
2355 "submit.lua": "local queueLength = tonumber(ARGV[num_static_argv + 1])\nlocal weight = tonumber(ARGV[num_static_argv + 2])\n\nlocal capacity = process_tick(now, false)['capacity']\n\nlocal settings = redis.call('hmget', settings_key,\n 'id',\n 'maxConcurrent',\n 'highWater',\n 'nextRequest',\n 'strategy',\n 'unblockTime',\n 'penalty',\n 'minTime',\n 'groupTimeout'\n)\nlocal id = settings[1]\nlocal maxConcurrent = tonumber(settings[2])\nlocal highWater = tonumber(settings[3])\nlocal nextRequest = tonumber(settings[4])\nlocal strategy = tonumber(settings[5])\nlocal unblockTime = tonumber(settings[6])\nlocal penalty = tonumber(settings[7])\nlocal minTime = tonumber(settings[8])\nlocal groupTimeout = tonumber(settings[9])\n\nif maxConcurrent ~= nil and weight > maxConcurrent then\n return redis.error_reply('OVERWEIGHT:'..weight..':'..maxConcurrent)\nend\n\nlocal reachedHWM = (highWater ~= nil and queueLength == highWater\n and not (\n conditions_check(capacity, weight)\n and nextRequest - now <= 0\n )\n)\n\nlocal blocked = strategy == 3 and (reachedHWM or unblockTime >= now)\n\nif blocked then\n local computedPenalty = penalty\n if computedPenalty == nil then\n if minTime == 0 then\n computedPenalty = 5000\n else\n computedPenalty = 15 * minTime\n end\n end\n\n local newNextRequest = now + computedPenalty + minTime\n\n redis.call('hmset', settings_key,\n 'unblockTime', now + computedPenalty,\n 'nextRequest', newNextRequest\n )\n\n local clients_queued_reset = redis.call('hkeys', client_num_queued_key)\n local queued_reset = {}\n for i = 1, #clients_queued_reset do\n table.insert(queued_reset, clients_queued_reset[i])\n table.insert(queued_reset, 0)\n end\n redis.call('hmset', client_num_queued_key, unpack(queued_reset))\n\n redis.call('publish', 'b_'..id, 'blocked:')\n\n refresh_expiration(now, newNextRequest, groupTimeout)\nend\n\nif not blocked and not reachedHWM then\n redis.call('hincrby', client_num_queued_key, client, 1)\nend\n\nreturn {reachedHWM, blocked, strategy}\n",
2356 "update_settings.lua": "local args = {'hmset', settings_key}\n\nfor i = num_static_argv + 1, #ARGV do\n table.insert(args, ARGV[i])\nend\n\nredis.call(unpack(args))\n\nprocess_tick(now, true)\n\nlocal groupTimeout = tonumber(redis.call('hget', settings_key, 'groupTimeout'))\nrefresh_expiration(0, 0, groupTimeout)\n\nreturn {}\n",
2357 "validate_client.lua": "if not redis.call('zscore', client_last_seen_key, client) then\n return redis.error_reply('UNKNOWN_CLIENT')\nend\n\nredis.call('zadd', client_last_seen_key, now, client)\n",
2358 "validate_keys.lua": "if not (redis.call('exists', settings_key) == 1) then\n return redis.error_reply('SETTINGS_KEY_NOT_FOUND')\nend\n"
2359 };
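    // Editor's note (added): the `lua` map above embeds one Redis Lua fragment per datastore
    // operation. refs.lua only declares the shared KEYS/ARGV bindings, while entries such as
    // validate_keys.lua, validate_client.lua, conditions_check.lua, process_tick.lua,
    // refresh_expiration.lua and get_time.lua act as shared "headers" that the Scripts module
    // below prepends to the operation scripts, so each entry is a fragment rather than a
    // complete script on its own.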
2360
2361 var lua$1 = /*#__PURE__*/Object.freeze({
2362 default: lua
2363 });
2364
2365 var require$$0 = getCjsExportFromNamespace(lua$1);
2366
2367 var Scripts = createCommonjsModule(function (module, exports) {
2368 var headers, lua, templates;
2369 lua = require$$0;
2370 headers = {
2371 refs: lua["refs.lua"],
2372 validate_keys: lua["validate_keys.lua"],
2373 validate_client: lua["validate_client.lua"],
2374 refresh_expiration: lua["refresh_expiration.lua"],
2375 process_tick: lua["process_tick.lua"],
2376 conditions_check: lua["conditions_check.lua"],
2377 get_time: lua["get_time.lua"]
2378 };
2379
2380 exports.allKeys = function (id) {
2381 return [
2382 /*
2383 HASH
2384 */
2385 "b_".concat(id, "_settings"),
2386 /*
2387 HASH
2388 job index -> weight
2389 */
2390 "b_".concat(id, "_job_weights"),
2391 /*
2392 ZSET
2393 job index -> expiration
2394 */
2395 "b_".concat(id, "_job_expirations"),
2396 /*
2397 HASH
2398 job index -> client
2399 */
2400 "b_".concat(id, "_job_clients"),
2401 /*
2402 ZSET
2403 client -> sum running
2404 */
2405 "b_".concat(id, "_client_running"),
2406 /*
2407 HASH
2408 client -> num queued
2409 */
2410 "b_".concat(id, "_client_num_queued"),
2411 /*
2412 ZSET
2413 client -> last job registered
2414 */
2415 "b_".concat(id, "_client_last_registered"),
2416 /*
2417 ZSET
2418 client -> last seen
2419 */
2420 "b_".concat(id, "_client_last_seen")];
2421 };
2422
2423 templates = {
2424 init: {
2425 keys: exports.allKeys,
2426 headers: ["process_tick"],
2427 refresh_expiration: true,
2428 code: lua["init.lua"]
2429 },
2430 group_check: {
2431 keys: exports.allKeys,
2432 headers: [],
2433 refresh_expiration: false,
2434 code: lua["group_check.lua"]
2435 },
2436 register_client: {
2437 keys: exports.allKeys,
2438 headers: ["validate_keys"],
2439 refresh_expiration: false,
2440 code: lua["register_client.lua"]
2441 },
2442 blacklist_client: {
2443 keys: exports.allKeys,
2444 headers: ["validate_keys", "validate_client"],
2445 refresh_expiration: false,
2446 code: lua["blacklist_client.lua"]
2447 },
2448 heartbeat: {
2449 keys: exports.allKeys,
2450 headers: ["validate_keys", "validate_client", "process_tick"],
2451 refresh_expiration: false,
2452 code: lua["heartbeat.lua"]
2453 },
2454 update_settings: {
2455 keys: exports.allKeys,
2456 headers: ["validate_keys", "validate_client", "process_tick"],
2457 refresh_expiration: true,
2458 code: lua["update_settings.lua"]
2459 },
2460 running: {
2461 keys: exports.allKeys,
2462 headers: ["validate_keys", "validate_client", "process_tick"],
2463 refresh_expiration: false,
2464 code: lua["running.lua"]
2465 },
2466 queued: {
2467 keys: exports.allKeys,
2468 headers: ["validate_keys", "validate_client"],
2469 refresh_expiration: false,
2470 code: lua["queued.lua"]
2471 },
2472 done: {
2473 keys: exports.allKeys,
2474 headers: ["validate_keys", "validate_client", "process_tick"],
2475 refresh_expiration: false,
2476 code: lua["done.lua"]
2477 },
2478 check: {
2479 keys: exports.allKeys,
2480 headers: ["validate_keys", "validate_client", "process_tick", "conditions_check"],
2481 refresh_expiration: false,
2482 code: lua["check.lua"]
2483 },
2484 submit: {
2485 keys: exports.allKeys,
2486 headers: ["validate_keys", "validate_client", "process_tick", "conditions_check"],
2487 refresh_expiration: true,
2488 code: lua["submit.lua"]
2489 },
2490 register: {
2491 keys: exports.allKeys,
2492 headers: ["validate_keys", "validate_client", "process_tick", "conditions_check"],
2493 refresh_expiration: true,
2494 code: lua["register.lua"]
2495 },
2496 free: {
2497 keys: exports.allKeys,
2498 headers: ["validate_keys", "validate_client", "process_tick"],
2499 refresh_expiration: true,
2500 code: lua["free.lua"]
2501 },
2502 current_reservoir: {
2503 keys: exports.allKeys,
2504 headers: ["validate_keys", "validate_client", "process_tick"],
2505 refresh_expiration: false,
2506 code: lua["current_reservoir.lua"]
2507 },
2508 increment_reservoir: {
2509 keys: exports.allKeys,
2510 headers: ["validate_keys", "validate_client", "process_tick"],
2511 refresh_expiration: true,
2512 code: lua["increment_reservoir.lua"]
2513 }
2514 };
2515 exports.names = Object.keys(templates);
2516
2517 exports.keys = function (name, id) {
2518 return templates[name].keys(id);
2519 };
2520
2521 exports.payload = function (name) {
2522 var template;
2523 template = templates[name];
2524 return Array.prototype.concat(headers.refs, template.headers.map(function (h) {
2525 return headers[h];
2526 }), template.refresh_expiration ? headers.refresh_expiration : "", template.code).join("\n");
2527 };
2528 });
2529 var Scripts_1 = Scripts.allKeys;
2530 var Scripts_2 = Scripts.names;
2531 var Scripts_3 = Scripts.keys;
2532 var Scripts_4 = Scripts.payload;
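    // Editor's note (added): payload(name) simply concatenates refs.lua, the template's headers,
    // optionally refresh_expiration.lua, and the operation script, joined with newlines. As an
    // illustrative sketch (not output captured from a run):
    //
    //   Scripts.payload("running")
    //   // => refs.lua + validate_keys.lua + validate_client.lua + process_tick.lua + running.lua
    //
    //   Scripts.keys("running", "my-id")
    //   // => ["b_my-id_settings", "b_my-id_job_weights", ..., "b_my-id_client_last_seen"]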
2533
2534 var Events$2, RedisConnection, Scripts$1, parser$3;
2535 parser$3 = parser;
2536 Events$2 = Events_1;
2537 Scripts$1 = Scripts;
2538
2539 RedisConnection = function () {
2540 var RedisConnection =
2541 /*#__PURE__*/
2542 function () {
2543 function RedisConnection() {
2544 var _this = this;
2545
2546 var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
2547
2548 _classCallCheck(this, RedisConnection);
2549
2550 var Redis;
2551 Redis = eval("require")("redis"); // Obfuscated or else Webpack/Angular will try to inline the optional redis module
2552
2553 parser$3.load(options, this.defaults, this);
2554
2555 if (this.Events == null) {
2556 this.Events = new Events$2(this);
2557 }
2558
2559 this.terminated = false;
2560
2561 if (this.client == null) {
2562 this.client = Redis.createClient(this.clientOptions);
2563 }
2564
2565 this.subscriber = this.client.duplicate();
2566 this.limiters = {};
2567 this.shas = {};
2568 this.ready = this.Promise.all([this._setup(this.client, false), this._setup(this.subscriber, true)]).then(function () {
2569 return _this._loadScripts();
2570 }).then(function () {
2571 return {
2572 client: _this.client,
2573 subscriber: _this.subscriber
2574 };
2575 });
2576 }
2577
2578 _createClass(RedisConnection, [{
2579 key: "_setup",
2580 value: function _setup(client, sub) {
2581 var _this2 = this;
2582
2583 client.setMaxListeners(0);
2584 return new this.Promise(function (resolve, reject) {
2585 client.on("error", function (e) {
2586 return _this2.Events.trigger("error", e);
2587 });
2588
2589 if (sub) {
2590 client.on("message", function (channel, message) {
2591 var ref;
2592 return (ref = _this2.limiters[channel]) != null ? ref._store.onMessage(channel, message) : void 0;
2593 });
2594 }
2595
2596 if (client.ready) {
2597 return resolve();
2598 } else {
2599 return client.once("ready", resolve);
2600 }
2601 });
2602 }
2603 }, {
2604 key: "_loadScript",
2605 value: function _loadScript(name) {
2606 var _this3 = this;
2607
2608 return new this.Promise(function (resolve, reject) {
2609 var payload;
2610 payload = Scripts$1.payload(name);
2611 return _this3.client.multi([["script", "load", payload]]).exec(function (err, replies) {
2612 if (err != null) {
2613 return reject(err);
2614 }
2615
2616 _this3.shas[name] = replies[0];
2617 return resolve(replies[0]);
2618 });
2619 });
2620 }
2621 }, {
2622 key: "_loadScripts",
2623 value: function _loadScripts() {
2624 var _this4 = this;
2625
2626 return this.Promise.all(Scripts$1.names.map(function (k) {
2627 return _this4._loadScript(k);
2628 }));
2629 }
2630 }, {
2631 key: "__runCommand__",
2632 value: function () {
2633 var _runCommand__ = _asyncToGenerator(
2634 /*#__PURE__*/
2635 regeneratorRuntime.mark(function _callee(cmd) {
2636 var _this5 = this;
2637
2638 return regeneratorRuntime.wrap(function _callee$(_context) {
2639 while (1) {
2640 switch (_context.prev = _context.next) {
2641 case 0:
2642 _context.next = 2;
2643 return this.ready;
2644
2645 case 2:
2646 return _context.abrupt("return", new this.Promise(function (resolve, reject) {
2647 return _this5.client.multi([cmd]).exec_atomic(function (err, replies) {
2648 if (err != null) {
2649 return reject(err);
2650 } else {
2651 return resolve(replies[0]);
2652 }
2653 });
2654 }));
2655
2656 case 3:
2657 case "end":
2658 return _context.stop();
2659 }
2660 }
2661 }, _callee, this);
2662 }));
2663
2664 function __runCommand__(_x) {
2665 return _runCommand__.apply(this, arguments);
2666 }
2667
2668 return __runCommand__;
2669 }()
2670 }, {
2671 key: "__addLimiter__",
2672 value: function __addLimiter__(instance) {
2673 var _this6 = this;
2674
2675 return this.Promise.all([instance.channel(), instance.channel_client()].map(function (channel) {
2676 return new _this6.Promise(function (resolve, reject) {
2677 var _handler;
2678
2679 _handler = function handler(chan) {
2680 if (chan === channel) {
2681 _this6.subscriber.removeListener("subscribe", _handler);
2682
2683 _this6.limiters[channel] = instance;
2684 return resolve();
2685 }
2686 };
2687
2688 _this6.subscriber.on("subscribe", _handler);
2689
2690 return _this6.subscriber.subscribe(channel);
2691 });
2692 }));
2693 }
2694 }, {
2695 key: "__removeLimiter__",
2696 value: function __removeLimiter__(instance) {
2697 var _this7 = this;
2698
2699 return this.Promise.all([instance.channel(), instance.channel_client()].map(
2700 /*#__PURE__*/
2701 function () {
2702 var _ref = _asyncToGenerator(
2703 /*#__PURE__*/
2704 regeneratorRuntime.mark(function _callee2(channel) {
2705 return regeneratorRuntime.wrap(function _callee2$(_context2) {
2706 while (1) {
2707 switch (_context2.prev = _context2.next) {
2708 case 0:
2709 if (_this7.terminated) {
2710 _context2.next = 3;
2711 break;
2712 }
2713
2714 _context2.next = 3;
2715 return new _this7.Promise(function (resolve, reject) {
2716 return _this7.subscriber.unsubscribe(channel, function (err, chan) {
2717 if (err != null) {
2718 return reject(err);
2719 }
2720
2721 if (chan === channel) {
2722 return resolve();
2723 }
2724 });
2725 });
2726
2727 case 3:
2728 return _context2.abrupt("return", delete _this7.limiters[channel]);
2729
2730 case 4:
2731 case "end":
2732 return _context2.stop();
2733 }
2734 }
2735 }, _callee2);
2736 }));
2737
2738 return function (_x2) {
2739 return _ref.apply(this, arguments);
2740 };
2741 }()));
2742 }
2743 }, {
2744 key: "__scriptArgs__",
2745 value: function __scriptArgs__(name, id, args, cb) {
2746 var keys;
2747 keys = Scripts$1.keys(name, id);
2748 return [this.shas[name], keys.length].concat(keys, args, cb);
2749 }
2750 }, {
2751 key: "__scriptFn__",
2752 value: function __scriptFn__(name) {
2753 return this.client.evalsha.bind(this.client);
2754 }
2755 }, {
2756 key: "disconnect",
2757 value: function disconnect() {
2758 var flush = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : true;
2759 var i, k, len, ref;
2760 ref = Object.keys(this.limiters);
2761
2762 for (i = 0, len = ref.length; i < len; i++) {
2763 k = ref[i];
2764 clearInterval(this.limiters[k]._store.heartbeat);
2765 }
2766
2767 this.limiters = {};
2768 this.terminated = true;
2769 this.client.end(flush);
2770 this.subscriber.end(flush);
2771 return this.Promise.resolve();
2772 }
2773 }]);
2774
2775 return RedisConnection;
2776 }();
2777 RedisConnection.prototype.datastore = "redis";
2778 RedisConnection.prototype.defaults = {
2779 clientOptions: {},
2780 client: null,
2781 Promise: Promise,
2782 Events: null
2783 };
2784 return RedisConnection;
2785 }.call(commonjsGlobal);
2786
2787 var RedisConnection_1 = RedisConnection;
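    // Editor's note (added): RedisConnection wraps the optional `redis` (node_redis) client. It
    // loads every script once with SCRIPT LOAD and later invokes it through EVALSHA, and keeps a
    // duplicated client for pub/sub. A plausible usage sketch, based on Bottleneck's documented
    // options rather than on anything in this file (the id and connection values are hypothetical):
    //
    //   var limiter = new Bottleneck({
    //     id: "my-limiter",
    //     datastore: "redis",
    //     clientOptions: { host: "127.0.0.1", port: 6379 }
    //   });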
2788
2789 var Events$3, IORedisConnection, Scripts$2, parser$4;
2790 parser$4 = parser;
2791 Events$3 = Events_1;
2792 Scripts$2 = Scripts;
2793
2794 IORedisConnection = function () {
2795 var IORedisConnection =
2796 /*#__PURE__*/
2797 function () {
2798 function IORedisConnection() {
2799 var _this = this;
2800
2801 var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
2802
2803 _classCallCheck(this, IORedisConnection);
2804
2805 var Redis;
2806 Redis = eval("require")("ioredis"); // Obfuscated or else Webpack/Angular will try to inline the optional ioredis module
2807
2808 parser$4.load(options, this.defaults, this);
2809
2810 if (this.Events == null) {
2811 this.Events = new Events$3(this);
2812 }
2813
2814 this.terminated = false;
2815
2816 if (this.clusterNodes != null) {
2817 this.client = new Redis.Cluster(this.clusterNodes, this.clientOptions);
2818 this.subscriber = new Redis.Cluster(this.clusterNodes, this.clientOptions);
2819 } else if (this.client != null && this.client.duplicate == null) {
2820 this.subscriber = new Redis.Cluster(this.client.startupNodes, this.client.options);
2821 } else {
2822 if (this.client == null) {
2823 this.client = new Redis(this.clientOptions);
2824 }
2825
2826 this.subscriber = this.client.duplicate();
2827 }
2828
2829 this.limiters = {};
2830 this.ready = this.Promise.all([this._setup(this.client, false), this._setup(this.subscriber, true)]).then(function () {
2831 _this._loadScripts();
2832
2833 return {
2834 client: _this.client,
2835 subscriber: _this.subscriber
2836 };
2837 });
2838 }
2839
2840 _createClass(IORedisConnection, [{
2841 key: "_setup",
2842 value: function _setup(client, sub) {
2843 var _this2 = this;
2844
2845 client.setMaxListeners(0);
2846 return new this.Promise(function (resolve, reject) {
2847 client.on("error", function (e) {
2848 return _this2.Events.trigger("error", e);
2849 });
2850
2851 if (sub) {
2852 client.on("message", function (channel, message) {
2853 var ref;
2854 return (ref = _this2.limiters[channel]) != null ? ref._store.onMessage(channel, message) : void 0;
2855 });
2856 }
2857
2858 if (client.status === "ready") {
2859 return resolve();
2860 } else {
2861 return client.once("ready", resolve);
2862 }
2863 });
2864 }
2865 }, {
2866 key: "_loadScripts",
2867 value: function _loadScripts() {
2868 var _this3 = this;
2869
2870 return Scripts$2.names.forEach(function (name) {
2871 return _this3.client.defineCommand(name, {
2872 lua: Scripts$2.payload(name)
2873 });
2874 });
2875 }
2876 }, {
2877 key: "__runCommand__",
2878 value: function () {
2879 var _runCommand__ = _asyncToGenerator(
2880 /*#__PURE__*/
2881 regeneratorRuntime.mark(function _callee(cmd) {
2882 var _, deleted, _ref, _ref2, _ref2$;
2883
2884 return regeneratorRuntime.wrap(function _callee$(_context) {
2885 while (1) {
2886 switch (_context.prev = _context.next) {
2887 case 0:
2888 _context.next = 2;
2889 return this.ready;
2890
2891 case 2:
2892 _context.next = 4;
2893 return this.client.pipeline([cmd]).exec();
2894
2895 case 4:
2896 _ref = _context.sent;
2897 _ref2 = _slicedToArray(_ref, 1);
2898 _ref2$ = _slicedToArray(_ref2[0], 2);
2899 _ = _ref2$[0];
2900 deleted = _ref2$[1];
2901 return _context.abrupt("return", deleted);
2902
2903 case 10:
2904 case "end":
2905 return _context.stop();
2906 }
2907 }
2908 }, _callee, this);
2909 }));
2910
2911 function __runCommand__(_x) {
2912 return _runCommand__.apply(this, arguments);
2913 }
2914
2915 return __runCommand__;
2916 }()
2917 }, {
2918 key: "__addLimiter__",
2919 value: function __addLimiter__(instance) {
2920 var _this4 = this;
2921
2922 return this.Promise.all([instance.channel(), instance.channel_client()].map(function (channel) {
2923 return new _this4.Promise(function (resolve, reject) {
2924 return _this4.subscriber.subscribe(channel, function () {
2925 _this4.limiters[channel] = instance;
2926 return resolve();
2927 });
2928 });
2929 }));
2930 }
2931 }, {
2932 key: "__removeLimiter__",
2933 value: function __removeLimiter__(instance) {
2934 var _this5 = this;
2935
2936 return [instance.channel(), instance.channel_client()].forEach(
2937 /*#__PURE__*/
2938 function () {
2939 var _ref3 = _asyncToGenerator(
2940 /*#__PURE__*/
2941 regeneratorRuntime.mark(function _callee2(channel) {
2942 return regeneratorRuntime.wrap(function _callee2$(_context2) {
2943 while (1) {
2944 switch (_context2.prev = _context2.next) {
2945 case 0:
2946 if (_this5.terminated) {
2947 _context2.next = 3;
2948 break;
2949 }
2950
2951 _context2.next = 3;
2952 return _this5.subscriber.unsubscribe(channel);
2953
2954 case 3:
2955 return _context2.abrupt("return", delete _this5.limiters[channel]);
2956
2957 case 4:
2958 case "end":
2959 return _context2.stop();
2960 }
2961 }
2962 }, _callee2);
2963 }));
2964
2965 return function (_x2) {
2966 return _ref3.apply(this, arguments);
2967 };
2968 }());
2969 }
2970 }, {
2971 key: "__scriptArgs__",
2972 value: function __scriptArgs__(name, id, args, cb) {
2973 var keys;
2974 keys = Scripts$2.keys(name, id);
2975 return [keys.length].concat(keys, args, cb);
2976 }
2977 }, {
2978 key: "__scriptFn__",
2979 value: function __scriptFn__(name) {
2980 return this.client[name].bind(this.client);
2981 }
2982 }, {
2983 key: "disconnect",
2984 value: function disconnect() {
2985 var flush = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : true;
2986 var i, k, len, ref;
2987 ref = Object.keys(this.limiters);
2988
2989 for (i = 0, len = ref.length; i < len; i++) {
2990 k = ref[i];
2991 clearInterval(this.limiters[k]._store.heartbeat);
2992 }
2993
2994 this.limiters = {};
2995 this.terminated = true;
2996
2997 if (flush) {
2998 return this.Promise.all([this.client.quit(), this.subscriber.quit()]);
2999 } else {
3000 this.client.disconnect();
3001 this.subscriber.disconnect();
3002 return this.Promise.resolve();
3003 }
3004 }
3005 }]);
3006
3007 return IORedisConnection;
3008 }();
3009 IORedisConnection.prototype.datastore = "ioredis";
3010 IORedisConnection.prototype.defaults = {
3011 clientOptions: {},
3012 clusterNodes: null,
3013 client: null,
3014 Promise: Promise,
3015 Events: null
3016 };
3017 return IORedisConnection;
3018 }.call(commonjsGlobal);
3019
3020 var IORedisConnection_1 = IORedisConnection;
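    // Editor's note (added): IORedisConnection is the ioredis counterpart. It registers each Lua
    // script with client.defineCommand() instead of SCRIPT LOAD, and supports Redis Cluster when
    // `clusterNodes` is provided. A plausible usage sketch (option names per the Bottleneck
    // documentation, host/port values hypothetical):
    //
    //   var limiter = new Bottleneck({
    //     datastore: "ioredis",
    //     clusterNodes: [{ host: "127.0.0.1", port: 7000 }]
    //   });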
3021
3022 var BottleneckError$3, IORedisConnection$1, RedisConnection$1, RedisDatastore, parser$5;
3023 parser$5 = parser;
3024 BottleneckError$3 = BottleneckError_1;
3025 RedisConnection$1 = RedisConnection_1;
3026 IORedisConnection$1 = IORedisConnection_1;
3027
3028 RedisDatastore =
3029 /*#__PURE__*/
3030 function () {
3031 function RedisDatastore(instance, storeOptions, storeInstanceOptions) {
3032 var _this = this;
3033
3034 _classCallCheck(this, RedisDatastore);
3035
3036 this.instance = instance;
3037 this.storeOptions = storeOptions;
3038 this.originalId = this.instance.id;
3039 this.clientId = this.instance._randomIndex();
3040 parser$5.load(storeInstanceOptions, storeInstanceOptions, this);
3041 this.clients = {};
3042 this.capacityPriorityCounters = {};
3043 this.sharedConnection = this.connection != null;
3044
3045 if (this.connection == null) {
3046 this.connection = this.instance.datastore === "redis" ? new RedisConnection$1({
3047 clientOptions: this.clientOptions,
3048 Promise: this.Promise,
3049 Events: this.instance.Events
3050 }) : this.instance.datastore === "ioredis" ? new IORedisConnection$1({
3051 clientOptions: this.clientOptions,
3052 clusterNodes: this.clusterNodes,
3053 Promise: this.Promise,
3054 Events: this.instance.Events
3055 }) : void 0;
3056 }
3057
3058 this.instance.connection = this.connection;
3059 this.instance.datastore = this.connection.datastore;
3060 this.ready = this.connection.ready.then(function (clients) {
3061 _this.clients = clients;
3062 return _this.runScript("init", _this.prepareInitSettings(_this.clearDatastore));
3063 }).then(function () {
3064 return _this.connection.__addLimiter__(_this.instance);
3065 }).then(function () {
3066 return _this.runScript("register_client", [_this.instance.queued()]);
3067 }).then(function () {
3068 var base;
3069
3070 if (typeof (base = _this.heartbeat = setInterval(function () {
3071 return _this.runScript("heartbeat", [])["catch"](function (e) {
3072 return _this.instance.Events.trigger("error", e);
3073 });
3074 }, _this.heartbeatInterval)).unref === "function") {
3075 base.unref();
3076 }
3077
3078 return _this.clients;
3079 });
3080 }
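      // Editor's note (added): the `ready` chain above runs, in order: the init script (creating
      // or migrating the settings hash), __addLimiter__ (subscribing this client to its pub/sub
      // channels), register_client (announcing this client and its queued count), and finally a
      // repeating heartbeat so process_tick.lua can tell live clients from unresponsive ones.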
3081
3082 _createClass(RedisDatastore, [{
3083 key: "__publish__",
3084 value: function () {
3085 var _publish__ = _asyncToGenerator(
3086 /*#__PURE__*/
3087 regeneratorRuntime.mark(function _callee(message) {
3088 var client, _ref;
3089
3090 return regeneratorRuntime.wrap(function _callee$(_context) {
3091 while (1) {
3092 switch (_context.prev = _context.next) {
3093 case 0:
3094 _context.next = 2;
3095 return this.ready;
3096
3097 case 2:
3098 _ref = _context.sent;
3099 client = _ref.client;
3100 return _context.abrupt("return", client.publish(this.instance.channel(), "message:".concat(message.toString())));
3101
3102 case 5:
3103 case "end":
3104 return _context.stop();
3105 }
3106 }
3107 }, _callee, this);
3108 }));
3109
3110 function __publish__(_x) {
3111 return _publish__.apply(this, arguments);
3112 }
3113
3114 return __publish__;
3115 }()
3116 }, {
3117 key: "onMessage",
3118 value: function () {
3119 var _onMessage = _asyncToGenerator(
3120 /*#__PURE__*/
3121 regeneratorRuntime.mark(function _callee3(channel, message) {
3122 var _this2 = this;
3123
3124 var capacity, counter, data, drained, e, newCapacity, pos, priorityClient, rawCapacity, type, _ref2, _data$split, _data$split2;
3125
3126 return regeneratorRuntime.wrap(function _callee3$(_context3) {
3127 while (1) {
3128 switch (_context3.prev = _context3.next) {
3129 case 0:
3130 _context3.prev = 0;
3131 pos = message.indexOf(":");
3132 _ref2 = [message.slice(0, pos), message.slice(pos + 1)];
3133 type = _ref2[0];
3134 data = _ref2[1];
3135
3136 if (!(type === "capacity")) {
3137 _context3.next = 11;
3138 break;
3139 }
3140
3141 _context3.next = 8;
3142 return this.instance._drainAll(data.length > 0 ? ~~data : void 0);
3143
3144 case 8:
3145 return _context3.abrupt("return", _context3.sent);
3146
3147 case 11:
3148 if (!(type === "capacity-priority")) {
3149 _context3.next = 37;
3150 break;
3151 }
3152
3153 _data$split = data.split(":");
3154 _data$split2 = _slicedToArray(_data$split, 3);
3155 rawCapacity = _data$split2[0];
3156 priorityClient = _data$split2[1];
3157 counter = _data$split2[2];
3158 capacity = rawCapacity.length > 0 ? ~~rawCapacity : void 0;
3159
3160 if (!(priorityClient === this.clientId)) {
3161 _context3.next = 28;
3162 break;
3163 }
3164
3165 _context3.next = 21;
3166 return this.instance._drainAll(capacity);
3167
3168 case 21:
3169 drained = _context3.sent;
3170 newCapacity = capacity != null ? capacity - (drained || 0) : "";
3171 _context3.next = 25;
3172 return this.clients.client.publish(this.instance.channel(), "capacity-priority:".concat(newCapacity, "::").concat(counter));
3173
3174 case 25:
3175 return _context3.abrupt("return", _context3.sent);
3176
3177 case 28:
3178 if (!(priorityClient === "")) {
3179 _context3.next = 34;
3180 break;
3181 }
3182
3183 clearTimeout(this.capacityPriorityCounters[counter]);
3184 delete this.capacityPriorityCounters[counter];
3185 return _context3.abrupt("return", this.instance._drainAll(capacity));
3186
3187 case 34:
3188 return _context3.abrupt("return", this.capacityPriorityCounters[counter] = setTimeout(
3189 /*#__PURE__*/
3190 _asyncToGenerator(
3191 /*#__PURE__*/
3192 regeneratorRuntime.mark(function _callee2() {
3193 var e;
3194 return regeneratorRuntime.wrap(function _callee2$(_context2) {
3195 while (1) {
3196 switch (_context2.prev = _context2.next) {
3197 case 0:
3198 _context2.prev = 0;
3199 delete _this2.capacityPriorityCounters[counter];
3200 _context2.next = 4;
3201 return _this2.runScript("blacklist_client", [priorityClient]);
3202
3203 case 4:
3204 _context2.next = 6;
3205 return _this2.instance._drainAll(capacity);
3206
3207 case 6:
3208 return _context2.abrupt("return", _context2.sent);
3209
3210 case 9:
3211 _context2.prev = 9;
3212 _context2.t0 = _context2["catch"](0);
3213 e = _context2.t0;
3214 return _context2.abrupt("return", _this2.instance.Events.trigger("error", e));
3215
3216 case 13:
3217 case "end":
3218 return _context2.stop();
3219 }
3220 }
3221 }, _callee2, null, [[0, 9]]);
3222 })), 1000));
3223
3224 case 35:
3225 _context3.next = 45;
3226 break;
3227
3228 case 37:
3229 if (!(type === "message")) {
3230 _context3.next = 41;
3231 break;
3232 }
3233
3234 return _context3.abrupt("return", this.instance.Events.trigger("message", data));
3235
3236 case 41:
3237 if (!(type === "blocked")) {
3238 _context3.next = 45;
3239 break;
3240 }
3241
3242 _context3.next = 44;
3243 return this.instance._dropAllQueued();
3244
3245 case 44:
3246 return _context3.abrupt("return", _context3.sent);
3247
3248 case 45:
3249 _context3.next = 51;
3250 break;
3251
3252 case 47:
3253 _context3.prev = 47;
3254 _context3.t0 = _context3["catch"](0);
3255 e = _context3.t0;
3256 return _context3.abrupt("return", this.instance.Events.trigger("error", e));
3257
3258 case 51:
3259 case "end":
3260 return _context3.stop();
3261 }
3262 }
3263 }, _callee3, this, [[0, 47]]);
3264 }));
3265
3266 function onMessage(_x2, _x3) {
3267 return _onMessage.apply(this, arguments);
3268 }
3269
3270 return onMessage;
3271 }()
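        // Editor's note (added): onMessage dispatches on the prefix of each pub/sub payload, as
        // published by the Lua scripts and by __publish__ above:
        //   "capacity:<n>"                            -> drain up to n weight units locally
        //   "capacity-priority:<n>:<clientId>:<ctr>"  -> the named client drains first; other
        //                                                clients blacklist it after ~1000 ms
        //                                                unless a follow-up with an empty
        //                                                clientId clears the timer
        //   "message:<data>"                          -> re-emitted as a "message" event
        //   "blocked:"                                -> drop all locally queued jobs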
3272 }, {
3273 key: "__disconnect__",
3274 value: function __disconnect__(flush) {
3275 clearInterval(this.heartbeat);
3276
3277 if (this.sharedConnection) {
3278 return this.connection.__removeLimiter__(this.instance);
3279 } else {
3280 return this.connection.disconnect(flush);
3281 }
3282 }
3283 }, {
3284 key: "runScript",
3285 value: function () {
3286 var _runScript = _asyncToGenerator(
3287 /*#__PURE__*/
3288 regeneratorRuntime.mark(function _callee4(name, args) {
3289 var _this3 = this;
3290
3291 return regeneratorRuntime.wrap(function _callee4$(_context4) {
3292 while (1) {
3293 switch (_context4.prev = _context4.next) {
3294 case 0:
3295 if (name === "init" || name === "register_client") {
3296 _context4.next = 3;
3297 break;
3298 }
3299
3300 _context4.next = 3;
3301 return this.ready;
3302
3303 case 3:
3304 return _context4.abrupt("return", new this.Promise(function (resolve, reject) {
3305 var all_args, arr;
3306 all_args = [Date.now(), _this3.clientId].concat(args);
3307
3308 _this3.instance.Events.trigger("debug", "Calling Redis script: ".concat(name, ".lua"), all_args);
3309
3310 arr = _this3.connection.__scriptArgs__(name, _this3.originalId, all_args, function (err, replies) {
3311 if (err != null) {
3312 return reject(err);
3313 }
3314
3315 return resolve(replies);
3316 });
3317 return _this3.connection.__scriptFn__(name).apply(void 0, _toConsumableArray(arr));
3318 })["catch"](function (e) {
3319 if (e.message === "SETTINGS_KEY_NOT_FOUND") {
3320 if (name === "heartbeat") {
3321 return _this3.Promise.resolve();
3322 } else {
3323 return _this3.runScript("init", _this3.prepareInitSettings(false)).then(function () {
3324 return _this3.runScript(name, args);
3325 });
3326 }
3327 } else if (e.message === "UNKNOWN_CLIENT") {
3328 return _this3.runScript("register_client", [_this3.instance.queued()]).then(function () {
3329 return _this3.runScript(name, args);
3330 });
3331 } else {
3332 return _this3.Promise.reject(e);
3333 }
3334 }));
3335
3336 case 4:
3337 case "end":
3338 return _context4.stop();
3339 }
3340 }
3341 }, _callee4, this);
3342 }));
3343
3344 function runScript(_x4, _x5) {
3345 return _runScript.apply(this, arguments);
3346 }
3347
3348 return runScript;
3349 }()
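        // Editor's note (added): runScript is also where recovery happens. A SETTINGS_KEY_NOT_FOUND
        // reply (settings hash expired or flushed) re-runs init without clearing and retries the
        // original script, except for heartbeats, which are silently dropped; UNKNOWN_CLIENT
        // re-runs register_client and retries. Any other error is passed through to the caller.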
3350 }, {
3351 key: "prepareArray",
3352 value: function prepareArray(arr) {
3353 var i, len, results, x;
3354 results = [];
3355
3356 for (i = 0, len = arr.length; i < len; i++) {
3357 x = arr[i];
3358 results.push(x != null ? x.toString() : "");
3359 }
3360
3361 return results;
3362 }
3363 }, {
3364 key: "prepareObject",
3365 value: function prepareObject(obj) {
3366 var arr, k, v;
3367 arr = [];
3368
3369 for (k in obj) {
3370 v = obj[k];
3371 arr.push(k, v != null ? v.toString() : "");
3372 }
3373
3374 return arr;
3375 }
3376 }, {
3377 key: "prepareInitSettings",
3378 value: function prepareInitSettings(clear) {
3379 var args;
3380 args = this.prepareObject(Object.assign({}, this.storeOptions, {
3381 id: this.originalId,
3382 version: this.instance.version,
3383 groupTimeout: this.timeout,
3384 clientTimeout: this.clientTimeout
3385 }));
3386 args.unshift(clear ? 1 : 0, this.instance.version);
3387 return args;
3388 }
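        // Editor's note (added): the resulting array lines up with what init.lua expects:
        //   [ clear (0|1), limiter version, key1, value1, key2, value2, ... ]
        // where the key/value pairs are the store options plus id, version, groupTimeout and
        // clientTimeout, each stringified by prepareObject above (nulls become "").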
3389 }, {
3390 key: "convertBool",
3391 value: function convertBool(b) {
3392 return !!b;
3393 }
3394 }, {
3395 key: "__updateSettings__",
3396 value: function () {
3397 var _updateSettings__ = _asyncToGenerator(
3398 /*#__PURE__*/
3399 regeneratorRuntime.mark(function _callee5(options) {
3400 return regeneratorRuntime.wrap(function _callee5$(_context5) {
3401 while (1) {
3402 switch (_context5.prev = _context5.next) {
3403 case 0:
3404 _context5.next = 2;
3405 return this.runScript("update_settings", this.prepareObject(options));
3406
3407 case 2:
3408 return _context5.abrupt("return", parser$5.overwrite(options, options, this.storeOptions));
3409
3410 case 3:
3411 case "end":
3412 return _context5.stop();
3413 }
3414 }
3415 }, _callee5, this);
3416 }));
3417
3418 function __updateSettings__(_x6) {
3419 return _updateSettings__.apply(this, arguments);
3420 }
3421
3422 return __updateSettings__;
3423 }()
3424 }, {
3425 key: "__running__",
3426 value: function __running__() {
3427 return this.runScript("running", []);
3428 }
3429 }, {
3430 key: "__queued__",
3431 value: function __queued__() {
3432 return this.runScript("queued", []);
3433 }
3434 }, {
3435 key: "__done__",
3436 value: function __done__() {
3437 return this.runScript("done", []);
3438 }
3439 }, {
3440 key: "__groupCheck__",
3441 value: function () {
3442 var _groupCheck__ = _asyncToGenerator(
3443 /*#__PURE__*/
3444 regeneratorRuntime.mark(function _callee6() {
3445 return regeneratorRuntime.wrap(function _callee6$(_context6) {
3446 while (1) {
3447 switch (_context6.prev = _context6.next) {
3448 case 0:
3449 _context6.t0 = this;
3450 _context6.next = 3;
3451 return this.runScript("group_check", []);
3452
3453 case 3:
3454 _context6.t1 = _context6.sent;
3455 return _context6.abrupt("return", _context6.t0.convertBool.call(_context6.t0, _context6.t1));
3456
3457 case 5:
3458 case "end":
3459 return _context6.stop();
3460 }
3461 }
3462 }, _callee6, this);
3463 }));
3464
3465 function __groupCheck__() {
3466 return _groupCheck__.apply(this, arguments);
3467 }
3468
3469 return __groupCheck__;
3470 }()
3471 }, {
3472 key: "__incrementReservoir__",
3473 value: function __incrementReservoir__(incr) {
3474 return this.runScript("increment_reservoir", [incr]);
3475 }
3476 }, {
3477 key: "__currentReservoir__",
3478 value: function __currentReservoir__() {
3479 return this.runScript("current_reservoir", []);
3480 }
3481 }, {
3482 key: "__check__",
3483 value: function () {
3484 var _check__ = _asyncToGenerator(
3485 /*#__PURE__*/
3486 regeneratorRuntime.mark(function _callee7(weight) {
3487 return regeneratorRuntime.wrap(function _callee7$(_context7) {
3488 while (1) {
3489 switch (_context7.prev = _context7.next) {
3490 case 0:
3491 _context7.t0 = this;
3492 _context7.next = 3;
3493 return this.runScript("check", this.prepareArray([weight]));
3494
3495 case 3:
3496 _context7.t1 = _context7.sent;
3497 return _context7.abrupt("return", _context7.t0.convertBool.call(_context7.t0, _context7.t1));
3498
3499 case 5:
3500 case "end":
3501 return _context7.stop();
3502 }
3503 }
3504 }, _callee7, this);
3505 }));
3506
3507 function __check__(_x7) {
3508 return _check__.apply(this, arguments);
3509 }
3510
3511 return __check__;
3512 }()
3513 }, {
3514 key: "__register__",
3515 value: function () {
3516 var _register__ = _asyncToGenerator(
3517 /*#__PURE__*/
3518 regeneratorRuntime.mark(function _callee8(index, weight, expiration) {
3519 var reservoir, success, wait, _ref4, _ref5;
3520
3521 return regeneratorRuntime.wrap(function _callee8$(_context8) {
3522 while (1) {
3523 switch (_context8.prev = _context8.next) {
3524 case 0:
3525 _context8.next = 2;
3526 return this.runScript("register", this.prepareArray([index, weight, expiration]));
3527
3528 case 2:
3529 _ref4 = _context8.sent;
3530 _ref5 = _slicedToArray(_ref4, 3);
3531 success = _ref5[0];
3532 wait = _ref5[1];
3533 reservoir = _ref5[2];
3534 return _context8.abrupt("return", {
3535 success: this.convertBool(success),
3536 wait: wait,
3537 reservoir: reservoir
3538 });
3539
3540 case 8:
3541 case "end":
3542 return _context8.stop();
3543 }
3544 }
3545 }, _callee8, this);
3546 }));
3547
3548 function __register__(_x8, _x9, _x10) {
3549 return _register__.apply(this, arguments);
3550 }
3551
3552 return __register__;
3553 }()
3554 }, {
3555 key: "__submit__",
3556 value: function () {
3557 var _submit__ = _asyncToGenerator(
3558 /*#__PURE__*/
3559 regeneratorRuntime.mark(function _callee9(queueLength, weight) {
3560 var blocked, e, maxConcurrent, overweight, reachedHWM, strategy, _ref6, _ref7, _e$message$split, _e$message$split2;
3561
3562 return regeneratorRuntime.wrap(function _callee9$(_context9) {
3563 while (1) {
3564 switch (_context9.prev = _context9.next) {
3565 case 0:
3566 _context9.prev = 0;
3567 _context9.next = 3;
3568 return this.runScript("submit", this.prepareArray([queueLength, weight]));
3569
3570 case 3:
3571 _ref6 = _context9.sent;
3572 _ref7 = _slicedToArray(_ref6, 3);
3573 reachedHWM = _ref7[0];
3574 blocked = _ref7[1];
3575 strategy = _ref7[2];
3576 return _context9.abrupt("return", {
3577 reachedHWM: this.convertBool(reachedHWM),
3578 blocked: this.convertBool(blocked),
3579 strategy: strategy
3580 });
3581
3582 case 11:
3583 _context9.prev = 11;
3584 _context9.t0 = _context9["catch"](0);
3585 e = _context9.t0;
3586
3587 if (!(e.message.indexOf("OVERWEIGHT") === 0)) {
3588 _context9.next = 23;
3589 break;
3590 }
3591
3592 _e$message$split = e.message.split(":");
3593 _e$message$split2 = _slicedToArray(_e$message$split, 3);
3594 overweight = _e$message$split2[0];
3595 weight = _e$message$split2[1];
3596 maxConcurrent = _e$message$split2[2];
3597 throw new BottleneckError$3("Impossible to add a job having a weight of ".concat(weight, " to a limiter having a maxConcurrent setting of ").concat(maxConcurrent));
3598
3599 case 23:
3600 throw e;
3601
3602 case 24:
3603 case "end":
3604 return _context9.stop();
3605 }
3606 }
3607 }, _callee9, this, [[0, 11]]);
3608 }));
3609
3610 function __submit__(_x11, _x12) {
3611 return _submit__.apply(this, arguments);
3612 }
3613
3614 return __submit__;
3615 }()
3616 }, {
3617 key: "__free__",
3618 value: function () {
3619 var _free__ = _asyncToGenerator(
3620 /*#__PURE__*/
3621 regeneratorRuntime.mark(function _callee10(index, weight) {
3622 var running;
3623 return regeneratorRuntime.wrap(function _callee10$(_context10) {
3624 while (1) {
3625 switch (_context10.prev = _context10.next) {
3626 case 0:
3627 _context10.next = 2;
3628 return this.runScript("free", this.prepareArray([index]));
3629
3630 case 2:
3631 running = _context10.sent;
3632 return _context10.abrupt("return", {
3633 running: running
3634 });
3635
3636 case 4:
3637 case "end":
3638 return _context10.stop();
3639 }
3640 }
3641 }, _callee10, this);
3642 }));
3643
3644 function __free__(_x13, _x14) {
3645 return _free__.apply(this, arguments);
3646 }
3647
3648 return __free__;
3649 }()
3650 }]);
3651
3652 return RedisDatastore;
3653 }();
3654
3655 var RedisDatastore_1 = RedisDatastore;
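  // States tracks the lifecycle position of every job by id. The constructor
  // receives the ordered list of state names; `_jobs` maps each job id to an
  // index into that list and `counts` keeps one running total per state, so
  // next()/start()/remove() can advance or retire a job in O(1) and
  // statusCounts() never has to scan the job table.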
3656
3657 var BottleneckError$4, States;
3658 BottleneckError$4 = BottleneckError_1;
3659
3660 States =
3661 /*#__PURE__*/
3662 function () {
3663 function States(status1) {
3664 _classCallCheck(this, States);
3665
3666 this.status = status1;
3667 this._jobs = {};
3668 this.counts = this.status.map(function () {
3669 return 0;
3670 });
3671 }
3672
3673 _createClass(States, [{
3674 key: "next",
3675 value: function next(id) {
3676 var current, next;
3677 current = this._jobs[id];
3678 next = current + 1;
3679
3680 if (current != null && next < this.status.length) {
3681 this.counts[current]--;
3682 this.counts[next]++;
3683 return this._jobs[id]++;
3684 } else if (current != null) {
3685 this.counts[current]--;
3686 return delete this._jobs[id];
3687 }
3688 }
3689 }, {
3690 key: "start",
3691 value: function start(id) {
3692 var initial;
3693 initial = 0;
3694 this._jobs[id] = initial;
3695 return this.counts[initial]++;
3696 }
3697 }, {
3698 key: "remove",
3699 value: function remove(id) {
3700 var current;
3701 current = this._jobs[id];
3702
3703 if (current != null) {
3704 this.counts[current]--;
3705 delete this._jobs[id];
3706 }
3707
3708 return current != null;
3709 }
3710 }, {
3711 key: "jobStatus",
3712 value: function jobStatus(id) {
3713 var ref;
3714 return (ref = this.status[this._jobs[id]]) != null ? ref : null;
3715 }
3716 }, {
3717 key: "statusJobs",
3718 value: function statusJobs(status) {
3719 var k, pos, ref, results, v;
3720
3721 if (status != null) {
3722 pos = this.status.indexOf(status);
3723
3724 if (pos < 0) {
3725 throw new BottleneckError$4("status must be one of ".concat(this.status.join(', ')));
3726 }
3727
3728 ref = this._jobs;
3729 results = [];
3730
3731 for (k in ref) {
3732 v = ref[k];
3733
3734 if (v === pos) {
3735 results.push(k);
3736 }
3737 }
3738
3739 return results;
3740 } else {
3741 return Object.keys(this._jobs);
3742 }
3743 }
3744 }, {
3745 key: "statusCounts",
3746 value: function statusCounts() {
3747 var _this = this;
3748
3749 return this.counts.reduce(function (acc, v, i) {
3750 acc[_this.status[i]] = v;
3751 return acc;
3752 }, {});
3753 }
3754 }]);
3755
3756 return States;
3757 }();
3758
3759 var States_1 = States;
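  // Sync is a small serial lock built on the doubly-linked list (DLList):
  // schedule() enqueues a task together with its arguments and a deferred
  // promise, and _tryToRun() drains the queue one task at a time (at most one
  // task is ever in flight), settling each promise with the task's return
  // value or with the error it threw.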
3760
3761 var DLList$2, Sync;
3762 DLList$2 = DLList_1;
3763
3764 Sync =
3765 /*#__PURE__*/
3766 function () {
3767 function Sync(name, Promise) {
3768 _classCallCheck(this, Sync);
3769
3770 this.schedule = this.schedule.bind(this);
3771 this.name = name;
3772 this.Promise = Promise;
3773 this._running = 0;
3774 this._queue = new DLList$2();
3775 }
3776
3777 _createClass(Sync, [{
3778 key: "isEmpty",
3779 value: function isEmpty() {
3780 return this._queue.length === 0;
3781 }
3782 }, {
3783 key: "_tryToRun",
3784 value: function () {
3785 var _tryToRun2 = _asyncToGenerator(
3786 /*#__PURE__*/
3787 regeneratorRuntime.mark(function _callee2() {
3788 var args, cb, error, reject, resolve, returned, task, _this$_queue$shift;
3789
3790 return regeneratorRuntime.wrap(function _callee2$(_context2) {
3791 while (1) {
3792 switch (_context2.prev = _context2.next) {
3793 case 0:
3794 if (!(this._running < 1 && this._queue.length > 0)) {
3795 _context2.next = 13;
3796 break;
3797 }
3798
3799 this._running++;
3800 _this$_queue$shift = this._queue.shift();
3801 task = _this$_queue$shift.task;
3802 args = _this$_queue$shift.args;
3803 resolve = _this$_queue$shift.resolve;
3804 reject = _this$_queue$shift.reject;
3805 _context2.next = 9;
3806 return _asyncToGenerator(
3807 /*#__PURE__*/
3808 regeneratorRuntime.mark(function _callee() {
3809 return regeneratorRuntime.wrap(function _callee$(_context) {
3810 while (1) {
3811 switch (_context.prev = _context.next) {
3812 case 0:
3813 _context.prev = 0;
3814 _context.next = 3;
3815 return task.apply(void 0, _toConsumableArray(args));
3816
3817 case 3:
3818 returned = _context.sent;
3819 return _context.abrupt("return", function () {
3820 return resolve(returned);
3821 });
3822
3823 case 7:
3824 _context.prev = 7;
3825 _context.t0 = _context["catch"](0);
3826 error = _context.t0;
3827 return _context.abrupt("return", function () {
3828 return reject(error);
3829 });
3830
3831 case 11:
3832 case "end":
3833 return _context.stop();
3834 }
3835 }
3836 }, _callee, null, [[0, 7]]);
3837 }))();
3838
3839 case 9:
3840 cb = _context2.sent;
3841 this._running--;
3842
3843 this._tryToRun();
3844
3845 return _context2.abrupt("return", cb());
3846
3847 case 13:
3848 case "end":
3849 return _context2.stop();
3850 }
3851 }
3852 }, _callee2, this);
3853 }));
3854
3855 function _tryToRun() {
3856 return _tryToRun2.apply(this, arguments);
3857 }
3858
3859 return _tryToRun;
3860 }()
3861 }, {
3862 key: "schedule",
3863 value: function schedule(task) {
3864 var promise, reject, resolve;
3865 resolve = reject = null;
3866 promise = new this.Promise(function (_resolve, _reject) {
3867 resolve = _resolve;
3868 return reject = _reject;
3869 });
3870
3871 for (var _len = arguments.length, args = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
3872 args[_key - 1] = arguments[_key];
3873 }
3874
3875 this._queue.push({
3876 task: task,
3877 args: args,
3878 resolve: resolve,
3879 reject: reject
3880 });
3881
3882 this._tryToRun();
3883
3884 return promise;
3885 }
3886 }]);
3887
3888 return Sync;
3889 }();
3890
3891 var Sync_1 = Sync;
3892
3893 var version = "2.19.2";
3894 var version$1 = {
3895 version: version
3896 };
3897
3898 var version$2 = /*#__PURE__*/Object.freeze({
3899 version: version,
3900 default: version$1
3901 });
3902
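  // Group manages one limiter per key. key() lazily creates a Bottleneck
  // instance for each new key (triggering the "created" event), and
  // _startAutoCleanup() periodically asks each instance's datastore, via
  // __groupCheck__, whether it has been idle longer than `timeout` (five
  // minutes by default) and deletes the key if so. With the "redis"/"ioredis"
  // datastores, every limiter in the group shares a single connection.
  //
  // A minimal usage sketch (illustrative only; fetchUser stands in for the
  // caller's own function):
  //
  //   const group = new Bottleneck.Group({ maxConcurrent: 1, minTime: 250 });
  //   group.on("created", (limiter, key) => console.log("new limiter for", key));
  //   group.key("user-123").schedule(() => fetchUser("123")); // per-key queue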
3903 var Events$4, Group, IORedisConnection$2, RedisConnection$2, Scripts$3, parser$6;
3904 parser$6 = parser;
3905 Events$4 = Events_1;
3906 RedisConnection$2 = RedisConnection_1;
3907 IORedisConnection$2 = IORedisConnection_1;
3908 Scripts$3 = Scripts;
3909
3910 Group = function () {
3911 var Group =
3912 /*#__PURE__*/
3913 function () {
3914 function Group() {
3915 var limiterOptions = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
3916
3917 _classCallCheck(this, Group);
3918
3919 this.deleteKey = this.deleteKey.bind(this);
3920 this.limiterOptions = limiterOptions;
3921 parser$6.load(this.limiterOptions, this.defaults, this);
3922 this.Events = new Events$4(this);
3923 this.instances = {};
3924 this.Bottleneck = Bottleneck_1;
3925
3926 this._startAutoCleanup();
3927
3928 this.sharedConnection = this.connection != null;
3929
3930 if (this.connection == null) {
3931 if (this.limiterOptions.datastore === "redis") {
3932 this.connection = new RedisConnection$2(Object.assign({}, this.limiterOptions, {
3933 Events: this.Events
3934 }));
3935 } else if (this.limiterOptions.datastore === "ioredis") {
3936 this.connection = new IORedisConnection$2(Object.assign({}, this.limiterOptions, {
3937 Events: this.Events
3938 }));
3939 }
3940 }
3941 }
3942
3943 _createClass(Group, [{
3944 key: "key",
3945 value: function key() {
3946 var _this = this;
3947
3948 var _key = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : "";
3949
3950 var ref;
3951 return (ref = this.instances[_key]) != null ? ref : function () {
3952 var limiter;
3953 limiter = _this.instances[_key] = new _this.Bottleneck(Object.assign(_this.limiterOptions, {
3954 id: "".concat(_this.id, "-").concat(_key),
3955 timeout: _this.timeout,
3956 connection: _this.connection
3957 }));
3958
3959 _this.Events.trigger("created", limiter, _key);
3960
3961 return limiter;
3962 }();
3963 }
3964 }, {
3965 key: "deleteKey",
3966 value: function () {
3967 var _deleteKey = _asyncToGenerator(
3968 /*#__PURE__*/
3969 regeneratorRuntime.mark(function _callee() {
3970 var key,
3971 deleted,
3972 instance,
3973 _args = arguments;
3974 return regeneratorRuntime.wrap(function _callee$(_context) {
3975 while (1) {
3976 switch (_context.prev = _context.next) {
3977 case 0:
3978 key = _args.length > 0 && _args[0] !== undefined ? _args[0] : "";
3979 instance = this.instances[key];
3980
3981 if (!this.connection) {
3982 _context.next = 6;
3983 break;
3984 }
3985
3986 _context.next = 5;
3987 return this.connection.__runCommand__(['del'].concat(_toConsumableArray(Scripts$3.allKeys("".concat(this.id, "-").concat(key)))));
3988
3989 case 5:
3990 deleted = _context.sent;
3991
3992 case 6:
3993 if (!(instance != null)) {
3994 _context.next = 10;
3995 break;
3996 }
3997
3998 delete this.instances[key];
3999 _context.next = 10;
4000 return instance.disconnect();
4001
4002 case 10:
4003 return _context.abrupt("return", instance != null || deleted > 0);
4004
4005 case 11:
4006 case "end":
4007 return _context.stop();
4008 }
4009 }
4010 }, _callee, this);
4011 }));
4012
4013 function deleteKey() {
4014 return _deleteKey.apply(this, arguments);
4015 }
4016
4017 return deleteKey;
4018 }()
4019 }, {
4020 key: "limiters",
4021 value: function limiters() {
4022 var k, ref, results, v;
4023 ref = this.instances;
4024 results = [];
4025
4026 for (k in ref) {
4027 v = ref[k];
4028 results.push({
4029 key: k,
4030 limiter: v
4031 });
4032 }
4033
4034 return results;
4035 }
4036 }, {
4037 key: "keys",
4038 value: function keys() {
4039 return Object.keys(this.instances);
4040 }
4041 }, {
4042 key: "clusterKeys",
4043 value: function () {
4044 var _clusterKeys = _asyncToGenerator(
4045 /*#__PURE__*/
4046 regeneratorRuntime.mark(function _callee2() {
4047 var cursor, end, found, i, k, keys, len, next, start, _ref, _ref2;
4048
4049 return regeneratorRuntime.wrap(function _callee2$(_context2) {
4050 while (1) {
4051 switch (_context2.prev = _context2.next) {
4052 case 0:
4053 if (!(this.connection == null)) {
4054 _context2.next = 2;
4055 break;
4056 }
4057
4058 return _context2.abrupt("return", this.Promise.resolve(this.keys()));
4059
4060 case 2:
4061 keys = [];
4062 cursor = null;
4063 start = "b_".concat(this.id, "-").length;
4064 end = "_settings".length;
4065
4066 case 6:
4067 if (!(cursor !== 0)) {
4068 _context2.next = 17;
4069 break;
4070 }
4071
4072 _context2.next = 9;
4073 return this.connection.__runCommand__(["scan", cursor != null ? cursor : 0, "match", "b_".concat(this.id, "-*_settings"), "count", 10000]);
4074
4075 case 9:
4076 _ref = _context2.sent;
4077 _ref2 = _slicedToArray(_ref, 2);
4078 next = _ref2[0];
4079 found = _ref2[1];
4080 cursor = ~~next;
4081
4082 for (i = 0, len = found.length; i < len; i++) {
4083 k = found[i];
4084 keys.push(k.slice(start, -end));
4085 }
4086
4087 _context2.next = 6;
4088 break;
4089
4090 case 17:
4091 return _context2.abrupt("return", keys);
4092
4093 case 18:
4094 case "end":
4095 return _context2.stop();
4096 }
4097 }
4098 }, _callee2, this);
4099 }));
4100
4101 function clusterKeys() {
4102 return _clusterKeys.apply(this, arguments);
4103 }
4104
4105 return clusterKeys;
4106 }()
4107 }, {
4108 key: "_startAutoCleanup",
4109 value: function _startAutoCleanup() {
4110 var _this2 = this;
4111
4112 var base;
4113 clearInterval(this.interval);
4114 return typeof (base = this.interval = setInterval(
4115 /*#__PURE__*/
4116 _asyncToGenerator(
4117 /*#__PURE__*/
4118 regeneratorRuntime.mark(function _callee3() {
4119 var e, k, ref, results, time, v;
4120 return regeneratorRuntime.wrap(function _callee3$(_context3) {
4121 while (1) {
4122 switch (_context3.prev = _context3.next) {
4123 case 0:
4124 time = Date.now();
4125 ref = _this2.instances;
4126 results = [];
4127 _context3.t0 = regeneratorRuntime.keys(ref);
4128
4129 case 4:
4130 if ((_context3.t1 = _context3.t0()).done) {
4131 _context3.next = 23;
4132 break;
4133 }
4134
4135 k = _context3.t1.value;
4136 v = ref[k];
4137 _context3.prev = 7;
4138 _context3.next = 10;
4139 return v._store.__groupCheck__(time);
4140
4141 case 10:
4142 if (!_context3.sent) {
4143 _context3.next = 14;
4144 break;
4145 }
4146
4147 results.push(_this2.deleteKey(k));
4148 _context3.next = 15;
4149 break;
4150
4151 case 14:
4152 results.push(void 0);
4153
4154 case 15:
4155 _context3.next = 21;
4156 break;
4157
4158 case 17:
4159 _context3.prev = 17;
4160 _context3.t2 = _context3["catch"](7);
4161 e = _context3.t2;
4162 results.push(v.Events.trigger("error", e));
4163
4164 case 21:
4165 _context3.next = 4;
4166 break;
4167
4168 case 23:
4169 return _context3.abrupt("return", results);
4170
4171 case 24:
4172 case "end":
4173 return _context3.stop();
4174 }
4175 }
4176 }, _callee3, null, [[7, 17]]);
4177 })), this.timeout / 2)).unref === "function" ? base.unref() : void 0;
4178 }
4179 }, {
4180 key: "updateSettings",
4181 value: function updateSettings() {
4182 var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
4183 parser$6.overwrite(options, this.defaults, this);
4184 parser$6.overwrite(options, options, this.limiterOptions);
4185
4186 if (options.timeout != null) {
4187 return this._startAutoCleanup();
4188 }
4189 }
4190 }, {
4191 key: "disconnect",
4192 value: function disconnect() {
4193 var flush = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : true;
4194 var ref;
4195
4196 if (!this.sharedConnection) {
4197 return (ref = this.connection) != null ? ref.disconnect(flush) : void 0;
4198 }
4199 }
4200 }]);
4201
4202 return Group;
4203 }();
4204 Group.prototype.defaults = {
4205 timeout: 1000 * 60 * 5,
4206 connection: null,
4207 Promise: Promise,
4208 id: "group-key"
4209 };
4210 return Group;
4211 }.call(commonjsGlobal);
4212
4213 var Group_1 = Group;
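  // Batcher collects everything passed to add() and emits the accumulated
  // array in a single "batch" event once either maxSize items have been added
  // or maxTime milliseconds have passed since the first item of the current
  // batch. add() returns a promise that resolves when the batch containing
  // that item is flushed.
  //
  // A minimal usage sketch (illustrative only):
  //
  //   const batcher = new Bottleneck.Batcher({ maxTime: 1000, maxSize: 10 });
  //   batcher.on("batch", (items) => console.log("flushing", items.length));
  //   batcher.add({ id: 1 }); // resolves once this item's batch goes out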
4214
4215 var Batcher, Events$5, parser$7;
4216 parser$7 = parser;
4217 Events$5 = Events_1;
4218
4219 Batcher = function () {
4220 var Batcher =
4221 /*#__PURE__*/
4222 function () {
4223 function Batcher() {
4224 var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
4225
4226 _classCallCheck(this, Batcher);
4227
4228 this.options = options;
4229 parser$7.load(this.options, this.defaults, this);
4230 this.Events = new Events$5(this);
4231 this._arr = [];
4232
4233 this._resetPromise();
4234
4235 this._lastFlush = Date.now();
4236 }
4237
4238 _createClass(Batcher, [{
4239 key: "_resetPromise",
4240 value: function _resetPromise() {
4241 var _this = this;
4242
4243 return this._promise = new this.Promise(function (res, rej) {
4244 return _this._resolve = res;
4245 });
4246 }
4247 }, {
4248 key: "_flush",
4249 value: function _flush() {
4250 clearTimeout(this._timeout);
4251 this._lastFlush = Date.now();
4252
4253 this._resolve();
4254
4255 this.Events.trigger("batch", this._arr);
4256 this._arr = [];
4257 return this._resetPromise();
4258 }
4259 }, {
4260 key: "add",
4261 value: function add(data) {
4262 var _this2 = this;
4263
4264 var ret;
4265
4266 this._arr.push(data);
4267
4268 ret = this._promise;
4269
4270 if (this._arr.length === this.maxSize) {
4271 this._flush();
4272 } else if (this.maxTime != null && this._arr.length === 1) {
4273 this._timeout = setTimeout(function () {
4274 return _this2._flush();
4275 }, this.maxTime);
4276 }
4277
4278 return ret;
4279 }
4280 }]);
4281
4282 return Batcher;
4283 }();
4284 Batcher.prototype.defaults = {
4285 maxTime: null,
4286 maxSize: null,
4287 Promise: Promise
4288 };
4289 return Batcher;
4290 }.call(commonjsGlobal);
4291
4292 var Batcher_1 = Batcher;
4293
4294 var require$$8 = getCjsExportFromNamespace(version$2);
4295
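  // Bottleneck is the main limiter. Jobs arrive through submit(), schedule()
  // or a wrap()-ed function, are held in priority queues and tracked in
  // States, and _drainAll()/_drainOne() ask the datastore (__register__)
  // whether the next job may run given its weight, the current reservoir and
  // the maxConcurrent/minTime settings; _run() then executes the job and
  // _free() releases its weight when it finishes.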
4296 var Bottleneck,
4297 DEFAULT_PRIORITY$1,
4298 Events$6,
4299 Job$1,
4300 LocalDatastore$1,
4301 NUM_PRIORITIES$1,
4302 Queues$1,
4303 RedisDatastore$1,
4304 States$1,
4305 Sync$1,
4306 parser$8,
4307 splice = [].splice;
4308 NUM_PRIORITIES$1 = 10;
4309 DEFAULT_PRIORITY$1 = 5;
4310 parser$8 = parser;
4311 Queues$1 = Queues_1;
4312 Job$1 = Job_1;
4313 LocalDatastore$1 = LocalDatastore_1;
4314 RedisDatastore$1 = RedisDatastore_1;
4315 Events$6 = Events_1;
4316 States$1 = States_1;
4317 Sync$1 = Sync_1;
4318
4319 Bottleneck = function () {
4320 var Bottleneck =
4321 /*#__PURE__*/
4322 function () {
4323 function Bottleneck() {
4324 var _this = this;
4325
4326 var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
4327
4328 _classCallCheck(this, Bottleneck);
4329
4330 var storeInstanceOptions, storeOptions;
4331 this._addToQueue = this._addToQueue.bind(this);
4332
4333 for (var _len = arguments.length, invalid = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
4334 invalid[_key - 1] = arguments[_key];
4335 }
4336
4337 this._validateOptions(options, invalid);
4338
4339 parser$8.load(options, this.instanceDefaults, this);
4340 this._queues = new Queues$1(NUM_PRIORITIES$1);
4341 this._scheduled = {};
4342 this._states = new States$1(["RECEIVED", "QUEUED", "RUNNING", "EXECUTING"].concat(this.trackDoneStatus ? ["DONE"] : []));
4343 this._limiter = null;
4344 this.Events = new Events$6(this);
4345 this._submitLock = new Sync$1("submit", this.Promise);
4346 this._registerLock = new Sync$1("register", this.Promise);
4347 storeOptions = parser$8.load(options, this.storeDefaults, {});
4348
4349 this._store = function () {
4350 if (this.datastore === "redis" || this.datastore === "ioredis" || this.connection != null) {
4351 storeInstanceOptions = parser$8.load(options, this.redisStoreDefaults, {});
4352 return new RedisDatastore$1(this, storeOptions, storeInstanceOptions);
4353 } else if (this.datastore === "local") {
4354 storeInstanceOptions = parser$8.load(options, this.localStoreDefaults, {});
4355 return new LocalDatastore$1(this, storeOptions, storeInstanceOptions);
4356 } else {
4357 throw new Bottleneck.prototype.BottleneckError("Invalid datastore type: ".concat(this.datastore));
4358 }
4359 }.call(this);
4360
4361 this._queues.on("leftzero", function () {
4362 var ref;
4363 return (ref = _this._store.heartbeat) != null ? typeof ref.ref === "function" ? ref.ref() : void 0 : void 0;
4364 });
4365
4366 this._queues.on("zero", function () {
4367 var ref;
4368 return (ref = _this._store.heartbeat) != null ? typeof ref.unref === "function" ? ref.unref() : void 0 : void 0;
4369 });
4370 }
4371
4372 _createClass(Bottleneck, [{
4373 key: "_validateOptions",
4374 value: function _validateOptions(options, invalid) {
4375 if (!(options != null && _typeof(options) === "object" && invalid.length === 0)) {
4376 throw new Bottleneck.prototype.BottleneckError("Bottleneck v2 takes a single object argument. Refer to https://github.com/SGrondin/bottleneck#upgrading-to-v2 if you're upgrading from Bottleneck v1.");
4377 }
4378 }
4379 }, {
4380 key: "ready",
4381 value: function ready() {
4382 return this._store.ready;
4383 }
4384 }, {
4385 key: "clients",
4386 value: function clients() {
4387 return this._store.clients;
4388 }
4389 }, {
4390 key: "channel",
4391 value: function channel() {
4392 return "b_".concat(this.id);
4393 }
4394 }, {
4395 key: "channel_client",
4396 value: function channel_client() {
4397 return "b_".concat(this.id, "_").concat(this._store.clientId);
4398 }
4399 }, {
4400 key: "publish",
4401 value: function publish(message) {
4402 return this._store.__publish__(message);
4403 }
4404 }, {
4405 key: "disconnect",
4406 value: function disconnect() {
4407 var flush = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : true;
4408 return this._store.__disconnect__(flush);
4409 }
4410 }, {
4411 key: "chain",
4412 value: function chain(_limiter) {
4413 this._limiter = _limiter;
4414 return this;
4415 }
4416 }, {
4417 key: "queued",
4418 value: function queued(priority) {
4419 return this._queues.queued(priority);
4420 }
4421 }, {
4422 key: "clusterQueued",
4423 value: function clusterQueued() {
4424 return this._store.__queued__();
4425 }
4426 }, {
4427 key: "empty",
4428 value: function empty() {
4429 return this.queued() === 0 && this._submitLock.isEmpty();
4430 }
4431 }, {
4432 key: "running",
4433 value: function running() {
4434 return this._store.__running__();
4435 }
4436 }, {
4437 key: "done",
4438 value: function done() {
4439 return this._store.__done__();
4440 }
4441 }, {
4442 key: "jobStatus",
4443 value: function jobStatus(id) {
4444 return this._states.jobStatus(id);
4445 }
4446 }, {
4447 key: "jobs",
4448 value: function jobs(status) {
4449 return this._states.statusJobs(status);
4450 }
4451 }, {
4452 key: "counts",
4453 value: function counts() {
4454 return this._states.statusCounts();
4455 }
4456 }, {
4457 key: "_randomIndex",
4458 value: function _randomIndex() {
4459 return Math.random().toString(36).slice(2);
4460 }
4461 }, {
4462 key: "check",
4463 value: function check() {
4464 var weight = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 1;
4465 return this._store.__check__(weight);
4466 }
4467 }, {
4468 key: "_clearGlobalState",
4469 value: function _clearGlobalState(index) {
4470 if (this._scheduled[index] != null) {
4471 clearTimeout(this._scheduled[index].expiration);
4472 delete this._scheduled[index];
4473 return true;
4474 } else {
4475 return false;
4476 }
4477 }
4478 }, {
4479 key: "_free",
4480 value: function () {
4481 var _free2 = _asyncToGenerator(
4482 /*#__PURE__*/
4483 regeneratorRuntime.mark(function _callee(index, job, options, eventInfo) {
4484 var e, running, _ref;
4485
4486 return regeneratorRuntime.wrap(function _callee$(_context) {
4487 while (1) {
4488 switch (_context.prev = _context.next) {
4489 case 0:
4490 _context.prev = 0;
4491 _context.next = 3;
4492 return this._store.__free__(index, options.weight);
4493
4494 case 3:
4495 _ref = _context.sent;
4496 running = _ref.running;
4497 this.Events.trigger("debug", "Freed ".concat(options.id), eventInfo);
4498
4499 if (!(running === 0 && this.empty())) {
4500 _context.next = 8;
4501 break;
4502 }
4503
4504 return _context.abrupt("return", this.Events.trigger("idle"));
4505
4506 case 8:
4507 _context.next = 14;
4508 break;
4509
4510 case 10:
4511 _context.prev = 10;
4512 _context.t0 = _context["catch"](0);
4513 e = _context.t0;
4514 return _context.abrupt("return", this.Events.trigger("error", e));
4515
4516 case 14:
4517 case "end":
4518 return _context.stop();
4519 }
4520 }
4521 }, _callee, this, [[0, 10]]);
4522 }));
4523
4524 function _free(_x, _x2, _x3, _x4) {
4525 return _free2.apply(this, arguments);
4526 }
4527
4528 return _free;
4529 }()
4530 }, {
4531 key: "_run",
4532 value: function _run(index, job, wait) {
4533 var _this2 = this;
4534
4535 var clearGlobalState, free, run;
4536 job.doRun();
4537 clearGlobalState = this._clearGlobalState.bind(this, index);
4538 run = this._run.bind(this, index, job);
4539 free = this._free.bind(this, index, job);
4540 return this._scheduled[index] = {
4541 timeout: setTimeout(function () {
4542 return job.doExecute(_this2._limiter, clearGlobalState, run, free);
4543 }, wait),
4544 expiration: job.options.expiration != null ? setTimeout(function () {
4545 return job.doExpire(clearGlobalState, run, free);
4546 }, wait + job.options.expiration) : void 0,
4547 job: job
4548 };
4549 }
4550 }, {
4551 key: "_drainOne",
4552 value: function _drainOne(capacity) {
4553 var _this3 = this;
4554
4555 return this._registerLock.schedule(function () {
4556 var args, index, next, options, queue;
4557
4558 if (_this3.queued() === 0) {
4559 return _this3.Promise.resolve(null);
4560 }
4561
4562 queue = _this3._queues.getFirst();
4563
4564 var _next = next = queue.first();
4565
4566 options = _next.options;
4567 args = _next.args;
4568
4569 if (capacity != null && options.weight > capacity) {
4570 return _this3.Promise.resolve(null);
4571 }
4572
4573 _this3.Events.trigger("debug", "Draining ".concat(options.id), {
4574 args: args,
4575 options: options
4576 });
4577
4578 index = _this3._randomIndex();
4579 return _this3._store.__register__(index, options.weight, options.expiration).then(function (_ref2) {
4580 var success = _ref2.success,
4581 wait = _ref2.wait,
4582 reservoir = _ref2.reservoir;
4583 var empty;
4584
4585 _this3.Events.trigger("debug", "Drained ".concat(options.id), {
4586 success: success,
4587 args: args,
4588 options: options
4589 });
4590
4591 if (success) {
4592 queue.shift();
4593 empty = _this3.empty();
4594
4595 if (empty) {
4596 _this3.Events.trigger("empty");
4597 }
4598
4599 if (reservoir === 0) {
4600 _this3.Events.trigger("depleted", empty);
4601 }
4602
4603 _this3._run(index, next, wait);
4604
4605 return _this3.Promise.resolve(options.weight);
4606 } else {
4607 return _this3.Promise.resolve(null);
4608 }
4609 });
4610 });
4611 }
4612 }, {
4613 key: "_drainAll",
4614 value: function _drainAll(capacity) {
4615 var _this4 = this;
4616
4617 var total = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;
4618 return this._drainOne(capacity).then(function (drained) {
4619 var newCapacity;
4620
4621 if (drained != null) {
4622 newCapacity = capacity != null ? capacity - drained : capacity;
4623 return _this4._drainAll(newCapacity, total + drained);
4624 } else {
4625 return _this4.Promise.resolve(total);
4626 }
4627 })["catch"](function (e) {
4628 return _this4.Events.trigger("error", e);
4629 });
4630 }
4631 }, {
4632 key: "_dropAllQueued",
4633 value: function _dropAllQueued(message) {
4634 return this._queues.shiftAll(function (job) {
4635 return job.doDrop({
4636 message: message
4637 });
4638 });
4639 }
4640 }, {
4641 key: "stop",
4642 value: function stop() {
4643 var _this5 = this;
4644
4645 var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
4646 var done, waitForExecuting;
4647 options = parser$8.load(options, this.stopDefaults);
4648
4649 waitForExecuting = function waitForExecuting(at) {
4650 var finished;
4651
4652 finished = function finished() {
4653 var counts;
4654 counts = _this5._states.counts;
4655 return counts[0] + counts[1] + counts[2] + counts[3] === at;
4656 };
4657
4658 return new _this5.Promise(function (resolve, reject) {
4659 if (finished()) {
4660 return resolve();
4661 } else {
4662 return _this5.on("done", function () {
4663 if (finished()) {
4664 _this5.removeAllListeners("done");
4665
4666 return resolve();
4667 }
4668 });
4669 }
4670 });
4671 };
4672
4673 done = options.dropWaitingJobs ? (this._run = function (index, next) {
4674 return next.doDrop({
4675 message: options.dropErrorMessage
4676 });
4677 }, this._drainOne = function () {
4678 return _this5.Promise.resolve(null);
4679 }, this._registerLock.schedule(function () {
4680 return _this5._submitLock.schedule(function () {
4681 var k, ref, v;
4682 ref = _this5._scheduled;
4683
4684 for (k in ref) {
4685 v = ref[k];
4686
4687 if (_this5.jobStatus(v.job.options.id) === "RUNNING") {
4688 clearTimeout(v.timeout);
4689 clearTimeout(v.expiration);
4690 v.job.doDrop({
4691 message: options.dropErrorMessage
4692 });
4693 }
4694 }
4695
4696 _this5._dropAllQueued(options.dropErrorMessage);
4697
4698 return waitForExecuting(0);
4699 });
4700 })) : this.schedule({
4701 priority: NUM_PRIORITIES$1 - 1,
4702 weight: 0
4703 }, function () {
4704 return waitForExecuting(1);
4705 });
4706
4707 this._receive = function (job) {
4708 return job._reject(new Bottleneck.prototype.BottleneckError(options.enqueueErrorMessage));
4709 };
4710
4711 this.stop = function () {
4712 return _this5.Promise.reject(new Bottleneck.prototype.BottleneckError("stop() has already been called"));
4713 };
4714
4715 return done;
4716 }
4717 }, {
4718 key: "_addToQueue",
4719 value: function () {
4720 var _addToQueue2 = _asyncToGenerator(
4721 /*#__PURE__*/
4722 regeneratorRuntime.mark(function _callee2(job) {
4723 var args, blocked, error, options, reachedHWM, shifted, strategy, _ref3;
4724
4725 return regeneratorRuntime.wrap(function _callee2$(_context2) {
4726 while (1) {
4727 switch (_context2.prev = _context2.next) {
4728 case 0:
4729 args = job.args;
4730 options = job.options;
4731 _context2.prev = 2;
4732 _context2.next = 5;
4733 return this._store.__submit__(this.queued(), options.weight);
4734
4735 case 5:
4736 _ref3 = _context2.sent;
4737 reachedHWM = _ref3.reachedHWM;
4738 blocked = _ref3.blocked;
4739 strategy = _ref3.strategy;
4740 _context2.next = 17;
4741 break;
4742
4743 case 11:
4744 _context2.prev = 11;
4745 _context2.t0 = _context2["catch"](2);
4746 error = _context2.t0;
4747 this.Events.trigger("debug", "Could not queue ".concat(options.id), {
4748 args: args,
4749 options: options,
4750 error: error
4751 });
4752 job.doDrop({
4753 error: error
4754 });
4755 return _context2.abrupt("return", false);
4756
4757 case 17:
4758 if (!blocked) {
4759 _context2.next = 22;
4760 break;
4761 }
4762
4763 job.doDrop();
4764 return _context2.abrupt("return", true);
4765
4766 case 22:
4767 if (!reachedHWM) {
4768 _context2.next = 28;
4769 break;
4770 }
4771
4772 shifted = strategy === Bottleneck.prototype.strategy.LEAK ? this._queues.shiftLastFrom(options.priority) : strategy === Bottleneck.prototype.strategy.OVERFLOW_PRIORITY ? this._queues.shiftLastFrom(options.priority + 1) : strategy === Bottleneck.prototype.strategy.OVERFLOW ? job : void 0;
4773
4774 if (shifted != null) {
4775 shifted.doDrop();
4776 }
4777
4778 if (!(shifted == null || strategy === Bottleneck.prototype.strategy.OVERFLOW)) {
4779 _context2.next = 28;
4780 break;
4781 }
4782
4783 if (shifted == null) {
4784 job.doDrop();
4785 }
4786
4787 return _context2.abrupt("return", reachedHWM);
4788
4789 case 28:
4790 job.doQueue(reachedHWM, blocked);
4791
4792 this._queues.push(job);
4793
4794 _context2.next = 32;
4795 return this._drainAll();
4796
4797 case 32:
4798 return _context2.abrupt("return", reachedHWM);
4799
4800 case 33:
4801 case "end":
4802 return _context2.stop();
4803 }
4804 }
4805 }, _callee2, this, [[2, 11]]);
4806 }));
4807
4808 function _addToQueue(_x5) {
4809 return _addToQueue2.apply(this, arguments);
4810 }
4811
4812 return _addToQueue;
4813 }()
4814 }, {
4815 key: "_receive",
4816 value: function _receive(job) {
4817 if (this._states.jobStatus(job.options.id) != null) {
4818 job._reject(new Bottleneck.prototype.BottleneckError("A job with the same id already exists (id=".concat(job.options.id, ")")));
4819
4820 return false;
4821 } else {
4822 job.doReceive();
4823 return this._submitLock.schedule(this._addToQueue, job);
4824 }
4825 }
4826 }, {
4827 key: "submit",
4828 value: function submit() {
4829 var _this6 = this;
4830
4831 for (var _len2 = arguments.length, args = new Array(_len2), _key2 = 0; _key2 < _len2; _key2++) {
4832 args[_key2] = arguments[_key2];
4833 }
4834
4835 var cb, fn, job, options, ref, ref1, task;
4836
4837 if (typeof args[0] === "function") {
4838 var _ref4, _ref5, _splice$call, _splice$call2;
4839
4840 ref = args, (_ref4 = ref, _ref5 = _toArray(_ref4), fn = _ref5[0], args = _ref5.slice(1), _ref4), (_splice$call = splice.call(args, -1), _splice$call2 = _slicedToArray(_splice$call, 1), cb = _splice$call2[0], _splice$call);
4841 options = parser$8.load({}, this.jobDefaults);
4842 } else {
4843 var _ref6, _ref7, _splice$call3, _splice$call4;
4844
4845 ref1 = args, (_ref6 = ref1, _ref7 = _toArray(_ref6), options = _ref7[0], fn = _ref7[1], args = _ref7.slice(2), _ref6), (_splice$call3 = splice.call(args, -1), _splice$call4 = _slicedToArray(_splice$call3, 1), cb = _splice$call4[0], _splice$call3);
4846 options = parser$8.load(options, this.jobDefaults);
4847 }
4848
4849 task = function task() {
4850 for (var _len3 = arguments.length, args = new Array(_len3), _key3 = 0; _key3 < _len3; _key3++) {
4851 args[_key3] = arguments[_key3];
4852 }
4853
4854 return new _this6.Promise(function (resolve, reject) {
4855 return fn.apply(void 0, args.concat([function () {
4856 for (var _len4 = arguments.length, args = new Array(_len4), _key4 = 0; _key4 < _len4; _key4++) {
4857 args[_key4] = arguments[_key4];
4858 }
4859
4860 return (args[0] != null ? reject : resolve)(args);
4861 }]));
4862 });
4863 };
4864
4865 job = new Job$1(task, args, options, this.jobDefaults, this.rejectOnDrop, this.Events, this._states, this.Promise);
4866 job.promise.then(function (args) {
4867 return typeof cb === "function" ? cb.apply(void 0, _toConsumableArray(args)) : void 0;
4868 })["catch"](function (args) {
4869 if (Array.isArray(args)) {
4870 return typeof cb === "function" ? cb.apply(void 0, _toConsumableArray(args)) : void 0;
4871 } else {
4872 return typeof cb === "function" ? cb(args) : void 0;
4873 }
4874 });
4875 return this._receive(job);
4876 }
4877 }, {
4878 key: "schedule",
4879 value: function schedule() {
4880 for (var _len5 = arguments.length, args = new Array(_len5), _key5 = 0; _key5 < _len5; _key5++) {
4881 args[_key5] = arguments[_key5];
4882 }
4883
4884 var job, options, task;
4885
4886 if (typeof args[0] === "function") {
4887 var _args3 = args;
4888
4889 var _args4 = _toArray(_args3);
4890
4891 task = _args4[0];
4892 args = _args4.slice(1);
4893 options = {};
4894 } else {
4895 var _args5 = args;
4896
4897 var _args6 = _toArray(_args5);
4898
4899 options = _args6[0];
4900 task = _args6[1];
4901 args = _args6.slice(2);
4902 }
4903
4904 job = new Job$1(task, args, options, this.jobDefaults, this.rejectOnDrop, this.Events, this._states, this.Promise);
4905
4906 this._receive(job);
4907
4908 return job.promise;
4909 }
4910 }, {
4911 key: "wrap",
4912 value: function wrap(fn) {
4913 var schedule, wrapped;
4914 schedule = this.schedule.bind(this);
4915
4916 wrapped = function wrapped() {
4917 for (var _len6 = arguments.length, args = new Array(_len6), _key6 = 0; _key6 < _len6; _key6++) {
4918 args[_key6] = arguments[_key6];
4919 }
4920
4921 return schedule.apply(void 0, [fn.bind(this)].concat(args));
4922 };
4923
4924 wrapped.withOptions = function (options) {
4925 for (var _len7 = arguments.length, args = new Array(_len7 > 1 ? _len7 - 1 : 0), _key7 = 1; _key7 < _len7; _key7++) {
4926 args[_key7 - 1] = arguments[_key7];
4927 }
4928
4929 return schedule.apply(void 0, [options, fn].concat(args));
4930 };
4931
4932 return wrapped;
4933 }
4934 }, {
4935 key: "updateSettings",
4936 value: function () {
4937 var _updateSettings = _asyncToGenerator(
4938 /*#__PURE__*/
4939 regeneratorRuntime.mark(function _callee3() {
4940 var options,
4941 _args7 = arguments;
4942 return regeneratorRuntime.wrap(function _callee3$(_context3) {
4943 while (1) {
4944 switch (_context3.prev = _context3.next) {
4945 case 0:
4946 options = _args7.length > 0 && _args7[0] !== undefined ? _args7[0] : {};
4947 _context3.next = 3;
4948 return this._store.__updateSettings__(parser$8.overwrite(options, this.storeDefaults));
4949
4950 case 3:
4951 parser$8.overwrite(options, this.instanceDefaults, this);
4952 return _context3.abrupt("return", this);
4953
4954 case 5:
4955 case "end":
4956 return _context3.stop();
4957 }
4958 }
4959 }, _callee3, this);
4960 }));
4961
4962 function updateSettings() {
4963 return _updateSettings.apply(this, arguments);
4964 }
4965
4966 return updateSettings;
4967 }()
4968 }, {
4969 key: "currentReservoir",
4970 value: function currentReservoir() {
4971 return this._store.__currentReservoir__();
4972 }
4973 }, {
4974 key: "incrementReservoir",
4975 value: function incrementReservoir() {
4976 var incr = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 0;
4977 return this._store.__incrementReservoir__(incr);
4978 }
4979 }]);
4980
4981 return Bottleneck;
4982 }();
4983 Bottleneck["default"] = Bottleneck;
4984 Bottleneck.Events = Events$6;
4985 Bottleneck.version = Bottleneck.prototype.version = require$$8.version;
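  // Strategies decide what happens when a new job would push the queue past
  // the highWater mark: LEAK drops the oldest job with the lowest priority,
  // OVERFLOW_PRIORITY only drops a queued job that is less important than the
  // incoming one, OVERFLOW drops the incoming job itself, and BLOCK drops the
  // incoming job, clears the queue and rejects new jobs until the `penalty`
  // has elapsed.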
4986 Bottleneck.strategy = Bottleneck.prototype.strategy = {
4987 LEAK: 1,
4988 OVERFLOW: 2,
4989 OVERFLOW_PRIORITY: 4,
4990 BLOCK: 3
4991 };
4992 Bottleneck.BottleneckError = Bottleneck.prototype.BottleneckError = BottleneckError_1;
4993 Bottleneck.Group = Bottleneck.prototype.Group = Group_1;
4994 Bottleneck.RedisConnection = Bottleneck.prototype.RedisConnection = RedisConnection_1;
4995 Bottleneck.IORedisConnection = Bottleneck.prototype.IORedisConnection = IORedisConnection_1;
4996 Bottleneck.Batcher = Bottleneck.prototype.Batcher = Batcher_1;
4997 Bottleneck.prototype.jobDefaults = {
4998 priority: DEFAULT_PRIORITY$1,
4999 weight: 1,
5000 expiration: null,
5001 id: "<no-id>"
5002 };
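  // storeDefaults are the settings forwarded to the datastore. `reservoir`
  // caps how many jobs may run before it must be replenished, either manually
  // through incrementReservoir() or automatically: reservoirRefreshInterval /
  // reservoirRefreshAmount reset it on a schedule, while
  // reservoirIncreaseInterval / reservoirIncreaseAmount grow it up to
  // reservoirIncreaseMaximum.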
5003 Bottleneck.prototype.storeDefaults = {
5004 maxConcurrent: null,
5005 minTime: 0,
5006 highWater: null,
5007 strategy: Bottleneck.prototype.strategy.LEAK,
5008 penalty: null,
5009 reservoir: null,
5010 reservoirRefreshInterval: null,
5011 reservoirRefreshAmount: null,
5012 reservoirIncreaseInterval: null,
5013 reservoirIncreaseAmount: null,
5014 reservoirIncreaseMaximum: null
5015 };
5016 Bottleneck.prototype.localStoreDefaults = {
5017 Promise: Promise,
5018 timeout: null,
5019 heartbeatInterval: 250
5020 };
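  // redisStoreDefaults apply when the datastore is "redis" or "ioredis":
  // clientOptions is passed straight to the underlying Redis client,
  // clusterNodes switches ioredis into cluster mode, clearDatastore wipes any
  // existing limiter state on startup, and heartbeatInterval/clientTimeout
  // govern how this instance signals that it is still alive.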
5021 Bottleneck.prototype.redisStoreDefaults = {
5022 Promise: Promise,
5023 timeout: null,
5024 heartbeatInterval: 5000,
5025 clientTimeout: 10000,
5026 clientOptions: {},
5027 clusterNodes: null,
5028 clearDatastore: false,
5029 connection: null
5030 };
5031 Bottleneck.prototype.instanceDefaults = {
5032 datastore: "local",
5033 connection: null,
5034 id: "<no-id>",
5035 rejectOnDrop: true,
5036 trackDoneStatus: false,
5037 Promise: Promise
5038 };
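  // stopDefaults supply the messages used by stop(): waiting jobs are dropped
  // with dropErrorMessage (unless dropWaitingJobs is set to false), and any
  // job submitted after stop() is rejected with enqueueErrorMessage.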
5039 Bottleneck.prototype.stopDefaults = {
5040 enqueueErrorMessage: "This limiter has been stopped and cannot accept new jobs.",
5041 dropWaitingJobs: true,
5042 dropErrorMessage: "This limiter has been stopped."
5043 };
5044 return Bottleneck;
5045 }.call(commonjsGlobal);
5046
5047 var Bottleneck_1 = Bottleneck;
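  // A minimal usage sketch for the assembled library (illustrative only;
  // someTask is a placeholder, and the await belongs inside an async function):
  //
  //   const limiter = new Bottleneck({ maxConcurrent: 2, minTime: 200 });
  //   limiter.on("error", (err) => console.error("limiter error", err));
  //   const res = await limiter.schedule(() => fetch("https://example.com"));
  //
  //   // Callback style: the last argument to submit() is a node-style callback.
  //   limiter.submit(someTask, "arg", (err, value) => { /* handle result */ });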
5048
5049 var es5 = Bottleneck_1;
5050
5051 return es5;
5052
5053})));