1 | this.workbox = this.workbox || {};
|
2 | this.workbox.backgroundSync = (function (exports, WorkboxError_js, logger_js, assert_js, getFriendlyURL_js) {
|
3 | ;
|
4 |
|
/**
 * Babel-style Object.assign helper: merges own enumerable properties of every
 * source argument into the first argument (the target) and returns the target.
 * On first call it selects the native `Object.assign` when available (or a
 * manual copy loop otherwise) and caches that choice by reassigning `_extends`.
 */
function _extends() {
  _extends = Object.assign
    ? Object.assign.bind()
    : function (target, ...sources) {
        for (const source of sources) {
          for (const key in source) {
            // Copy own properties only; skip anything inherited.
            if (Object.prototype.hasOwnProperty.call(source, key)) {
              target[key] = source[key];
            }
          }
        }
        return target;
      };
  // Delegate this (and every later) call to the cached implementation.
  return _extends.apply(this, arguments);
}
|
19 |
|
// Returns true when `object` is an instance of at least one of the given
// constructors.
const instanceOfAny = (object, constructors) => {
  for (const Ctor of constructors) {
    if (object instanceof Ctor) {
      return true;
    }
  }
  return false;
};
|
// Lazily initialized by getIdbProxyableTypes()/getCursorAdvanceMethods()
// below. Initialization is deferred so that merely loading this module does
// not touch the IDB* globals (which don't exist in Node environments).
let idbProxyableTypes;
let cursorAdvanceMethods;
|
// This is a function to prevent it throwing up in node environments.
// Returns (building on first call) the IDB classes whose instances get
// wrapped in a Proxy by transformCachableValue.
function getIdbProxyableTypes() {
  if (!idbProxyableTypes) {
    idbProxyableTypes = [IDBDatabase, IDBObjectStore, IDBIndex, IDBCursor, IDBTransaction];
  }
  return idbProxyableTypes;
}
|
// This is a function to prevent it throwing up in node environments.
// Returns (building on first call) the cursor methods that advance a cursor;
// wrapFunction gives these special promise-returning treatment.
function getCursorAdvanceMethods() {
  if (!cursorAdvanceMethods) {
    cursorAdvanceMethods = [IDBCursor.prototype.advance, IDBCursor.prototype.continue, IDBCursor.prototype.continuePrimaryKey];
  }
  return cursorAdvanceMethods;
}
|
// IDBCursor -> the IDBRequest that produced it (cursors reuse their request;
// see promisifyRequest/wrapFunction).
const cursorRequestMap = new WeakMap();
// IDBTransaction -> promise that settles when the transaction completes,
// errors, or aborts (see cacheDonePromiseForTransaction).
const transactionDoneMap = new WeakMap();
// IDBTransaction -> sorted store-name list; Edge objectStoreNames polyfill.
const transactionStoreNamesMap = new WeakMap();
// Original IDB value -> wrapped value, and the reverse mapping, so wrapping
// is idempotent and unwrap() can recover the raw object.
const transformCache = new WeakMap();
const reverseTransformCache = new WeakMap();
|
// Converts an IDBRequest into a promise that resolves with the (wrapped)
// result on 'success' and rejects with `request.error` on 'error'. Both
// listeners are removed once the request settles.
function promisifyRequest(request) {
  const promise = new Promise((resolve, reject) => {
    // Detach both listeners; called from whichever handler fires first.
    const unlisten = () => {
      request.removeEventListener('success', success);
      request.removeEventListener('error', error);
    };
    const success = () => {
      // Wrap the raw result so IDB objects come back proxied.
      resolve(wrap(request.result));
      unlisten();
    };
    const error = () => {
      reject(request.error);
      unlisten();
    };
    request.addEventListener('success', success);
    request.addEventListener('error', error);
  });
  promise.then(value => {
    // Since cursoring reuses the IDBRequest (*sigh*), we cache it for later retrieval
    // (see wrapFunction).
    if (value instanceof IDBCursor) {
      cursorRequestMap.set(value, request);
    }
    // Catching to avoid "Uncaught Promise exceptions"
  }).catch(() => {});
  // This mapping exists in reverseTransformCache but doesn't exist in transformCache. This
  // is because we create many promises from a single IDBRequest.
  reverseTransformCache.set(promise, request);
  return promise;
}
|
// Creates (at most once per transaction) a promise that resolves on
// 'complete' and rejects on 'error' or 'abort'; it is later surfaced as the
// transaction proxy's `done` property (see idbProxyTraps).
function cacheDonePromiseForTransaction(tx) {
  // Early bail if we've already created a done promise for this transaction.
  if (transactionDoneMap.has(tx)) return;
  const done = new Promise((resolve, reject) => {
    // Detach all three listeners once the transaction settles.
    const unlisten = () => {
      tx.removeEventListener('complete', complete);
      tx.removeEventListener('error', error);
      tx.removeEventListener('abort', error);
    };
    const complete = () => {
      resolve();
      unlisten();
    };
    const error = () => {
      // Fall back to a synthetic AbortError when tx.error is unset
      // (e.g. on an abort with no underlying error).
      reject(tx.error || new DOMException('AbortError', 'AbortError'));
      unlisten();
    };
    tx.addEventListener('complete', complete);
    tx.addEventListener('error', error);
    tx.addEventListener('abort', error);
  });
  // Cache it for later retrieval.
  transactionDoneMap.set(tx, done);
}
|
// Proxy traps applied to wrapped IDB objects (see transformCachableValue).
// `let` (not `const`) because replaceTraps() swaps in an augmented version.
let idbProxyTraps = {
  get(target, prop, receiver) {
    if (target instanceof IDBTransaction) {
      // Special handling for transaction.done.
      if (prop === 'done') return transactionDoneMap.get(target);
      // Polyfill for objectStoreNames because of Edge.
      if (prop === 'objectStoreNames') {
        return target.objectStoreNames || transactionStoreNamesMap.get(target);
      }
      // Make tx.store return the only store in the transaction, or undefined if there are many.
      if (prop === 'store') {
        // A second store name at index [1] means the transaction spans
        // multiple stores, so there is no single `store` to return.
        return receiver.objectStoreNames[1] ? undefined : receiver.objectStore(receiver.objectStoreNames[0]);
      }
    }
    // Else transform whatever we get back.
    return wrap(target[prop]);
  },
  set(target, prop, value) {
    // Write straight through to the underlying IDB object.
    target[prop] = value;
    return true;
  },
  has(target, prop) {
    // 'done' and 'store' are synthesized by the get trap above, so report
    // them as present on transactions.
    if (target instanceof IDBTransaction && (prop === 'done' || prop === 'store')) {
      return true;
    }
    return prop in target;
  }
};
|
// Replaces the module-level proxy traps with whatever `callback` returns.
// The callback receives the current traps so it can layer extra behavior on
// top of them (see the replaceTraps call that installs database shortcuts).
function replaceTraps(callback) {
  const nextTraps = callback(idbProxyTraps);
  idbProxyTraps = nextTraps;
}
|
// Wraps an IDB-related function so its `this`, arguments, and return value
// pass through the wrap/unwrap machinery. Two special cases are handled:
// the Edge objectStoreNames polyfill and cursor-advance methods.
function wrapFunction(func) {
  // Due to expected object equality (which is enforced by the caching in `wrap`), we
  // only create one new func per func.
  // Edge doesn't support objectStoreNames (booo), so we polyfill it here.
  if (func === IDBDatabase.prototype.transaction && !('objectStoreNames' in IDBTransaction.prototype)) {
    return function (storeNames, ...args) {
      const tx = func.call(unwrap(this), storeNames, ...args);
      // Record the store names (sorted, normalized to an array) so the
      // objectStoreNames getter in idbProxyTraps can serve them later.
      transactionStoreNamesMap.set(tx, storeNames.sort ? storeNames.sort() : [storeNames]);
      return wrap(tx);
    };
  }
  // Cursor methods are special, as the behaviour is a little more different to standard IDB. In
  // IDB, you advance the cursor and wait for a new 'success' on the IDBRequest that gave you the
  // cursor. It's kinda like a promise that can resolve with many values. That doesn't make sense
  // with real promises, so each advance methods returns a new promise for the cursor object, or
  // undefined if the end of the cursor has been reached.
  if (getCursorAdvanceMethods().includes(func)) {
    return function (...args) {
      // Calling the original function with the proxy as 'this' causes ILLEGAL INVOCATION, so we use
      // the original object.
      func.apply(unwrap(this), args);
      return wrap(cursorRequestMap.get(this));
    };
  }
  return function (...args) {
    // Calling the original function with the proxy as 'this' causes ILLEGAL INVOCATION, so we use
    // the original object.
    return wrap(func.apply(unwrap(this), args));
  };
}
|
// Transforms a value into its wrapped form when it is a type this library
// proxies; otherwise returns it unchanged.
function transformCachableValue(value) {
  if (typeof value === 'function') return wrapFunction(value);
  // This doesn't return, it just creates a 'done' promise for the transaction,
  // which is later returned for transaction.done (see idbObjectHandler).
  if (value instanceof IDBTransaction) cacheDonePromiseForTransaction(value);
  if (instanceOfAny(value, getIdbProxyableTypes())) return new Proxy(value, idbProxyTraps);
  // Return the same value back if we're not going to transform it.
  return value;
}
|
// Public entry point of the wrapping machinery: converts IDBRequests to
// promises and proxies other IDB objects, caching so the same input always
// yields the same wrapped output.
function wrap(value) {
  // We sometimes generate multiple promises from a single IDBRequest (eg when cursoring), because
  // IDB is weird and a single IDBRequest can yield many responses, so these can't be cached.
  if (value instanceof IDBRequest) return promisifyRequest(value);
  // If we've already transformed this value before, reuse the transformed value.
  // This is faster, but it also provides object equality.
  if (transformCache.has(value)) return transformCache.get(value);
  const newValue = transformCachableValue(value);
  // Not all types are transformed.
  // These may be primitive types, so they can't be WeakMap keys.
  if (newValue !== value) {
    transformCache.set(value, newValue);
    reverseTransformCache.set(newValue, value);
  }
  return newValue;
}
|
// Reverse of `wrap`: recovers the original IDB object for a wrapped value,
// or undefined if the value was never wrapped.
const unwrap = (wrapped) => reverseTransformCache.get(wrapped);
|
177 |
|
178 | /**
|
179 | * Open a database.
|
180 | *
|
181 | * @param name Name of the database.
|
182 | * @param version Schema version.
|
183 | * @param callbacks Additional callbacks.
|
184 | */
|
function openDB(name, version, {
  blocked,
  upgrade,
  blocking,
  terminated
} = {}) {
  const request = indexedDB.open(name, version);
  // wrap() turns the IDBOpenDBRequest into a promise for the wrapped db.
  const openPromise = wrap(request);
  if (upgrade) {
    request.addEventListener('upgradeneeded', event => {
      // Hand the callback wrapped versions of the new db and the
      // version-change transaction.
      upgrade(wrap(request.result), event.oldVersion, event.newVersion, wrap(request.transaction));
    });
  }
  if (blocked) request.addEventListener('blocked', () => blocked());
  openPromise.then(db => {
    if (terminated) db.addEventListener('close', () => terminated());
    if (blocking) db.addEventListener('versionchange', () => blocking());
    // Swallow open failures here; callers observe them via openPromise itself.
  }).catch(() => {});
  return openPromise;
}
|
// IDBObjectStore/IDBIndex method names exposed as database-level shortcuts,
// e.g. db.get(storeName, key) or db.getAllFromIndex(...); see getMethod.
const readMethods = ['get', 'getKey', 'getAll', 'getAllKeys', 'count'];
const writeMethods = ['put', 'add', 'delete', 'clear'];
// Property name -> generated shortcut method, so each is built only once.
const cachedMethods = new Map();
|
// Builds (and caches) a shortcut method for a wrapped database, e.g.
// db.getAllFromIndex(storeName, indexName, ...). Returns undefined when
// `prop` is not a recognized shortcut name, so the caller can fall back to
// the default proxy behavior.
function getMethod(target, prop) {
  if (!(target instanceof IDBDatabase && !(prop in target) && typeof prop === 'string')) {
    return;
  }
  if (cachedMethods.get(prop)) return cachedMethods.get(prop);
  // A 'FromIndex' suffix means the op runs against an index, not the store.
  const targetFuncName = prop.replace(/FromIndex$/, '');
  const useIndex = prop !== targetFuncName;
  const isWrite = writeMethods.includes(targetFuncName);
  if (
  // Bail if the target doesn't exist on the target. Eg, getAll isn't in Edge.
  !(targetFuncName in (useIndex ? IDBIndex : IDBObjectStore).prototype) || !(isWrite || readMethods.includes(targetFuncName))) {
    return;
  }
  const method = async function (storeName, ...args) {
    // isWrite ? 'readwrite' : undefined gzipps better, but fails in Edge :(
    const tx = this.transaction(storeName, isWrite ? 'readwrite' : 'readonly');
    let target = tx.store;
    if (useIndex) target = target.index(args.shift());
    // Must reject if op rejects.
    // If it's a write operation, must reject if tx.done rejects.
    // Must reject with op rejection first.
    // Must resolve with op value.
    // Must handle both promises (no unhandled rejections)
    return (await Promise.all([target[targetFuncName](...args), isWrite && tx.done]))[0];
  };
  cachedMethods.set(prop, method);
  return method;
}
|
// Layer the database method shortcuts (getMethod) on top of the base proxy
// traps; unrecognized properties fall through to the previous traps.
replaceTraps(oldTraps => _extends({}, oldTraps, {
  get: (target, prop, receiver) => getMethod(target, prop) || oldTraps.get(target, prop, receiver),
  has: (target, prop) => !!getMethod(target, prop) || oldTraps.has(target, prop)
}));
|
240 |
|
// @ts-ignore
// NOTE(review): this appears to be Workbox's standard version-marker idiom:
// the string literal identifies the module+version for tooling. At runtime
// the expression is effectively inert — the property is normally undefined
// (short-circuiting the call), and `_` is not defined here, so any throw is
// swallowed by the catch.
try {
  self['workbox:background-sync:7.0.0'] && _();
} catch (e) {}
|
245 |
|
246 | /*
|
247 | Copyright 2021 Google LLC
|
248 |
|
249 | Use of this source code is governed by an MIT-style
|
250 | license that can be found in the LICENSE file or at
|
251 | https://opensource.org/licenses/MIT.
|
252 | */
|
// Bump DB_VERSION whenever the schema below changes; QueueDb._upgradeDb
// drops and recreates the object store on upgrade.
const DB_VERSION = 3;
const DB_NAME = 'workbox-background-sync';
const REQUEST_OBJECT_STORE_NAME = 'requests';
// The index name doubles as the indexed property name on stored entries.
const QUEUE_NAME_INDEX = 'queueName';
|
257 | /**
|
258 | * A class to interact directly an IndexedDB created specifically to save and
|
259 | * retrieve QueueStoreEntries. This class encapsulates all the schema details
|
260 | * to store the representation of a Queue.
|
261 | *
|
262 | * @private
|
263 | */
|
class QueueDb {
  constructor() {
    // Lazily-opened connection; created on first use by getDb().
    this._db = null;
  }
  /**
   * Add QueueStoreEntry to underlying db.
   *
   * @param {UnidentifiedQueueStoreEntry} entry
   */
  async addEntry(entry) {
    const db = await this.getDb();
    const tx = db.transaction(REQUEST_OBJECT_STORE_NAME, 'readwrite', {
      durability: 'relaxed'
    });
    await tx.store.add(entry);
    await tx.done;
  }
  /**
   * Returns the first entry id in the ObjectStore, or undefined when the
   * store is empty.
   *
   * @return {Promise<number | undefined>}
   */
  async getFirstEntryId() {
    const db = await this.getDb();
    const cursor = await db.transaction(REQUEST_OBJECT_STORE_NAME).store.openCursor();
    // openCursor() yields null when there are no entries.
    return cursor ? cursor.value.id : undefined;
  }
  /**
   * Get all the entries filtered by index
   *
   * @param queueName
   * @return {Promise<QueueStoreEntry[]>}
   */
  async getAllEntriesByQueueName(queueName) {
    const db = await this.getDb();
    const results = await db.getAllFromIndex(REQUEST_OBJECT_STORE_NAME, QUEUE_NAME_INDEX, IDBKeyRange.only(queueName));
    // Normalize a missing result to an empty array.
    return results ? results : [];
  }
  /**
   * Returns the number of entries filtered by index
   *
   * @param queueName
   * @return {Promise<number>}
   */
  async getEntryCountByQueueName(queueName) {
    const db = await this.getDb();
    return db.countFromIndex(REQUEST_OBJECT_STORE_NAME, QUEUE_NAME_INDEX, IDBKeyRange.only(queueName));
  }
  /**
   * Deletes a single entry by id.
   *
   * @param {number} id the id of the entry to be deleted
   */
  async deleteEntry(id) {
    const db = await this.getDb();
    await db.delete(REQUEST_OBJECT_STORE_NAME, id);
  }
  /**
   * Returns the first (oldest) entry matching the queue name, if any.
   *
   * @param queueName
   * @returns {Promise<QueueStoreEntry | undefined>}
   */
  async getFirstEntryByQueueName(queueName) {
    return await this.getEndEntryFromIndex(IDBKeyRange.only(queueName), 'next');
  }
  /**
   * Returns the last (newest) entry matching the queue name, if any.
   *
   * @param queueName
   * @returns {Promise<QueueStoreEntry | undefined>}
   */
  async getLastEntryByQueueName(queueName) {
    return await this.getEndEntryFromIndex(IDBKeyRange.only(queueName), 'prev');
  }
  /**
   * Returns either the first or the last entry, depending on direction.
   * Filtered by index.
   *
   * @param {IDBKeyRange} query
   * @param {IDBCursorDirection} direction
   * @return {Promise<QueueStoreEntry | undefined>}
   * @private
   */
  async getEndEntryFromIndex(query, direction) {
    const db = await this.getDb();
    const cursor = await db.transaction(REQUEST_OBJECT_STORE_NAME).store.index(QUEUE_NAME_INDEX).openCursor(query, direction);
    // cursor is null when no entry matches the query.
    return cursor ? cursor.value : undefined;
  }
  /**
   * Returns an open connection to the database, opening it on first use.
   *
   * @private
   */
  async getDb() {
    if (!this._db) {
      this._db = await openDB(DB_NAME, DB_VERSION, {
        upgrade: this._upgradeDb
      });
    }
    return this._db;
  }
  /**
   * Upgrades QueueDB
   *
   * NOTE: a store created by any earlier schema version is deleted (its
   * entries discarded) before the current store is created.
   *
   * @param {IDBPDatabase<QueueDBSchema>} db
   * @param {number} oldVersion
   * @private
   */
  _upgradeDb(db, oldVersion) {
    if (oldVersion > 0 && oldVersion < DB_VERSION) {
      if (db.objectStoreNames.contains(REQUEST_OBJECT_STORE_NAME)) {
        db.deleteObjectStore(REQUEST_OBJECT_STORE_NAME);
      }
    }
    const objStore = db.createObjectStore(REQUEST_OBJECT_STORE_NAME, {
      autoIncrement: true,
      keyPath: 'id'
    });
    objStore.createIndex(QUEUE_NAME_INDEX, QUEUE_NAME_INDEX, {
      unique: false
    });
  }
}
|
386 |
|
387 | /*
|
388 | Copyright 2018 Google LLC
|
389 |
|
390 | Use of this source code is governed by an MIT-style
|
391 | license that can be found in the LICENSE file or at
|
392 | https://opensource.org/licenses/MIT.
|
393 | */
|
394 | /**
|
395 | * A class to manage storing requests from a Queue in IndexedDB,
|
396 | * indexed by their queue name for easier access.
|
397 | *
|
398 | * Most developers will not need to access this class directly;
|
399 | * it is exposed for advanced use cases.
|
400 | */
|
class QueueStore {
  /**
   * Associates this instance with a Queue instance, so entries added can be
   * identified by their queue name.
   *
   * @param {string} queueName
   */
  constructor(queueName) {
    this._queueName = queueName;
    this._queueDb = new QueueDb();
  }
  /**
   * Append an entry last in the queue.
   *
   * Note: the passed `entry` object is mutated (`id` is removed and
   * `queueName` is set) before being persisted.
   *
   * @param {Object} entry
   * @param {Object} entry.requestData
   * @param {number} [entry.timestamp]
   * @param {Object} [entry.metadata]
   */
  async pushEntry(entry) {
    {
      assert_js.assert.isType(entry, 'object', {
        moduleName: 'workbox-background-sync',
        className: 'QueueStore',
        funcName: 'pushEntry',
        paramName: 'entry'
      });
      assert_js.assert.isType(entry.requestData, 'object', {
        moduleName: 'workbox-background-sync',
        className: 'QueueStore',
        funcName: 'pushEntry',
        paramName: 'entry.requestData'
      });
    }
    // Don't specify an ID since one is automatically generated.
    delete entry.id;
    entry.queueName = this._queueName;
    await this._queueDb.addEntry(entry);
  }
  /**
   * Prepend an entry first in the queue.
   *
   * Note: the passed `entry` object is mutated (`id` and `queueName` are
   * set) before being persisted.
   *
   * @param {Object} entry
   * @param {Object} entry.requestData
   * @param {number} [entry.timestamp]
   * @param {Object} [entry.metadata]
   */
  async unshiftEntry(entry) {
    {
      assert_js.assert.isType(entry, 'object', {
        moduleName: 'workbox-background-sync',
        className: 'QueueStore',
        funcName: 'unshiftEntry',
        paramName: 'entry'
      });
      assert_js.assert.isType(entry.requestData, 'object', {
        moduleName: 'workbox-background-sync',
        className: 'QueueStore',
        funcName: 'unshiftEntry',
        paramName: 'entry.requestData'
      });
    }
    const firstId = await this._queueDb.getFirstEntryId();
    if (firstId !== undefined) {
      // Pick an ID one less than the lowest ID in the object store. The
      // explicit `undefined` check matters: a plain truthiness check would
      // misbehave once the lowest ID reaches 0, since IndexedDB permits 0
      // and negative number keys.
      entry.id = firstId - 1;
    } else {
      // The store is empty, so let the auto-incrementor assign the ID.
      delete entry.id;
    }
    entry.queueName = this._queueName;
    await this._queueDb.addEntry(entry);
  }
  /**
   * Removes and returns the last entry in the queue matching the `queueName`.
   *
   * @return {Promise<QueueStoreEntry|undefined>}
   */
  async popEntry() {
    return this._removeEntry(await this._queueDb.getLastEntryByQueueName(this._queueName));
  }
  /**
   * Removes and returns the first entry in the queue matching the `queueName`.
   *
   * @return {Promise<QueueStoreEntry|undefined>}
   */
  async shiftEntry() {
    return this._removeEntry(await this._queueDb.getFirstEntryByQueueName(this._queueName));
  }
  /**
   * Returns all entries in the store matching the `queueName`.
   *
   * @return {Promise<Array<Object>>}
   */
  async getAll() {
    return await this._queueDb.getAllEntriesByQueueName(this._queueName);
  }
  /**
   * Returns the number of entries in the store matching the `queueName`.
   *
   * @return {Promise<number>}
   */
  async size() {
    return await this._queueDb.getEntryCountByQueueName(this._queueName);
  }
  /**
   * Deletes the entry for the given ID.
   *
   * WARNING: this method does not ensure the deleted entry belongs to this
   * queue (i.e. matches the `queueName`). But this limitation is acceptable
   * as this class is not publicly exposed. An additional check would make
   * this method slower than it needs to be.
   *
   * @param {number} id
   */
  async deleteEntry(id) {
    await this._queueDb.deleteEntry(id);
  }
  /**
   * Deletes the given entry (when defined) and returns it.
   *
   * @param {QueueStoreEntry} [entry]
   * @return {Promise<QueueStoreEntry|undefined>}
   * @private
   */
  async _removeEntry(entry) {
    if (entry) {
      await this.deleteEntry(entry.id);
    }
    return entry;
  }
}
|
535 |
|
536 | /*
|
537 | Copyright 2018 Google LLC
|
538 |
|
539 | Use of this source code is governed by an MIT-style
|
540 | license that can be found in the LICENSE file or at
|
541 | https://opensource.org/licenses/MIT.
|
542 | */
|
// Request properties copied verbatim into IndexedDB; `url`, `headers`, and
// `body` need special handling and are dealt with separately in fromRequest.
const serializableProperties = ['method', 'referrer', 'referrerPolicy', 'mode', 'credentials', 'cache', 'redirect', 'integrity', 'keepalive'];
|
544 | /**
|
545 | * A class to make it easier to serialize and de-serialize requests so they
|
546 | * can be stored in IndexedDB.
|
547 | *
|
548 | * Most developers will not need to access this class directly;
|
549 | * it is exposed for advanced use cases.
|
550 | */
|
class StorableRequest {
  /**
   * Converts a Request object to a plain object that can be structured
   * cloned or JSON-stringified.
   *
   * @param {Request} request
   * @return {Promise<StorableRequest>}
   */
  static async fromRequest(request) {
    const requestData = {
      url: request.url,
      headers: {}
    };
    // GET requests carry no body; for everything else, capture it.
    if (request.method !== 'GET') {
      // Use ArrayBuffer to support non-text request bodies.
      // NOTE: we can't use Blobs because Safari doesn't support storing
      // Blobs in IndexedDB in some cases:
      // https://github.com/dfahlander/Dexie.js/issues/618#issuecomment-398348457
      requestData.body = await request.clone().arrayBuffer();
    }
    // Flatten the Headers iterable into a plain object.
    for (const [headerName, headerValue] of request.headers.entries()) {
      requestData.headers[headerName] = headerValue;
    }
    // Copy every other serializable request property that is set.
    for (const prop of serializableProperties) {
      const propValue = request[prop];
      if (propValue !== undefined) {
        requestData[prop] = propValue;
      }
    }
    return new StorableRequest(requestData);
  }
  /**
   * Accepts an object of request data that can be used to construct a
   * `Request` but can also be stored in IndexedDB.
   *
   * @param {Object} requestData An object of request data that includes the
   *     `url` plus any relevant properties of
   *     [requestInit]{@link https://fetch.spec.whatwg.org/#requestinit}.
   */
  constructor(requestData) {
    {
      assert_js.assert.isType(requestData, 'object', {
        moduleName: 'workbox-background-sync',
        className: 'StorableRequest',
        funcName: 'constructor',
        paramName: 'requestData'
      });
      assert_js.assert.isType(requestData.url, 'string', {
        moduleName: 'workbox-background-sync',
        className: 'StorableRequest',
        funcName: 'constructor',
        paramName: 'requestData.url'
      });
    }
    // If the request's mode is `navigate`, convert it to `same-origin` since
    // navigation requests can't be constructed via script.
    if (requestData['mode'] === 'navigate') {
      requestData['mode'] = 'same-origin';
    }
    this._requestData = requestData;
  }
  /**
   * Returns a deep clone of the instances `_requestData` object.
   *
   * @return {Object}
   */
  toObject() {
    // Shallow-copy the record and its headers map, then copy the body
    // buffer, so callers can't mutate the stored data.
    const copy = Object.assign({}, this._requestData);
    copy.headers = Object.assign({}, this._requestData.headers);
    if (copy.body) {
      copy.body = copy.body.slice(0);
    }
    return copy;
  }
  /**
   * Converts this instance to a Request.
   *
   * @return {Request}
   */
  toRequest() {
    return new Request(this._requestData.url, this._requestData);
  }
  /**
   * Creates and returns a deep clone of the instance.
   *
   * @return {StorableRequest}
   */
  clone() {
    return new StorableRequest(this.toObject());
  }
}
|
644 |
|
645 | /*
|
646 | Copyright 2018 Google LLC
|
647 |
|
648 | Use of this source code is governed by an MIT-style
|
649 | license that can be found in the LICENSE file or at
|
650 | https://opensource.org/licenses/MIT.
|
651 | */
|
// Prefix for background-sync registration tags.
const TAG_PREFIX = 'workbox-background-sync';
const MAX_RETENTION_TIME = 60 * 24 * 7; // 7 days in minutes
// Queue names already in use; the Queue constructor uses this to reject
// duplicate names.
const queueNames = new Set();
|
655 | /**
|
656 | * Converts a QueueStore entry into the format exposed by Queue. This entails
|
657 | * converting the request data into a real request and omitting the `id` and
|
658 | * `queueName` properties.
|
659 | *
|
660 | * @param {UnidentifiedQueueStoreEntry} queueStoreEntry
|
661 | * @return {Queue}
|
662 | * @private
|
663 | */
|
const convertEntry = queueStoreEntry => {
  const { requestData, timestamp, metadata } = queueStoreEntry;
  // Rebuild a real Request from the stored data; `id` and `queueName` are
  // deliberately not copied onto the exposed entry.
  const queueEntry = {
    request: new StorableRequest(requestData).toRequest(),
    timestamp
  };
  // `metadata` is optional, so attach it only when present.
  if (metadata) {
    queueEntry.metadata = metadata;
  }
  return queueEntry;
};
|
674 | /**
|
675 | * A class to manage storing failed requests in IndexedDB and retrying them
|
676 | * later. All parts of the storing and replaying process are observable via
|
677 | * callbacks.
|
678 | *
|
679 | * @memberof workbox-background-sync
|
680 | */
|
681 | class Queue {
|
682 | /**
|
683 | * Creates an instance of Queue with the given options
|
684 | *
|
685 | * @param {string} name The unique name for this queue. This name must be
|
686 | * unique as it's used to register sync events and store requests
|
687 | * in IndexedDB specific to this instance. An error will be thrown if
|
688 | * a duplicate name is detected.
|
689 | * @param {Object} [options]
|
690 | * @param {Function} [options.onSync] A function that gets invoked whenever
|
691 | * the 'sync' event fires. The function is invoked with an object
|
692 | * containing the `queue` property (referencing this instance), and you
|
693 | * can use the callback to customize the replay behavior of the queue.
|
694 | * When not set the `replayRequests()` method is called.
|
695 | * Note: if the replay fails after a sync event, make sure you throw an
|
696 | * error, so the browser knows to retry the sync event later.
|
697 | * @param {number} [options.maxRetentionTime=7 days] The amount of time (in
|
698 | * minutes) a request may be retried. After this amount of time has
|
699 | * passed, the request will be deleted from the queue.
|
700 | * @param {boolean} [options.forceSyncFallback=false] If `true`, instead
|
701 | * of attempting to use background sync events, always attempt to replay
|
702 | * queued request at service worker startup. Most folks will not need
|
703 | * this, unless you explicitly target a runtime like Electron that
|
704 | * exposes the interfaces for background sync, but does not have a working
|
705 | * implementation.
|
706 | */
|
constructor(name, {
  forceSyncFallback,
  onSync,
  maxRetentionTime
} = {}) {
  // Internal state flags, managed by the sync/replay logic later in the
  // class (outside this view).
  this._syncInProgress = false;
  this._requestsAddedDuringSync = false;
  // Ensure the store name is not already being used
  if (queueNames.has(name)) {
    throw new WorkboxError_js.WorkboxError('duplicate-queue-name', {
      name
    });
  } else {
    queueNames.add(name);
  }
  this._name = name;
  // Default to replaying all queued requests when no onSync is provided.
  this._onSync = onSync || this.replayRequests;
  this._maxRetentionTime = maxRetentionTime || MAX_RETENTION_TIME;
  this._forceSyncFallback = Boolean(forceSyncFallback);
  this._queueStore = new QueueStore(this._name);
  // NOTE(review): _addSyncListener is defined later in the class (outside
  // this view); presumably it registers the 'sync' handler or the fallback
  // replay — confirm against the full class.
  this._addSyncListener();
}
|
729 | /**
|
730 | * @return {string}
|
731 | */
|
get name() {
  // Set once in the constructor; unique per service worker (the constructor
  // throws on duplicates).
  return this._name;
}
|
735 | /**
|
736 | * Stores the passed request in IndexedDB (with its timestamp and any
|
737 | * metadata) at the end of the queue.
|
738 | *
|
739 | * @param {QueueEntry} entry
|
740 | * @param {Request} entry.request The request to store in the queue.
|
741 | * @param {Object} [entry.metadata] Any metadata you want associated with the
|
742 | * stored request. When requests are replayed you'll have access to this
|
743 | * metadata object in case you need to modify the request beforehand.
|
744 | * @param {number} [entry.timestamp] The timestamp (Epoch time in
|
745 | * milliseconds) when the request was first added to the queue. This is
|
746 | * used along with `maxRetentionTime` to remove outdated requests. In
|
747 | * general you don't need to set this value, as it's automatically set
|
748 | * for you (defaulting to `Date.now()`), but you can update it if you
|
749 | * don't want particular requests to expire.
|
750 | */
|
async pushRequest(entry) {
  // Bare block: dev-build parameter validation.
  {
    assert_js.assert.isType(entry, 'object', {
      moduleName: 'workbox-background-sync',
      className: 'Queue',
      funcName: 'pushRequest',
      paramName: 'entry'
    });
    assert_js.assert.isInstance(entry.request, Request, {
      moduleName: 'workbox-background-sync',
      className: 'Queue',
      funcName: 'pushRequest',
      paramName: 'entry.request'
    });
  }
  // Shared add path; 'push' appends to the end of the queue.
  await this._addRequest(entry, 'push');
}
|
768 | /**
|
769 | * Stores the passed request in IndexedDB (with its timestamp and any
|
770 | * metadata) at the beginning of the queue.
|
771 | *
|
772 | * @param {QueueEntry} entry
|
773 | * @param {Request} entry.request The request to store in the queue.
|
774 | * @param {Object} [entry.metadata] Any metadata you want associated with the
|
775 | * stored request. When requests are replayed you'll have access to this
|
776 | * metadata object in case you need to modify the request beforehand.
|
777 | * @param {number} [entry.timestamp] The timestamp (Epoch time in
|
778 | * milliseconds) when the request was first added to the queue. This is
|
779 | * used along with `maxRetentionTime` to remove outdated requests. In
|
780 | * general you don't need to set this value, as it's automatically set
|
781 | * for you (defaulting to `Date.now()`), but you can update it if you
|
782 | * don't want particular requests to expire.
|
783 | */
|
async unshiftRequest(entry) {
  // Bare block: dev-build parameter validation.
  {
    assert_js.assert.isType(entry, 'object', {
      moduleName: 'workbox-background-sync',
      className: 'Queue',
      funcName: 'unshiftRequest',
      paramName: 'entry'
    });
    assert_js.assert.isInstance(entry.request, Request, {
      moduleName: 'workbox-background-sync',
      className: 'Queue',
      funcName: 'unshiftRequest',
      paramName: 'entry.request'
    });
  }
  // Shared add path; 'unshift' prepends to the front of the queue.
  await this._addRequest(entry, 'unshift');
}
|
801 | /**
|
802 | * Removes and returns the last request in the queue (along with its
|
803 | * timestamp and any metadata). The returned object takes the form:
|
804 | * `{request, timestamp, metadata}`.
|
805 | *
|
806 | * @return {Promise<QueueEntry | undefined>}
|
807 | */
|
async popRequest() {
  // Shared removal path; 'pop' removes from the end of the queue.
  return this._removeRequest('pop');
}
|
811 | /**
|
812 | * Removes and returns the first request in the queue (along with its
|
813 | * timestamp and any metadata). The returned object takes the form:
|
814 | * `{request, timestamp, metadata}`.
|
815 | *
|
816 | * @return {Promise<QueueEntry | undefined>}
|
817 | */
|
818 | async shiftRequest() {
|
819 | return this._removeRequest('shift');
|
820 | }
|
821 | /**
|
822 | * Returns all the entries that have not expired (per `maxRetentionTime`).
|
823 | * Any expired entries are removed from the queue.
|
824 | *
|
825 | * @return {Promise<Array<QueueEntry>>}
|
826 | */
|
827 | async getAll() {
|
828 | const allEntries = await this._queueStore.getAll();
|
829 | const now = Date.now();
|
830 | const unexpiredEntries = [];
|
831 | for (const entry of allEntries) {
|
832 | // Ignore requests older than maxRetentionTime. Call this function
|
833 | // recursively until an unexpired request is found.
|
834 | const maxRetentionTimeInMs = this._maxRetentionTime * 60 * 1000;
|
835 | if (now - entry.timestamp > maxRetentionTimeInMs) {
|
836 | await this._queueStore.deleteEntry(entry.id);
|
837 | } else {
|
838 | unexpiredEntries.push(convertEntry(entry));
|
839 | }
|
840 | }
|
841 | return unexpiredEntries;
|
842 | }
|
843 | /**
|
844 | * Returns the number of entries present in the queue.
|
845 | * Note that expired entries (per `maxRetentionTime`) are also included in this count.
|
846 | *
|
847 | * @return {Promise<number>}
|
848 | */
|
849 | async size() {
|
850 | return await this._queueStore.size();
|
851 | }
|
852 | /**
|
853 | * Adds the entry to the QueueStore and registers for a sync event.
|
854 | *
|
855 | * @param {Object} entry
|
856 | * @param {Request} entry.request
|
857 | * @param {Object} [entry.metadata]
|
858 | * @param {number} [entry.timestamp=Date.now()]
|
859 | * @param {string} operation ('push' or 'unshift')
|
860 | * @private
|
861 | */
|
862 | async _addRequest({
|
863 | request,
|
864 | metadata,
|
865 | timestamp = Date.now()
|
866 | }, operation) {
|
867 | const storableRequest = await StorableRequest.fromRequest(request.clone());
|
868 | const entry = {
|
869 | requestData: storableRequest.toObject(),
|
870 | timestamp
|
871 | };
|
872 | // Only include metadata if it's present.
|
873 | if (metadata) {
|
874 | entry.metadata = metadata;
|
875 | }
|
876 | switch (operation) {
|
877 | case 'push':
|
878 | await this._queueStore.pushEntry(entry);
|
879 | break;
|
880 | case 'unshift':
|
881 | await this._queueStore.unshiftEntry(entry);
|
882 | break;
|
883 | }
|
884 | {
|
885 | logger_js.logger.log(`Request for '${getFriendlyURL_js.getFriendlyURL(request.url)}' has ` + `been added to background sync queue '${this._name}'.`);
|
886 | }
|
887 | // Don't register for a sync if we're in the middle of a sync. Instead,
|
888 | // we wait until the sync is complete and call register if
|
889 | // `this._requestsAddedDuringSync` is true.
|
890 | if (this._syncInProgress) {
|
891 | this._requestsAddedDuringSync = true;
|
892 | } else {
|
893 | await this.registerSync();
|
894 | }
|
895 | }
|
896 | /**
|
897 | * Removes and returns the first or last (depending on `operation`) entry
|
898 | * from the QueueStore that's not older than the `maxRetentionTime`.
|
899 | *
|
900 | * @param {string} operation ('pop' or 'shift')
|
901 | * @return {Object|undefined}
|
902 | * @private
|
903 | */
|
904 | async _removeRequest(operation) {
|
905 | const now = Date.now();
|
906 | let entry;
|
907 | switch (operation) {
|
908 | case 'pop':
|
909 | entry = await this._queueStore.popEntry();
|
910 | break;
|
911 | case 'shift':
|
912 | entry = await this._queueStore.shiftEntry();
|
913 | break;
|
914 | }
|
915 | if (entry) {
|
916 | // Ignore requests older than maxRetentionTime. Call this function
|
917 | // recursively until an unexpired request is found.
|
918 | const maxRetentionTimeInMs = this._maxRetentionTime * 60 * 1000;
|
919 | if (now - entry.timestamp > maxRetentionTimeInMs) {
|
920 | return this._removeRequest(operation);
|
921 | }
|
922 | return convertEntry(entry);
|
923 | } else {
|
924 | return undefined;
|
925 | }
|
926 | }
|
927 | /**
|
928 | * Loops through each request in the queue and attempts to re-fetch it.
|
929 | * If any request fails to re-fetch, it's put back in the same position in
|
930 | * the queue (which registers a retry for the next sync event).
|
931 | */
|
932 | async replayRequests() {
|
933 | let entry;
|
934 | while (entry = await this.shiftRequest()) {
|
935 | try {
|
936 | await fetch(entry.request.clone());
|
937 | if ("dev" !== 'production') {
|
938 | logger_js.logger.log(`Request for '${getFriendlyURL_js.getFriendlyURL(entry.request.url)}' ` + `has been replayed in queue '${this._name}'`);
|
939 | }
|
940 | } catch (error) {
|
941 | await this.unshiftRequest(entry);
|
942 | {
|
943 | logger_js.logger.log(`Request for '${getFriendlyURL_js.getFriendlyURL(entry.request.url)}' ` + `failed to replay, putting it back in queue '${this._name}'`);
|
944 | }
|
945 | throw new WorkboxError_js.WorkboxError('queue-replay-failed', {
|
946 | name: this._name
|
947 | });
|
948 | }
|
949 | }
|
950 | {
|
951 | logger_js.logger.log(`All requests in queue '${this.name}' have successfully ` + `replayed; the queue is now empty!`);
|
952 | }
|
953 | }
|
954 | /**
|
955 | * Registers a sync event with a tag unique to this instance.
|
956 | */
|
957 | async registerSync() {
|
958 | // See https://github.com/GoogleChrome/workbox/issues/2393
|
959 | if ('sync' in self.registration && !this._forceSyncFallback) {
|
960 | try {
|
961 | await self.registration.sync.register(`${TAG_PREFIX}:${this._name}`);
|
962 | } catch (err) {
|
963 | // This means the registration failed for some reason, possibly due to
|
964 | // the user disabling it.
|
965 | {
|
966 | logger_js.logger.warn(`Unable to register sync event for '${this._name}'.`, err);
|
967 | }
|
968 | }
|
969 | }
|
970 | }
|
  /**
   * In sync-supporting browsers, this adds a listener for the sync event.
   * In non-sync-supporting browsers, or if _forceSyncFallback is true, this
   * will retry the queue on service worker startup.
   *
   * @private
   */
  _addSyncListener() {
    // See https://github.com/GoogleChrome/workbox/issues/2393
    if ('sync' in self.registration && !this._forceSyncFallback) {
      self.addEventListener('sync', event => {
        // Only react to sync events registered by this queue instance.
        if (event.tag === `${TAG_PREFIX}:${this._name}`) {
          {
            logger_js.logger.log(`Background sync for tag '${event.tag}' ` + `has been received`);
          }
          const syncComplete = async () => {
            // Flag so _addRequest defers re-registration until we're done.
            this._syncInProgress = true;
            let syncError;
            try {
              await this._onSync({
                queue: this
              });
            } catch (error) {
              if (error instanceof Error) {
                syncError = error;
                // Rethrow the error. Note: the logic in the finally clause
                // will run before this gets rethrown.
                throw syncError;
              }
            } finally {
              // New items may have been added to the queue during the sync,
              // so we need to register for a new sync if that's happened...
              // Unless there was an error during the sync, in which
              // case the browser will automatically retry later, as long
              // as `event.lastChance` is not true.
              if (this._requestsAddedDuringSync && !(syncError && !event.lastChance)) {
                await this.registerSync();
              }
              this._syncInProgress = false;
              this._requestsAddedDuringSync = false;
            }
          };
          // Keep the service worker alive until the sync attempt settles;
          // a rejection tells the browser the sync failed (and may be retried).
          event.waitUntil(syncComplete());
        }
      });
    } else {
      {
        logger_js.logger.log(`Background sync replaying without background sync event`);
      }
      // If the browser doesn't support background sync, or the developer has
      // opted-in to not using it, retry every time the service worker starts up
      // as a fallback.
      // `void` marks the promise as intentionally not awaited here.
      void this._onSync({
        queue: this
      });
    }
  }
|
1028 | /**
|
1029 | * Returns the set of queue names. This is primarily used to reset the list
|
1030 | * of queue names in tests.
|
1031 | *
|
1032 | * @return {Set<string>}
|
1033 | *
|
1034 | * @private
|
1035 | */
|
1036 | static get _queueNames() {
|
1037 | return queueNames;
|
1038 | }
|
1039 | }
|
1040 |
|
1041 | /*
|
1042 | Copyright 2018 Google LLC
|
1043 |
|
1044 | Use of this source code is governed by an MIT-style
|
1045 | license that can be found in the LICENSE file or at
|
1046 | https://opensource.org/licenses/MIT.
|
1047 | */
|
/**
 * A class implementing the `fetchDidFail` lifecycle callback. This makes it
 * easier to add failed requests to a background sync Queue.
 *
 * @memberof workbox-background-sync
 */
class BackgroundSyncPlugin {
  /**
   * @param {string} name See the {@link workbox-background-sync.Queue}
   *     documentation for parameter details.
   * @param {Object} [options] See the
   *     {@link workbox-background-sync.Queue} documentation for
   *     parameter details.
   */
  constructor(name, options) {
    this._queue = new Queue(name, options);
    /**
     * Pushes the failed request onto the queue for later replay.
     *
     * @param {Object} options
     * @param {Request} options.request
     * @private
     */
    this.fetchDidFail = async ({ request }) => {
      await this._queue.pushRequest({ request });
    };
  }
}
|
1078 |
|
// Public API of the workbox-background-sync module.
exports.BackgroundSyncPlugin = BackgroundSyncPlugin;
exports.Queue = Queue;
exports.QueueStore = QueueStore;
exports.StorableRequest = StorableRequest;
|
1083 |
|
1084 | return exports;
|
1085 |
|
1086 | })({}, workbox.core._private, workbox.core._private, workbox.core._private, workbox.core._private);
|
1087 | //# sourceMappingURL=workbox-background-sync.dev.js.map
|