1"use strict";
2// Copyright 2022 Google LLC
3//
4// Licensed under the Apache License, Version 2.0 (the "License");
5// you may not use this file except in compliance with the License.
6// You may obtain a copy of the License at
7//
8// http://www.apache.org/licenses/LICENSE-2.0
9//
10// Unless required by applicable law or agreed to in writing, software
11// distributed under the License is distributed on an "AS IS" BASIS,
12// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13// See the License for the specific language governing permissions and
14// limitations under the License.
15Object.defineProperty(exports, "__esModule", { value: true });
16exports.createURI = exports.upload = exports.Upload = exports.PROTOCOL_REGEX = void 0;
17const abort_controller_1 = require("abort-controller");
18const crypto_1 = require("crypto");
19const extend = require("extend");
20const gaxios = require("gaxios");
21const google_auth_library_1 = require("google-auth-library");
22const stream_1 = require("stream");
23const retry = require("async-retry");
24const uuid = require("uuid");
25const NOT_FOUND_STATUS_CODE = 404;
26const RESUMABLE_INCOMPLETE_STATUS_CODE = 308;
27const DEFAULT_API_ENDPOINT_REGEX = /.*\.googleapis\.com/;
28const packageJson = require('../../package.json');
29exports.PROTOCOL_REGEX = /^(\w*):\/\//;
class Upload extends stream_1.Writable {
    constructor(cfg) {
        super();
        this.numBytesWritten = 0;
        this.numRetries = 0;
        this.currentInvocationId = {
            chunk: uuid.v4(),
            uri: uuid.v4(),
            offset: uuid.v4(),
        };
        this.upstreamChunkBuffer = Buffer.alloc(0);
        this.chunkBufferEncoding = undefined;
        this.numChunksReadInRequest = 0;
        /**
         * A chunk used for caching the most recent upload chunk.
         * We should not assume that the server received all bytes sent in the request.
         * - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload
         */
        this.lastChunkSent = Buffer.alloc(0);
        this.upstreamEnded = false;
        cfg = cfg || {};
        if (!cfg.bucket || !cfg.file) {
            throw new Error('A bucket and file name are required');
        }
        cfg.authConfig = cfg.authConfig || {};
        cfg.authConfig.scopes = [
            'https://www.googleapis.com/auth/devstorage.full_control',
        ];
        this.authClient = cfg.authClient || new google_auth_library_1.GoogleAuth(cfg.authConfig);
        this.apiEndpoint = 'https://storage.googleapis.com';
        if (cfg.apiEndpoint) {
            this.apiEndpoint = this.sanitizeEndpoint(cfg.apiEndpoint);
            if (!DEFAULT_API_ENDPOINT_REGEX.test(cfg.apiEndpoint)) {
                this.authClient = gaxios;
            }
        }
        this.baseURI = `${this.apiEndpoint}/upload/storage/v1/b`;
        this.bucket = cfg.bucket;
        const cacheKeyElements = [cfg.bucket, cfg.file];
        if (typeof cfg.generation === 'number') {
            cacheKeyElements.push(`${cfg.generation}`);
        }
        this.cacheKey = cacheKeyElements.join('/');
        this.customRequestOptions = cfg.customRequestOptions || {};
        this.file = cfg.file;
        this.generation = cfg.generation;
        this.kmsKeyName = cfg.kmsKeyName;
        this.metadata = cfg.metadata || {};
        this.offset = cfg.offset;
        this.origin = cfg.origin;
        this.params = cfg.params || {};
        this.userProject = cfg.userProject;
        this.chunkSize = cfg.chunkSize;
        this.retryOptions = cfg.retryOptions;
        if (cfg.key) {
            const base64Key = Buffer.from(cfg.key).toString('base64');
            this.encryption = {
                key: base64Key,
                hash: (0, crypto_1.createHash)('sha256').update(cfg.key).digest('base64'),
            };
        }
        this.predefinedAcl = cfg.predefinedAcl;
        if (cfg.private)
            this.predefinedAcl = 'private';
        if (cfg.public)
            this.predefinedAcl = 'publicRead';
        const autoRetry = cfg.retryOptions.autoRetry;
        this.uriProvidedManually = !!cfg.uri;
        this.uri = cfg.uri;
        this.numBytesWritten = 0;
        this.numRetries = 0; // counter for number of retries currently executed
        if (!autoRetry) {
            cfg.retryOptions.maxRetries = 0;
        }
        this.timeOfFirstRequest = Date.now();
        const contentLength = cfg.metadata
            ? Number(cfg.metadata.contentLength)
            : NaN;
        this.contentLength = isNaN(contentLength) ? '*' : contentLength;
        this.once('writing', () => {
            if (this.uri) {
                this.continueUploading();
            }
            else {
                this.createURI(err => {
                    if (err) {
                        return this.destroy(err);
                    }
                    this.startUploading();
                    return;
                });
            }
        });
    }
    /**
     * Prevent 'finish' event until the upload has succeeded.
     *
     * @param fireFinishEvent The finish callback
     */
    _final(fireFinishEvent = () => { }) {
        this.upstreamEnded = true;
        this.once('uploadFinished', fireFinishEvent);
        process.nextTick(() => {
            this.emit('upstreamFinished');
            // it's possible `_write` may not be called - namely for empty object uploads
            this.emit('writing');
        });
    }
    /**
     * Handles incoming data from upstream
     *
     * @param chunk The chunk to append to the buffer
     * @param encoding The encoding of the chunk
     * @param readCallback A callback for when the buffer has been read downstream
     */
    _write(chunk, encoding, readCallback = () => { }) {
        // Backwards-compatible event
        this.emit('writing');
        this.upstreamChunkBuffer = Buffer.concat([
            this.upstreamChunkBuffer,
            typeof chunk === 'string' ? Buffer.from(chunk, encoding) : chunk,
        ]);
        this.chunkBufferEncoding = encoding;
        this.once('readFromChunkBuffer', readCallback);
        process.nextTick(() => this.emit('wroteToChunkBuffer'));
    }
    /**
     * Prepends data back to the upstream chunk buffer.
     *
     * @param chunk The data to prepend
     */
    unshiftChunkBuffer(chunk) {
        this.upstreamChunkBuffer = Buffer.concat([chunk, this.upstreamChunkBuffer]);
    }
    /**
     * Retrieves data from upstream's buffer.
     *
     * @param limit The maximum amount to return from the buffer.
     * @returns The data requested.
     */
    pullFromChunkBuffer(limit) {
        const chunk = this.upstreamChunkBuffer.slice(0, limit);
        this.upstreamChunkBuffer = this.upstreamChunkBuffer.slice(limit);
        // notify upstream we've read from the buffer so it can potentially
        // send more data down.
        process.nextTick(() => this.emit('readFromChunkBuffer'));
        return chunk;
    }
    /**
     * A handler for determining if data is ready to be read from upstream.
     *
     * @returns If there will be more chunks to read in the future
     */
    async waitForNextChunk() {
        const willBeMoreChunks = await new Promise(resolve => {
            // There's data available - it should be digested
            if (this.upstreamChunkBuffer.byteLength) {
                return resolve(true);
            }
            // The upstream writable ended, we shouldn't expect any more data.
            if (this.upstreamEnded) {
                return resolve(false);
            }
            // Nothing immediate seems to be determined. We need to prepare some
            // listeners to determine next steps...
            const wroteToChunkBufferCallback = () => {
                removeListeners();
                return resolve(true);
            };
            const upstreamFinishedCallback = () => {
                removeListeners();
                // this should be the last chunk, if there's anything there
                if (this.upstreamChunkBuffer.length)
                    return resolve(true);
                return resolve(false);
            };
            // Remove listeners when we're ready to callback.
            const removeListeners = () => {
                this.removeListener('wroteToChunkBuffer', wroteToChunkBufferCallback);
                this.removeListener('upstreamFinished', upstreamFinishedCallback);
            };
            // If there's data recently written it should be digested
            this.once('wroteToChunkBuffer', wroteToChunkBufferCallback);
            // If the upstream finishes let's see if there's anything to grab
            this.once('upstreamFinished', upstreamFinishedCallback);
        });
        return willBeMoreChunks;
    }
    /**
     * Reads data from upstream up to the provided `limit`.
     * Ends when the limit has been reached or no data is expected to be pushed from upstream.
     *
     * @param limit The most amount of data this iterator should return. `Infinity` by default.
     * @param oneChunkMode Determines if one, exhaustive chunk is yielded for the iterator
     */
    async *upstreamIterator(limit = Infinity, oneChunkMode) {
        let completeChunk = Buffer.alloc(0);
        // read from upstream chunk buffer
        while (limit && (await this.waitForNextChunk())) {
            // read until end or limit has been reached
            const chunk = this.pullFromChunkBuffer(limit);
            limit -= chunk.byteLength;
            if (oneChunkMode) {
                // return 1 chunk at the end of iteration
                completeChunk = Buffer.concat([completeChunk, chunk]);
            }
            else {
                // return many chunks throughout iteration
                yield {
                    chunk,
                    encoding: this.chunkBufferEncoding,
                };
            }
        }
        if (oneChunkMode) {
            yield {
                chunk: completeChunk,
                encoding: this.chunkBufferEncoding,
            };
        }
    }
    createURI(callback) {
        if (!callback) {
            return this.createURIAsync();
        }
        this.createURIAsync().then(r => callback(null, r), callback);
    }
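    /**
     * Initiates the resumable upload session: POSTs the object metadata to
     * `{baseURI}/{bucket}/o` with `uploadType=resumable` and `name={file}` as
     * query parameters. The session URI returned in the response's `Location`
     * header is stored on `this.uri` and reused for every subsequent chunk request.
     */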
    async createURIAsync() {
        const metadata = { ...this.metadata };
        const headers = {};
        // Delete content length and content type from metadata if they exist.
        // These are headers and should not be sent as part of the metadata.
        if (metadata.contentLength) {
            headers['X-Upload-Content-Length'] = metadata.contentLength.toString();
            delete metadata.contentLength;
        }
        if (metadata.contentType) {
            headers['X-Upload-Content-Type'] = metadata.contentType;
            delete metadata.contentType;
        }
        // Check if headers already exist before creating new ones
        const reqOpts = {
            method: 'POST',
            url: [this.baseURI, this.bucket, 'o'].join('/'),
            params: Object.assign({
                name: this.file,
                uploadType: 'resumable',
            }, this.params),
            data: metadata,
            headers: {
                'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${this.currentInvocationId.uri}`,
                ...headers,
            },
        };
        if (metadata.contentLength) {
            reqOpts.headers['X-Upload-Content-Length'] =
                metadata.contentLength.toString();
        }
        if (metadata.contentType) {
            reqOpts.headers['X-Upload-Content-Type'] = metadata.contentType;
        }
        if (typeof this.generation !== 'undefined') {
            reqOpts.params.ifGenerationMatch = this.generation;
        }
        if (this.kmsKeyName) {
            reqOpts.params.kmsKeyName = this.kmsKeyName;
        }
        if (this.predefinedAcl) {
            reqOpts.params.predefinedAcl = this.predefinedAcl;
        }
        if (this.origin) {
            reqOpts.headers.Origin = this.origin;
        }
        const uri = await retry(async (bail) => {
            var _a, _b, _c;
            try {
                const res = await this.makeRequest(reqOpts);
                // We have successfully got a URI we can now create a new invocation id
                this.currentInvocationId.uri = uuid.v4();
                return res.headers.location;
            }
            catch (err) {
                const e = err;
                const apiError = {
                    code: (_a = e.response) === null || _a === void 0 ? void 0 : _a.status,
                    name: (_b = e.response) === null || _b === void 0 ? void 0 : _b.statusText,
                    message: (_c = e.response) === null || _c === void 0 ? void 0 : _c.statusText,
                    errors: [
                        {
                            reason: e.code,
                        },
                    ],
                };
                if (this.retryOptions.maxRetries > 0 &&
                    this.retryOptions.retryableErrorFn(apiError)) {
                    throw e;
                }
                else {
                    return bail(e);
                }
            }
        }, {
            retries: this.retryOptions.maxRetries,
            factor: this.retryOptions.retryDelayMultiplier,
            maxTimeout: this.retryOptions.maxRetryDelay * 1000,
            maxRetryTime: this.retryOptions.totalTimeout * 1000, // convert to milliseconds
        });
        this.uri = uri;
        this.offset = 0;
        return uri;
    }
    async continueUploading() {
        if (typeof this.offset === 'number') {
            this.startUploading();
            return;
        }
        await this.getAndSetOffset();
        this.startUploading();
    }
    async startUploading() {
        const multiChunkMode = !!this.chunkSize;
        let responseReceived = false;
        this.numChunksReadInRequest = 0;
        if (!this.offset) {
            this.offset = 0;
        }
        // Check if the offset (server) is too far behind the current stream
        if (this.offset < this.numBytesWritten) {
            const delta = this.numBytesWritten - this.offset;
            const message = `The offset is lower than the number of bytes written. The server has ${this.offset} bytes, while ${this.numBytesWritten} bytes have been uploaded - thus ${delta} bytes are missing. Stopping as this could result in data loss. Initiate a new upload to continue.`;
            this.emit('error', new RangeError(message));
            return;
        }
        // Check if we should 'fast-forward' to the relevant data to upload
        if (this.numBytesWritten < this.offset) {
            // 'fast-forward' to the byte where we need to upload.
            // only push data from the byte after the one we left off on
            const fastForwardBytes = this.offset - this.numBytesWritten;
            for await (const _chunk of this.upstreamIterator(fastForwardBytes)) {
                _chunk; // discard the data up until the point we want
            }
            this.numBytesWritten = this.offset;
        }
        let expectedUploadSize = undefined;
        // Set `expectedUploadSize` to `contentLength - this.numBytesWritten`, if available
        if (typeof this.contentLength === 'number') {
            expectedUploadSize = this.contentLength - this.numBytesWritten;
        }
        // `expectedUploadSize` should be no more than the `chunkSize`.
        // It's possible this is the last chunk request for a multiple
        // chunk upload, thus smaller than the chunk size.
        if (this.chunkSize) {
            expectedUploadSize = expectedUploadSize
                ? Math.min(this.chunkSize, expectedUploadSize)
                : this.chunkSize;
        }
        // A queue for the upstream data
        const upstreamQueue = this.upstreamIterator(
            expectedUploadSize,
            multiChunkMode // multi-chunk mode should return 1 chunk per request
        );
        // The primary read stream for this request. This stream retrieves no more
        // than the exact requested amount from upstream.
        const requestStream = new stream_1.Readable({
            read: async () => {
                // Don't attempt to retrieve data upstream if we already have a response
                if (responseReceived)
                    requestStream.push(null);
                const result = await upstreamQueue.next();
                if (result.value) {
                    this.numChunksReadInRequest++;
                    this.lastChunkSent = result.value.chunk;
                    this.numBytesWritten += result.value.chunk.byteLength;
                    this.emit('progress', {
                        bytesWritten: this.numBytesWritten,
                        contentLength: this.contentLength,
                    });
                    requestStream.push(result.value.chunk, result.value.encoding);
                }
                if (result.done) {
                    requestStream.push(null);
                }
            },
        });
        const headers = {
            'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${this.currentInvocationId.chunk}`,
        };
        // If using multiple chunk upload, set appropriate header
        if (multiChunkMode) {
            // We need to know how much data is available upstream to set the `Content-Range` header.
            const oneChunkIterator = this.upstreamIterator(expectedUploadSize, true);
            const { value } = await oneChunkIterator.next();
            const bytesToUpload = value.chunk.byteLength;
            // Important: we want to know if the upstream has ended and the queue is empty before
            // unshifting data back into the queue. This way we will know if this is the last request or not.
            const isLastChunkOfUpload = !(await this.waitForNextChunk());
            // Important: put the data back in the queue for the actual upload iterator
            this.unshiftChunkBuffer(value.chunk);
            let totalObjectSize = this.contentLength;
            if (typeof this.contentLength !== 'number' && isLastChunkOfUpload) {
                // Let's let the server know this is the last chunk since
                // we didn't know the content-length beforehand.
                totalObjectSize = bytesToUpload + this.numBytesWritten;
            }
            // `- 1` as the ending byte is inclusive in the request.
            const endingByte = bytesToUpload + this.numBytesWritten - 1;
            // `Content-Length` for multiple chunk uploads is the size of the chunk,
            // not the overall object
            headers['Content-Length'] = bytesToUpload;
            headers['Content-Range'] = `bytes ${this.offset}-${endingByte}/${totalObjectSize}`;
        }
        else {
            headers['Content-Range'] = `bytes ${this.offset}-*/${this.contentLength}`;
        }
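        // At this point `Content-Range` takes one of two shapes. For example,
        // with offset 0, a 262144-byte chunk, and a 1048576-byte object:
        //   multi-chunk mode:    'bytes 0-262143/1048576' (the total becomes '*'
        //                        when the object size is unknown and more chunks follow)
        //   single-request mode: 'bytes 0-*/1048576' (or 'bytes 0-*/*' when unknown)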
        const reqOpts = {
            method: 'PUT',
            url: this.uri,
            headers,
            body: requestStream,
        };
        try {
            const resp = await this.makeRequestStream(reqOpts);
            if (resp) {
                responseReceived = true;
                this.responseHandler(resp);
            }
        }
        catch (e) {
            const err = e;
            if (this.retryOptions.retryableErrorFn(err)) {
                this.attemptDelayedRetry({
                    status: NaN,
                    data: err,
                });
                return;
            }
            this.destroy(err);
        }
    }
    // Process the API response to look for errors that came in
    // the response body.
    responseHandler(resp) {
        if (resp.data.error) {
            this.destroy(resp.data.error);
            return;
        }
        // At this point we can safely create a new id for the chunk
        this.currentInvocationId.chunk = uuid.v4();
        const shouldContinueWithNextMultiChunkRequest = this.chunkSize &&
            resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE &&
            resp.headers.range;
        if (shouldContinueWithNextMultiChunkRequest) {
            // Use the upper value in this header to determine where to start the next chunk.
            // We should not assume that the server received all bytes sent in the request.
            // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload
            const range = resp.headers.range;
            this.offset = Number(range.split('-')[1]) + 1;
            // We should not assume that the server received all bytes sent in the request.
            // - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload
            const missingBytes = this.numBytesWritten - this.offset;
            if (missingBytes) {
                const dataToPrependForResending = this.lastChunkSent.slice(-missingBytes);
                // As multi-chunk uploads send one chunk per request and pull one
                // chunk into the pipeline, prepending the missing bytes back should
                // be fine for the next request.
                this.unshiftChunkBuffer(dataToPrependForResending);
                this.numBytesWritten -= missingBytes;
                this.lastChunkSent = Buffer.alloc(0);
            }
            // continue uploading next chunk
            this.continueUploading();
        }
        else if (!this.isSuccessfulResponse(resp.status)) {
            const err = new Error('Upload failed');
            err.code = resp.status;
            err.name = 'Upload failed';
            if (resp === null || resp === void 0 ? void 0 : resp.data) {
                err.errors = [resp === null || resp === void 0 ? void 0 : resp.data];
            }
            this.destroy(err);
        }
        else {
            // remove the last chunk sent to free memory
            this.lastChunkSent = Buffer.alloc(0);
            if (resp && resp.data) {
                resp.data.size = Number(resp.data.size);
            }
            this.emit('metadata', resp.data);
            // Allow the object (Upload) to continue naturally so the user's
            // "finish" event fires.
            this.emit('uploadFinished');
        }
    }
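    // Queries the resumable session for how many bytes it has already persisted
    // by sending an empty PUT with a 'bytes */*' Content-Range. A 308 response
    // carries a Range header such as 'bytes=0-524287'; the next byte to upload is
    // the upper bound plus one. If no Range header is present, nothing has been
    // persisted yet and the offset is reset to 0.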
    async getAndSetOffset() {
        const opts = {
            method: 'PUT',
            url: this.uri,
            headers: {
                'Content-Length': 0,
                'Content-Range': 'bytes */*',
                'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${this.currentInvocationId.offset}`,
            },
        };
        try {
            const resp = await this.makeRequest(opts);
            // Successfully got the offset we can now create a new offset invocation id
            this.currentInvocationId.offset = uuid.v4();
            if (resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE) {
                if (resp.headers.range) {
                    const range = resp.headers.range;
                    this.offset = Number(range.split('-')[1]) + 1;
                    return;
                }
            }
            this.offset = 0;
        }
        catch (e) {
            const err = e;
            if (this.retryOptions.retryableErrorFn(err)) {
                this.attemptDelayedRetry({
                    status: NaN,
                    data: err,
                });
                return;
            }
            this.destroy(err);
        }
    }
    async makeRequest(reqOpts) {
        if (this.encryption) {
            reqOpts.headers = reqOpts.headers || {};
            reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256';
            reqOpts.headers['x-goog-encryption-key'] = this.encryption.key.toString();
            reqOpts.headers['x-goog-encryption-key-sha256'] =
                this.encryption.hash.toString();
        }
        if (this.userProject) {
            reqOpts.params = reqOpts.params || {};
            reqOpts.params.userProject = this.userProject;
        }
        // Let gaxios know we will handle a 308 error code ourselves.
        reqOpts.validateStatus = (status) => {
            return (this.isSuccessfulResponse(status) ||
                status === RESUMABLE_INCOMPLETE_STATUS_CODE);
        };
        const combinedReqOpts = extend(true, {}, this.customRequestOptions, reqOpts);
        const res = await this.authClient.request(combinedReqOpts);
        if (res.data && res.data.error) {
            throw res.data.error;
        }
        return res;
    }
    async makeRequestStream(reqOpts) {
        const controller = new abort_controller_1.default();
        const errorCallback = () => controller.abort();
        this.once('error', errorCallback);
        if (this.userProject) {
            reqOpts.params = reqOpts.params || {};
            reqOpts.params.userProject = this.userProject;
        }
        reqOpts.signal = controller.signal;
        reqOpts.validateStatus = () => true;
        const combinedReqOpts = extend(true, {}, this.customRequestOptions, reqOpts);
        const res = await this.authClient.request(combinedReqOpts);
        const successfulRequest = this.onResponse(res);
        this.removeListener('error', errorCallback);
        return successfulRequest ? res : null;
    }
    /**
     * @return {bool} is the request good?
     */
    onResponse(resp) {
        if (resp.status !== 200 &&
            this.retryOptions.retryableErrorFn({
                code: resp.status,
                message: resp.statusText,
                name: resp.statusText,
            })) {
            this.attemptDelayedRetry(resp);
            return false;
        }
        this.emit('response', resp);
        return true;
    }
    /**
     * @param resp GaxiosResponse object from previous attempt
     */
    attemptDelayedRetry(resp) {
        if (this.numRetries < this.retryOptions.maxRetries) {
            if (resp.status === NOT_FOUND_STATUS_CODE &&
                this.numChunksReadInRequest === 0) {
                this.startUploading();
            }
            else {
                const retryDelay = this.getRetryDelay();
                if (retryDelay <= 0) {
                    this.destroy(new Error(`Retry total time limit exceeded - ${resp.data}`));
                    return;
                }
                // Unshift the most recent chunk back in case it's needed for the next
                // request.
                this.numBytesWritten -= this.lastChunkSent.byteLength;
                this.unshiftChunkBuffer(this.lastChunkSent);
                this.lastChunkSent = Buffer.alloc(0);
                // We don't know how much data has been received by the server.
                // `continueUploading` will recheck the offset via `getAndSetOffset`.
                // If `offset` < `numberBytesReceived` then we will raise a RangeError
                // as we've streamed too much data that has been missed - this should
                // not be the case for multi-chunk uploads as `lastChunkSent` is the
                // body of the entire request.
                this.offset = undefined;
                setTimeout(this.continueUploading.bind(this), retryDelay);
            }
            this.numRetries++;
        }
        else {
            this.destroy(new Error('Retry limit exceeded - ' + resp.data));
        }
    }
    /**
     * @returns {number} the amount of time to wait before retrying the request
     */
    getRetryDelay() {
        const randomMs = Math.round(Math.random() * 1000);
        const waitTime = Math.pow(this.retryOptions.retryDelayMultiplier, this.numRetries) *
            1000 +
            randomMs;
        const maxAllowableDelayMs = this.retryOptions.totalTimeout * 1000 -
            (Date.now() - this.timeOfFirstRequest);
        const maxRetryDelayMs = this.retryOptions.maxRetryDelay * 1000;
        return Math.min(waitTime, maxRetryDelayMs, maxAllowableDelayMs);
    }
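    // Example of the delay computed above: with retryDelayMultiplier = 2 the first
    // retry waits about 2^0 * 1000 ms plus up to 1s of jitter (~1-2s), the second
    // about 2s + jitter, the third about 4s + jitter, and so on, capped both by
    // maxRetryDelay and by whatever remains of totalTimeout since the first request.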
    /*
     * Prepare user-defined API endpoint for compatibility with our API.
     */
    sanitizeEndpoint(url) {
        if (!exports.PROTOCOL_REGEX.test(url)) {
            url = `https://${url}`;
        }
        return url.replace(/\/+$/, ''); // Remove trailing slashes
    }
    /**
     * Check if a given status code is 2xx
     *
     * @param status The status code to check
     * @returns if the status is 2xx
     */
    isSuccessfulResponse(status) {
        return status >= 200 && status < 300;
    }
}
exports.Upload = Upload;
function upload(cfg) {
    return new Upload(cfg);
}
exports.upload = upload;
function createURI(cfg, callback) {
    const up = new Upload(cfg);
    if (!callback) {
        return up.createURI();
    }
    up.createURI().then(r => callback(null, r), callback);
}
exports.createURI = createURI;
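// A minimal usage sketch (not part of the published module): piping a local file
// into `upload()`. The bucket/object names, local path, and retry settings below
// are illustrative placeholders, and the require.main guard keeps this from
// running when the file is merely require()'d. Credentials are resolved through
// google-auth-library's application default credentials.
if (require.main === module) {
    const fs = require('fs');
    const up = upload({
        bucket: 'my-bucket', // hypothetical bucket name
        file: 'remote-object.txt', // hypothetical destination object name
        metadata: { contentType: 'text/plain' },
        retryOptions: {
            autoRetry: true,
            maxRetries: 3,
            retryDelayMultiplier: 2,
            maxRetryDelay: 64, // seconds
            totalTimeout: 600, // seconds
            retryableErrorFn: err => [408, 429, 500, 502, 503, 504].includes(Number(err.code)),
        },
    });
    up.on('progress', p => console.log(`uploaded ${p.bytesWritten} bytes`));
    up.on('error', console.error);
    up.on('finish', () => console.log('upload complete'));
    fs.createReadStream('local-file.txt').pipe(up);
}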
//# sourceMappingURL=resumable-upload.js.map