1"use strict";
2// Copyright 2019 Google LLC
3//
4// Licensed under the Apache License, Version 2.0 (the "License");
5// you may not use this file except in compliance with the License.
6// You may obtain a copy of the License at
7//
8// http://www.apache.org/licenses/LICENSE-2.0
9//
10// Unless required by applicable law or agreed to in writing, software
11// distributed under the License is distributed on an "AS IS" BASIS,
12// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13// See the License for the specific language governing permissions and
14// limitations under the License.
15var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {
16 if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
17 if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
18 return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
19};
20var _File_instances, _File_validateIntegrity;
21Object.defineProperty(exports, "__esModule", { value: true });
22exports.File = exports.FileExceptionMessages = exports.STORAGE_POST_POLICY_BASE_URL = exports.ActionToHTTPMethod = void 0;
23const nodejs_common_1 = require("./nodejs-common");
24const promisify_1 = require("@google-cloud/promisify");
25const compressible = require("compressible");
26const crypto = require("crypto");
27const extend = require("extend");
28const fs = require("fs");
29const mime = require("mime");
30const resumableUpload = require("./resumable-upload");
31const stream_1 = require("stream");
32const zlib = require("zlib");
33const storage_1 = require("./storage");
34const bucket_1 = require("./bucket");
35const acl_1 = require("./acl");
36const signer_1 = require("./signer");
37// eslint-disable-next-line @typescript-eslint/no-var-requires
38const duplexify = require('duplexify');
39const util_1 = require("./util");
40const hash_stream_validator_1 = require("./hash-stream-validator");
41const retry = require("async-retry");
42var ActionToHTTPMethod;
43(function (ActionToHTTPMethod) {
44 ActionToHTTPMethod["read"] = "GET";
45 ActionToHTTPMethod["write"] = "PUT";
46 ActionToHTTPMethod["delete"] = "DELETE";
47 ActionToHTTPMethod["resumable"] = "POST";
48})(ActionToHTTPMethod = exports.ActionToHTTPMethod || (exports.ActionToHTTPMethod = {}));
49/**
50 * @private
51 */
52exports.STORAGE_POST_POLICY_BASE_URL = 'https://storage.googleapis.com';
53/**
54 * @private
55 */
56const GS_URL_REGEXP = /^gs:\/\/([a-z0-9_.-]+)\/(.+)$/;
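// A minimal sketch (not part of the original source) of what the regular
// expression above captures for a valid `gs://` URL: the bucket name and the
// object name, as used by File#copy when a string destination is given.
//
//   GS_URL_REGEXP.exec('gs://my-bucket/path/to/image.png');
//   // => ['gs://my-bucket/path/to/image.png', 'my-bucket', 'path/to/image.png']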
57class RequestError extends Error {
58}
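// 604,800 seconds (7 days * 24 hours * 60 minutes * 60 seconds); presumably the
// upper bound used for V4 signed URL expirations elsewhere in this module.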
59const SEVEN_DAYS = 7 * 24 * 60 * 60;
60var FileExceptionMessages;
61(function (FileExceptionMessages) {
62 FileExceptionMessages["EXPIRATION_TIME_NA"] = "An expiration time is not available.";
63 FileExceptionMessages["DESTINATION_NO_NAME"] = "Destination file should have a name.";
64 FileExceptionMessages["INVALID_VALIDATION_FILE_RANGE"] = "Cannot use validation with file ranges (start/end).";
65 FileExceptionMessages["MD5_NOT_AVAILABLE"] = "MD5 verification was specified, but is not available for the requested object. MD5 is not available for composite objects.";
66 FileExceptionMessages["EQUALS_CONDITION_TWO_ELEMENTS"] = "Equals condition must be an array of 2 elements.";
67 FileExceptionMessages["STARTS_WITH_TWO_ELEMENTS"] = "StartsWith condition must be an array of 2 elements.";
68 FileExceptionMessages["CONTENT_LENGTH_RANGE_MIN_MAX"] = "ContentLengthRange must have numeric min & max fields.";
69 FileExceptionMessages["DOWNLOAD_MISMATCH"] = "The downloaded data did not match the data from the server. To be sure the content is the same, you should download the file again.";
70 FileExceptionMessages["UPLOAD_MISMATCH_DELETE_FAIL"] = "The uploaded data did not match the data from the server.\n As a precaution, we attempted to delete the file, but it was not successful.\n To be sure the content is the same, you should try removing the file manually,\n then uploading the file again.\n \n\nThe delete attempt failed with this message:\n\n ";
71 FileExceptionMessages["UPLOAD_MISMATCH"] = "The uploaded data did not match the data from the server.\n As a precaution, the file has been deleted.\n To be sure the content is the same, you should try uploading the file again.";
72})(FileExceptionMessages = exports.FileExceptionMessages || (exports.FileExceptionMessages = {}));
73/**
74 * A File object is created from your {@link Bucket} object using
75 * {@link Bucket#file}.
76 *
77 * @class
78 */
79class File extends nodejs_common_1.ServiceObject {
80 /**
81 * Cloud Storage uses access control lists (ACLs) to manage object and
82 * bucket access. ACLs are the mechanism you use to share objects with other
83 * users and allow other users to access your buckets and objects.
84 *
85 * An ACL consists of one or more entries, where each entry grants permissions
86 * to an entity. Permissions define the actions that can be performed against
87 * an object or bucket (for example, `READ` or `WRITE`); the entity defines
88 * who the permission applies to (for example, a specific user or group of
89 * users).
90 *
91 * The `acl` object on a File instance provides methods to list the ACLs
92 * defined on your file, as well as set, update, and delete them.
93 *
94 * See {@link http://goo.gl/6qBBPO| About Access Control lists}
95 *
96 * @name File#acl
97 * @mixes Acl
98 *
99 * @example
100 * ```
101 * const {Storage} = require('@google-cloud/storage');
102 * const storage = new Storage();
103 * const myBucket = storage.bucket('my-bucket');
104 *
105 * const file = myBucket.file('my-file');
106 * //-
107 * // Make a file publicly readable.
108 * //-
109 * const options = {
110 * entity: 'allUsers',
111 * role: storage.acl.READER_ROLE
112 * };
113 *
114 * file.acl.add(options, function(err, aclObject) {});
115 *
116 * //-
117 * // If the callback is omitted, we'll return a Promise.
118 * //-
119 * file.acl.add(options).then(function(data) {
120 * const aclObject = data[0];
121 * const apiResponse = data[1];
122 * });
123 * ```
124 */
125 /**
126 * The API-formatted resource description of the file.
127 *
128 * Note: This is not guaranteed to be up-to-date when accessed. To get the
129 * latest record, call the `getMetadata()` method.
130 *
131 * @name File#metadata
132 * @type {object}
133 */
134 /**
135 * The file's name.
136 * @name File#name
137 * @type {string}
138 */
139 /**
140 * @callback Crc32cGeneratorToStringCallback
141 * A method returning the CRC32C as a base64-encoded string.
142 *
143 * @returns {string}
144 *
145 * @example
146 * Hashing the string 'data' should return 'rth90Q=='
147 *
148 * ```js
149 * const buffer = Buffer.from('data');
150 * crc32c.update(buffer);
151 * crc32c.toString(); // 'rth90Q=='
152 * ```
153 **/
154 /**
155 * @callback Crc32cGeneratorValidateCallback
156 * A method validating a base64-encoded CRC32C string.
157 *
158 * @param {string} [value] base64-encoded CRC32C string to validate
159 * @returns {boolean}
160 *
161 * @example
162 * Should return `true` if the value matches, `false` otherwise
163 *
164 * ```js
165 * const buffer = Buffer.from('data');
166 * crc32c.update(buffer);
167 * crc32c.validate('DkjKuA=='); // false
168 * crc32c.validate('rth90Q=='); // true
169 * ```
170 **/
171 /**
172 * @callback Crc32cGeneratorUpdateCallback
173 * A method for passing `Buffer`s for CRC32C generation.
174 *
175 * @param {Buffer} [data] data to update CRC32C value with
176 * @returns {undefined}
177 *
178 * @example
179 * Hashing buffers from 'some ' and 'text\n'
180 *
181 * ```js
182 * const buffer1 = Buffer.from('some ');
183 * crc32c.update(buffer1);
184 *
185 * const buffer2 = Buffer.from('text\n');
186 * crc32c.update(buffer2);
187 *
188 * crc32c.toString(); // 'DkjKuA=='
189 * ```
190 **/
191 /**
192 * @typedef {object} CRC32CValidator
193 * @property {Crc32cGeneratorToStringCallback} toString
194 * @property {Crc32cGeneratorValidateCallback} validate
195 * @property {Crc32cGeneratorUpdateCallback} update
196 */
197 /**
198 * @callback Crc32cGeneratorCallback
199 * @returns {CRC32CValidator}
200 */
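    /**
     * A minimal sketch (not part of the original source) of supplying a custom
     * CRC32C validator through the `crc32cGenerator` option documented in
     * `FileOptions` below. It assumes the `CRC32C` class exported by this
     * package; any object implementing `update`, `validate`, and `toString`
     * should satisfy the `CRC32CValidator` shape.
     *
     * ```js
     * const {Storage, CRC32C} = require('@google-cloud/storage');
     * const storage = new Storage();
     *
     * const file = storage.bucket('my-bucket').file('my-file', {
     *   // Called whenever the library needs a fresh validator instance.
     *   crc32cGenerator: () => new CRC32C(),
     * });
     * ```
     */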
201 /**
202 * @typedef {object} FileOptions Options passed to the File constructor.
203 * @property {string} [encryptionKey] A custom encryption key.
204 * @property {number} [generation] Generation to scope the file to.
205 * @property {string} [kmsKeyName] Cloud KMS Key used to encrypt this
206 * object, if the object is encrypted by such a key. Limited availability;
207 * usable only by enabled projects.
208 * @property {string} [userProject] The ID of the project which will be
209 * billed for all requests made from this File object.
210 * @property {Crc32cGeneratorCallback} [crc32cGenerator] A function that generates a CRC32C Validator. Defaults to {@link CRC32C}.
211 */
212 /**
213 * Constructs a file object.
214 *
215 * @param {Bucket} bucket The Bucket instance this file is
216 * attached to.
217 * @param {string} name The name of the remote file.
218 * @param {FileOptions} [options] Configuration options.
219 * @example
220 * ```
221 * const {Storage} = require('@google-cloud/storage');
222 * const storage = new Storage();
223 * const myBucket = storage.bucket('my-bucket');
224 *
225 * const file = myBucket.file('my-file');
226 * ```
227 */
228 constructor(bucket, name, options = {}) {
229 var _a, _b;
230 const requestQueryObject = {};
231 let generation;
232 if (options.generation !== null) {
233 if (typeof options.generation === 'string') {
234 generation = Number(options.generation);
235 }
236 else {
237 generation = options.generation;
238 }
239 if (!isNaN(generation)) {
240 requestQueryObject.generation = generation;
241 }
242 }
243 Object.assign(requestQueryObject, options.preconditionOpts);
244 const userProject = options.userProject || bucket.userProject;
245 if (typeof userProject === 'string') {
246 requestQueryObject.userProject = userProject;
247 }
248 const methods = {
249 /**
250 * @typedef {array} DeleteFileResponse
251 * @property {object} 0 The full API response.
252 */
253 /**
254 * @callback DeleteFileCallback
255 * @param {?Error} err Request error, if any.
256 * @param {object} apiResponse The full API response.
257 */
258 /**
259 * Delete the file.
260 *
261 * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation}
262 *
263 * @method File#delete
264 * @param {object} [options] Configuration options.
265 * @param {boolean} [options.ignoreNotFound = false] Ignore an error if
266 * the file does not exist.
267 * @param {string} [options.userProject] The ID of the project which will be
268 * billed for the request.
269 * @param {DeleteFileCallback} [callback] Callback function.
270 * @returns {Promise<DeleteFileResponse>}
271 *
272 * @example
273 * ```
274 * const {Storage} = require('@google-cloud/storage');
275 * const storage = new Storage();
276 * const myBucket = storage.bucket('my-bucket');
277 *
278 * const file = myBucket.file('my-file');
279 * file.delete(function(err, apiResponse) {});
280 *
281 * //-
282 * // If the callback is omitted, we'll return a Promise.
283 * //-
284 * file.delete().then(function(data) {
285 * const apiResponse = data[0];
286 * });
287 *
288 * ```
289 * @example <caption>include:samples/files.js</caption>
290 * region_tag:storage_delete_file
291 * Another example:
292 */
293 delete: {
294 reqOpts: {
295 qs: requestQueryObject,
296 },
297 },
298 /**
299 * @typedef {array} FileExistsResponse
300 * @property {boolean} 0 Whether the {@link File} exists.
301 */
302 /**
303 * @callback FileExistsCallback
304 * @param {?Error} err Request error, if any.
305 * @param {boolean} exists Whether the {@link File} exists.
306 */
307 /**
308 * Check if the file exists.
309 *
310 * @method File#exists
311 * @param {object} [options] Configuration options.
312 * @param {string} [options.userProject] The ID of the project which will be
313 * billed for the request.
314 * @param {FileExistsCallback} [callback] Callback function.
315 * @returns {Promise<FileExistsResponse>}
316 *
317 * @example
318 * ```
319 * const {Storage} = require('@google-cloud/storage');
320 * const storage = new Storage();
321 * const myBucket = storage.bucket('my-bucket');
322 *
323 * const file = myBucket.file('my-file');
324 *
325 * file.exists(function(err, exists) {});
326 *
327 * //-
328 * // If the callback is omitted, we'll return a Promise.
329 * //-
330 * file.exists().then(function(data) {
331 * const exists = data[0];
332 * });
333 * ```
334 */
335 exists: {
336 reqOpts: {
337 qs: requestQueryObject,
338 },
339 },
340 /**
341 * @typedef {array} GetFileResponse
342 * @property {File} 0 The {@link File}.
343 * @property {object} 1 The full API response.
344 */
345 /**
346 * @callback GetFileCallback
347 * @param {?Error} err Request error, if any.
348 * @param {File} file The {@link File}.
349 * @param {object} apiResponse The full API response.
350 */
351 /**
352 * Get a file object and its metadata if it exists.
353 *
354 * @method File#get
355 * @param {object} [options] Configuration options.
356 * @param {string} [options.userProject] The ID of the project which will be
357 * billed for the request.
358 * @param {GetFileCallback} [callback] Callback function.
359 * @returns {Promise<GetFileResponse>}
360 *
361 * @example
362 * ```
363 * const {Storage} = require('@google-cloud/storage');
364 * const storage = new Storage();
365 * const myBucket = storage.bucket('my-bucket');
366 *
367 * const file = myBucket.file('my-file');
368 *
369 * file.get(function(err, file, apiResponse) {
370 * // `file.metadata` has been populated.
371 * });
372 *
373 * //-
374 * // If the callback is omitted, we'll return a Promise.
375 * //-
376 * file.get().then(function(data) {
377 * const file = data[0];
378 * const apiResponse = data[1];
379 * });
380 * ```
381 */
382 get: {
383 reqOpts: {
384 qs: requestQueryObject,
385 },
386 },
387 /**
388 * @typedef {array} GetFileMetadataResponse
389 * @property {object} 0 The {@link File} metadata.
390 * @property {object} 1 The full API response.
391 */
392 /**
393 * @callback GetFileMetadataCallback
394 * @param {?Error} err Request error, if any.
395 * @param {object} metadata The {@link File} metadata.
396 * @param {object} apiResponse The full API response.
397 */
398 /**
399 * Get the file's metadata.
400 *
401 * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/get| Objects: get API Documentation}
402 *
403 * @method File#getMetadata
404 * @param {object} [options] Configuration options.
405 * @param {string} [options.userProject] The ID of the project which will be
406 * billed for the request.
407 * @param {GetFileMetadataCallback} [callback] Callback function.
408 * @returns {Promise<GetFileMetadataResponse>}
409 *
410 * @example
411 * ```
412 * const {Storage} = require('@google-cloud/storage');
413 * const storage = new Storage();
414 * const myBucket = storage.bucket('my-bucket');
415 *
416 * const file = myBucket.file('my-file');
417 *
418 * file.getMetadata(function(err, metadata, apiResponse) {});
419 *
420 * //-
421 * // If the callback is omitted, we'll return a Promise.
422 * //-
423 * file.getMetadata().then(function(data) {
424 * const metadata = data[0];
425 * const apiResponse = data[1];
426 * });
427 *
428 * ```
429 * @example <caption>include:samples/files.js</caption>
430 * region_tag:storage_get_metadata
431 * Another example:
432 */
433 getMetadata: {
434 reqOpts: {
435 qs: requestQueryObject,
436 },
437 },
438 /**
439 * @typedef {object} SetFileMetadataOptions Configuration options for File#setMetadata().
440 * @property {string} [userProject] The ID of the project which will be billed for the request.
441 */
442 /**
443 * @callback SetFileMetadataCallback
444 * @param {?Error} err Request error, if any.
445 * @param {object} apiResponse The full API response.
446 */
447 /**
448 * @typedef {array} SetFileMetadataResponse
449 * @property {object} 0 The full API response.
450 */
451 /**
452 * Merge the given metadata with the current remote file's metadata. This
453 * will set metadata if it was previously unset or update previously set
454 * metadata. To unset previously set metadata, set its value to null.
455 *
456 * You can set custom key/value pairs in the metadata key of the given
457 * object; however, the other properties outside of this object must adhere
458 * to the {@link https://goo.gl/BOnnCK| official API documentation}.
459 *
460 *
461 * See the examples below for more information.
462 *
463 * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation}
464 *
465 * @method File#setMetadata
466 * @param {object} [metadata] The metadata you wish to update.
467 * @param {SetFileMetadataOptions} [options] Configuration options.
468 * @param {SetFileMetadataCallback} [callback] Callback function.
469 * @returns {Promise<SetFileMetadataResponse>}
470 *
471 * @example
472 * ```
473 * const {Storage} = require('@google-cloud/storage');
474 * const storage = new Storage();
475 * const myBucket = storage.bucket('my-bucket');
476 *
477 * const file = myBucket.file('my-file');
478 *
479 * const metadata = {
480 * contentType: 'application/x-font-ttf',
481 * metadata: {
482 * my: 'custom',
483 * properties: 'go here'
484 * }
485 * };
486 *
487 * file.setMetadata(metadata, function(err, apiResponse) {});
488 *
489 * // Assuming current metadata = { hello: 'world', unsetMe: 'will do' }
490 * file.setMetadata({
491 * metadata: {
492 * abc: '123', // will be set.
493 * unsetMe: null, // will be unset (deleted).
494 * hello: 'goodbye' // will be updated from 'world' to 'goodbye'.
495 * }
496 * }, function(err, apiResponse) {
497 * // metadata should now be { abc: '123', hello: 'goodbye' }
498 * });
499 *
500 * //-
501 * // Set a temporary hold on this file from its bucket's retention period
502 * // configuration.
503 * //-
504 * file.setMetadata({
505 * temporaryHold: true
506 * }, function(err, apiResponse) {});
507 *
508 * //-
509 * // Alternatively, you may set an event-based hold. This will follow the
510 * // same behavior as a temporary hold, with the exception that the
511 * // bucket's retention period for this file will restart from the time
512 * // the hold is released.
513 * //-
514 * file.setMetadata({
515 * eventBasedHold: true
516 * }, function(err, apiResponse) {});
517 *
518 * //-
519 * // If the callback is omitted, we'll return a Promise.
520 * //-
521 * file.setMetadata(metadata).then(function(data) {
522 * const apiResponse = data[0];
523 * });
524 * ```
525 */
526 setMetadata: {
527 reqOpts: {
528 qs: requestQueryObject,
529 },
530 },
531 };
532 super({
533 parent: bucket,
534 baseUrl: '/o',
535 id: encodeURIComponent(name),
536 methods,
537 });
538 _File_instances.add(this);
539 this.bucket = bucket;
540 // eslint-disable-next-line @typescript-eslint/no-explicit-any
541 this.storage = bucket.parent;
542 // @TODO Can this duplicate code from above be avoided?
543 if (options.generation !== null) {
544 let generation;
545 if (typeof options.generation === 'string') {
546 generation = Number(options.generation);
547 }
548 else {
549 generation = options.generation;
550 }
551 if (!isNaN(generation)) {
552 this.generation = generation;
553 }
554 }
555 this.kmsKeyName = options.kmsKeyName;
556 this.userProject = userProject;
557 this.name = name;
558 if (options.encryptionKey) {
559 this.setEncryptionKey(options.encryptionKey);
560 }
561 this.acl = new acl_1.Acl({
562 request: this.request.bind(this),
563 pathPrefix: '/acl',
564 });
565 this.crc32cGenerator =
566 options.crc32cGenerator || this.bucket.crc32cGenerator;
567 this.instanceRetryValue = (_b = (_a = this.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry;
568 this.instancePreconditionOpts = options === null || options === void 0 ? void 0 : options.preconditionOpts;
569 }
570 /**
571 * The object's Cloud Storage URI (`gs://`)
572 *
573 * @example
574 * ```ts
575 * const {Storage} = require('@google-cloud/storage');
576 * const storage = new Storage();
577 * const bucket = storage.bucket('my-bucket');
578 * const file = bucket.file('image.png');
579 *
580 * // `gs://my-bucket/image.png`
581 * const href = file.cloudStorageURI.href;
582 * ```
583 */
584 get cloudStorageURI() {
585 const uri = this.bucket.cloudStorageURI;
586 uri.pathname = this.name;
587 return uri;
588 }
589 /**
590 * A helper method for determining if a request should be retried based on preconditions.
591 * This should only be used for methods where the idempotency is determined by
592 * `ifGenerationMatch`
593 * @private
594 *
595 * A request should not be retried under the following conditions:
596 * - the `ifGenerationMatch` precondition is not set and the `idempotencyStrategy` is `RetryConditional`, OR
597 * - the `idempotencyStrategy` is set to `RetryNever`
598 */
599 shouldRetryBasedOnPreconditionAndIdempotencyStrat(options) {
600 var _a;
601 return !(((options === null || options === void 0 ? void 0 : options.ifGenerationMatch) === undefined &&
602 ((_a = this.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined &&
603 this.storage.retryOptions.idempotencyStrategy ===
604 storage_1.IdempotencyStrategy.RetryConditional) ||
605 this.storage.retryOptions.idempotencyStrategy ===
606 storage_1.IdempotencyStrategy.RetryNever);
607 }
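    /**
     * A minimal sketch (not part of the original source) of the configuration the
     * check above reacts to: with the default `RetryConditional` strategy, mutating
     * requests are only retried when an `ifGenerationMatch` precondition is present,
     * either per call or on the File instance. The generation number is an
     * illustrative placeholder.
     *
     * ```js
     * const {Storage, IdempotencyStrategy} = require('@google-cloud/storage');
     * const storage = new Storage({
     *   retryOptions: {idempotencyStrategy: IdempotencyStrategy.RetryConditional},
     * });
     *
     * const file = storage.bucket('my-bucket').file('my-file', {
     *   preconditionOpts: {ifGenerationMatch: 1234567890}, // placeholder generation
     * });
     * ```
     */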
608 /**
609 * @typedef {array} CopyResponse
610 * @property {File} 0 The copied {@link File}.
611 * @property {object} 1 The full API response.
612 */
613 /**
614 * @callback CopyCallback
615 * @param {?Error} err Request error, if any.
616 * @param {File} copiedFile The copied {@link File}.
617 * @param {object} apiResponse The full API response.
618 */
619 /**
620 * @typedef {object} CopyOptions Configuration options for File#copy(). See an
621 * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}.
622 * @property {string} [cacheControl] The cacheControl setting for the new file.
623 * @property {string} [contentEncoding] The contentEncoding setting for the new file.
624 * @property {string} [contentType] The contentType setting for the new file.
625 * @property {string} [destinationKmsKeyName] Resource name of the Cloud
626 * KMS key, of the form
627 * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`,
628 * that will be used to encrypt the object. Overwrites the object
629 * metadata's `kms_key_name` value, if any.
630 * @property {Metadata} [metadata] Metadata to specify on the copied file.
631 * @property {string} [predefinedAcl] Set the ACL for the new file.
632 * @property {string} [token] A previously-returned `rewriteToken` from an
633 * unfinished rewrite request.
634 * @property {string} [userProject] The ID of the project which will be
635 * billed for the request.
636 */
637 /**
638 * Copy this file to another file. By default, this will copy the file to the
639 * same bucket, but you can choose to copy it to another Bucket by providing
640 * a Bucket or File object or a URL starting with "gs://".
641 * The generation of the file will not be preserved.
642 *
643 * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite| Objects: rewrite API Documentation}
644 *
645 * @throws {Error} If the destination file is not provided.
646 *
647 * @param {string|Bucket|File} destination Destination file.
648 * @param {CopyOptions} [options] Configuration options. See an {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}.
649 * @param {CopyCallback} [callback] Callback function.
650 * @returns {Promise<CopyResponse>}
651 *
652 * @example
653 * ```
654 * const {Storage} = require('@google-cloud/storage');
655 * const storage = new Storage();
656 *
657 * //-
658 * // You can pass in a variety of types for the destination.
659 * //
660 * // For all of the below examples, assume we are working with the following
661 * // Bucket and File objects.
662 * //-
663 * const bucket = storage.bucket('my-bucket');
664 * const file = bucket.file('my-image.png');
665 *
666 * //-
667 * // If you pass in a string for the destination, the file is copied to its
668 * // current bucket, under the new name provided.
669 * //-
670 * file.copy('my-image-copy.png', function(err, copiedFile, apiResponse) {
671 * // `my-bucket` now contains:
672 * // - "my-image.png"
673 * // - "my-image-copy.png"
674 *
675 * // `copiedFile` is an instance of a File object that refers to your new
676 * // file.
677 * });
678 *
679 * //-
680 * // If you pass in a string starting with "gs://" for the destination, the
681 * // file is copied to the other bucket and under the new name provided.
682 * //-
683 * const newLocation = 'gs://another-bucket/my-image-copy.png';
684 * file.copy(newLocation, function(err, copiedFile, apiResponse) {
685 * // `my-bucket` still contains:
686 * // - "my-image.png"
687 * //
688 * // `another-bucket` now contains:
689 * // - "my-image-copy.png"
690 *
691 * // `copiedFile` is an instance of a File object that refers to your new
692 * // file.
693 * });
694 *
695 * //-
696 * // If you pass in a Bucket object, the file will be copied to that bucket
697 * // using the same name.
698 * //-
699 * const anotherBucket = storage.bucket('another-bucket');
700 * file.copy(anotherBucket, function(err, copiedFile, apiResponse) {
701 * // `my-bucket` still contains:
702 * // - "my-image.png"
703 * //
704 * // `another-bucket` now contains:
705 * // - "my-image.png"
706 *
707 * // `copiedFile` is an instance of a File object that refers to your new
708 * // file.
709 * });
710 *
711 * //-
712 * // If you pass in a File object, you have complete control over the new
713 * // bucket and filename.
714 * //-
715 * const anotherFile = anotherBucket.file('my-awesome-image.png');
716 * file.copy(anotherFile, function(err, copiedFile, apiResponse) {
717 * // `my-bucket` still contains:
718 * // - "my-image.png"
719 * //
720 * // `another-bucket` now contains:
721 * // - "my-awesome-image.png"
722 *
723 * // Note:
724 * // The `copiedFile` parameter is equal to `anotherFile`.
725 * });
726 *
727 * //-
728 * // If the callback is omitted, we'll return a Promise.
729 * //-
730 * file.copy(newLocation).then(function(data) {
731 * const newFile = data[0];
732 * const apiResponse = data[1];
733 * });
734 *
735 * ```
736 * @example <caption>include:samples/files.js</caption>
737 * region_tag:storage_copy_file
738 * Another example:
739 */
740 copy(destination, optionsOrCallback, callback) {
741 var _a, _b;
742 const noDestinationError = new Error(FileExceptionMessages.DESTINATION_NO_NAME);
743 if (!destination) {
744 throw noDestinationError;
745 }
746 let options = {};
747 if (typeof optionsOrCallback === 'function') {
748 callback = optionsOrCallback;
749 }
750 else if (optionsOrCallback) {
751 options = optionsOrCallback;
752 }
753 options = extend(true, {}, options);
754 callback = callback || nodejs_common_1.util.noop;
755 let destBucket;
756 let destName;
757 let newFile;
758 if (typeof destination === 'string') {
759 const parsedDestination = GS_URL_REGEXP.exec(destination);
760 if (parsedDestination !== null && parsedDestination.length === 3) {
761 destBucket = this.storage.bucket(parsedDestination[1]);
762 destName = parsedDestination[2];
763 }
764 else {
765 destBucket = this.bucket;
766 destName = destination;
767 }
768 }
769 else if (destination instanceof bucket_1.Bucket) {
770 destBucket = destination;
771 destName = this.name;
772 }
773 else if (destination instanceof File) {
774 destBucket = destination.bucket;
775 destName = destination.name;
776 newFile = destination;
777 }
778 else {
779 throw noDestinationError;
780 }
781 const query = {};
782 if (this.generation !== undefined) {
783 query.sourceGeneration = this.generation;
784 }
785 if (options.token !== undefined) {
786 query.rewriteToken = options.token;
787 }
788 if (options.userProject !== undefined) {
789 query.userProject = options.userProject;
790 delete options.userProject;
791 }
792 if (options.predefinedAcl !== undefined) {
793 query.destinationPredefinedAcl = options.predefinedAcl;
794 delete options.predefinedAcl;
795 }
796 newFile = newFile || destBucket.file(destName);
797 const headers = {};
798 if (this.encryptionKey !== undefined) {
799 headers['x-goog-copy-source-encryption-algorithm'] = 'AES256';
800 headers['x-goog-copy-source-encryption-key'] = this.encryptionKeyBase64;
801 headers['x-goog-copy-source-encryption-key-sha256'] =
802 this.encryptionKeyHash;
803 }
804 if (newFile.encryptionKey !== undefined) {
805 this.setEncryptionKey(newFile.encryptionKey);
806 }
807 else if (options.destinationKmsKeyName !== undefined) {
808 query.destinationKmsKeyName = options.destinationKmsKeyName;
809 delete options.destinationKmsKeyName;
810 }
811 else if (newFile.kmsKeyName !== undefined) {
812 query.destinationKmsKeyName = newFile.kmsKeyName;
813 }
814 if (query.destinationKmsKeyName) {
815 this.kmsKeyName = query.destinationKmsKeyName;
816 const keyIndex = this.interceptors.indexOf(this.encryptionKeyInterceptor);
817 if (keyIndex > -1) {
818 this.interceptors.splice(keyIndex, 1);
819 }
820 }
821 if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) {
822 this.storage.retryOptions.autoRetry = false;
823 }
824 if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined) {
825 query.ifGenerationMatch = (_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch;
826 delete options.preconditionOpts;
827 }
828 this.request({
829 method: 'POST',
830 uri: `/rewriteTo/b/${destBucket.name}/o/${encodeURIComponent(newFile.name)}`,
831 qs: query,
832 json: options,
833 headers,
834 }, (err, resp) => {
835 this.storage.retryOptions.autoRetry = this.instanceRetryValue;
836 if (err) {
837 callback(err, null, resp);
838 return;
839 }
840 if (resp.rewriteToken) {
841 const options = {
842 token: resp.rewriteToken,
843 };
844 if (query.userProject) {
845 options.userProject = query.userProject;
846 }
847 if (query.destinationKmsKeyName) {
848 options.destinationKmsKeyName = query.destinationKmsKeyName;
849 }
850 this.copy(newFile, options, callback);
851 return;
852 }
853 callback(null, newFile, resp);
854 });
855 }
856 /**
857 * @typedef {object} CreateReadStreamOptions Configuration options for File#createReadStream.
858 * @property {string} [userProject] The ID of the project which will be
859 * billed for the request.
860 * @property {string|boolean} [validation] Possible values: `"md5"`,
861 * `"crc32c"`, or `false`. By default, data integrity is validated with a
862 * CRC32c checksum. You may use MD5 if preferred, but that hash is not
863 * supported for composite objects. An error will be raised if MD5 is
864 * specified but is not available. You may also choose to skip validation
865 * completely, however this is **not recommended**.
866 * @property {number} [start] A byte offset to begin the file's download
867 * from. Default is 0. NOTE: Byte ranges are inclusive; that is,
868 * `options.start = 0` and `options.end = 999` represent the first 1000
869 * bytes in a file or object. NOTE: when specifying a byte range, data
870 * integrity is not available.
871 * @property {number} [end] A byte offset to stop reading the file at.
872 * NOTE: Byte ranges are inclusive; that is, `options.start = 0` and
873 * `options.end = 999` represent the first 1000 bytes in a file or object.
874 * NOTE: when specifying a byte range, data integrity is not available.
875 * @property {boolean} [decompress=true] Automatically decompress gzip-encoded
876 * data received from the server. Set to `false` to disable decompression.
877 * Applicable in cases where the data was uploaded with the
878 * `gzip: true` option. See {@link File#createWriteStream}.
879 */
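    /**
     * A minimal sketch (not part of the original source) of the `validation` and
     * `decompress` options described above: skip checksum validation and keep
     * gzip-encoded content compressed on the way down.
     *
     * ```js
     * const fs = require('fs');
     * const {Storage} = require('@google-cloud/storage');
     * const storage = new Storage();
     * const file = storage.bucket('my-bucket').file('my-file');
     *
     * file.createReadStream({validation: false, decompress: false})
     *   .on('error', err => {})
     *   .pipe(fs.createWriteStream('/local/path/my-file.gz'));
     * ```
     */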
880 /**
881 * Create a readable stream to read the contents of the remote file. It can be
882 * piped to a writable stream or listened to for 'data' events to read a
883 * file's contents.
884 *
885 * In the unlikely event there is a mismatch between what you downloaded and
886 * the version in your Bucket, your error handler will receive an error with
887 * code "CONTENT_DOWNLOAD_MISMATCH". If you receive this error, the best
888 * recourse is to try downloading the file again.
889 *
890 * NOTE: Readable streams will emit the `end` event when the file is fully
891 * downloaded.
892 *
893 * @param {CreateReadStreamOptions} [options] Configuration options.
894 * @returns {ReadableStream}
895 *
896 * @example
897 * ```
898 * //-
899 * // <h4>Downloading a File</h4>
900 * //
901 * // The example below demonstrates how we can reference a remote file, then
902 * // pipe its contents to a local file. This is effectively creating a local
903 * // backup of your remote data.
904 * //-
905 * const {Storage} = require('@google-cloud/storage');
906 * const storage = new Storage();
907 * const bucket = storage.bucket('my-bucket');
908 *
909 * const fs = require('fs');
910 * const remoteFile = bucket.file('image.png');
911 * const localFilename = '/Users/stephen/Photos/image.png';
912 *
913 * remoteFile.createReadStream()
914 * .on('error', function(err) {})
915 * .on('response', function(response) {
916 * // Server connected and responded with the specified status and headers.
917 * })
918 * .on('end', function() {
919 * // The file is fully downloaded.
920 * })
921 * .pipe(fs.createWriteStream(localFilename));
922 *
923 * //-
924 * // To limit the downloaded data to only a byte range, pass an options
925 * // object.
926 * //-
927 * const logFile = myBucket.file('access_log');
928 * logFile.createReadStream({
929 * start: 10000,
930 * end: 20000
931 * })
932 * .on('error', function(err) {})
933 * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt'));
934 *
935 * //-
936 * // To read a tail byte range, specify only `options.end` as a negative
937 * // number.
938 * //-
939 * const logFile = myBucket.file('access_log');
940 * logFile.createReadStream({
941 * end: -100
942 * })
943 * .on('error', function(err) {})
944 * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt'));
945 * ```
946 */
947 createReadStream(options = {}) {
948 options = Object.assign({ decompress: true }, options);
949 const rangeRequest = typeof options.start === 'number' || typeof options.end === 'number';
950 const tailRequest = options.end < 0;
951 let validateStream = undefined;
952 const throughStream = new util_1.PassThroughShim();
953 let isCompressed = true;
954 let crc32c = true;
955 let md5 = false;
956 let safeToValidate = true;
957 if (typeof options.validation === 'string') {
958 const value = options.validation.toLowerCase().trim();
959 crc32c = value === 'crc32c';
960 md5 = value === 'md5';
961 }
962 else if (options.validation === false) {
963 crc32c = false;
964 }
965 const shouldRunValidation = !rangeRequest && (crc32c || md5);
966 if (rangeRequest) {
967 if (typeof options.validation === 'string' ||
968 options.validation === true) {
969 throw new Error(FileExceptionMessages.INVALID_VALIDATION_FILE_RANGE);
970 }
971 // Range requests can't receive data integrity checks.
972 crc32c = false;
973 md5 = false;
974 }
975 // Authenticate the request, then pipe the remote API request to the stream
976 // returned to the user.
977 const makeRequest = () => {
978 const query = { alt: 'media' };
979 if (this.generation) {
980 query.generation = this.generation;
981 }
982 if (options.userProject) {
983 query.userProject = options.userProject;
984 }
985 const headers = {
986 'Accept-Encoding': 'gzip',
987 'Cache-Control': 'no-store',
988 };
989 if (rangeRequest) {
990 const start = typeof options.start === 'number' ? options.start : '0';
991 const end = typeof options.end === 'number' ? options.end : '';
992 headers.Range = `bytes=${tailRequest ? end : `${start}-${end}`}`;
993 }
994 const reqOpts = {
995 forever: false,
996 uri: '',
997 headers,
998 qs: query,
999 };
1000 const hashes = {};
1001 this.requestStream(reqOpts)
1002 .on('error', err => {
1003 throughStream.destroy(err);
1004 })
1005 .on('response', res => {
1006 throughStream.emit('response', res);
1007 nodejs_common_1.util.handleResp(null, res, null, onResponse);
1008 })
1009 .resume();
1010 // We listen to the response event from the request stream so that we
1011 // can...
1012 //
1013 // 1) Prevent any data from going to the user if an error occurred.
1014 // 2) Calculate the hashes from the http.IncomingMessage response
1015 //    stream, which returns the bytes from the source without
1016 //    decompressing gzip'd content. We then send it through,
1017 //    decompressed if applicable, to the user.
1019 const onResponse = (err, _body, rawResponseStream) => {
1020 if (err) {
1021 // Get error message from the body.
1022 this.getBufferFromReadable(rawResponseStream).then(body => {
1023 err.message = body.toString('utf8');
1024 throughStream.destroy(err);
1025 });
1026 return;
1027 }
1028 rawResponseStream.on('error', onComplete);
1029 const headers = rawResponseStream.toJSON().headers;
1030 isCompressed = headers['content-encoding'] === 'gzip';
1031 // The object is safe to validate if:
1032 // 1. It was stored gzip and returned to us gzip OR
1033 // 2. It was never stored as gzip
1034 safeToValidate =
1035 (headers['x-goog-stored-content-encoding'] === 'gzip' &&
1036 isCompressed) ||
1037 headers['x-goog-stored-content-encoding'] === 'identity';
1038 const transformStreams = [];
1039 if (shouldRunValidation) {
1040 // The x-goog-hash header should be set with a crc32c and md5 hash.
1041 // ex: headers['x-goog-hash'] = 'crc32c=xxxx,md5=xxxx'
1042 if (typeof headers['x-goog-hash'] === 'string') {
1043 headers['x-goog-hash']
1044 .split(',')
1045 .forEach((hashKeyValPair) => {
1046 const delimiterIndex = hashKeyValPair.indexOf('=');
1047 const hashType = hashKeyValPair.substr(0, delimiterIndex);
1048 const hashValue = hashKeyValPair.substr(delimiterIndex + 1);
1049 hashes[hashType] = hashValue;
1050 });
1051 }
1052 validateStream = new hash_stream_validator_1.HashStreamValidator({
1053 crc32c,
1054 md5,
1055 crc32cGenerator: this.crc32cGenerator,
1056 });
1057 transformStreams.push(validateStream);
1058 }
1059 if (isCompressed && options.decompress) {
1060 transformStreams.push(zlib.createGunzip());
1061 }
1062 const handoffStream = new stream_1.PassThrough({
1063 final: cb => {
1064 // Preserving `onComplete`'s ability to
1065 // destroy `throughStream` before pipeline
1066 // attempts to.
1067 onComplete(null)
1068 .then(() => {
1069 cb();
1070 })
1071 .catch(cb);
1072 },
1073 });
1074 (0, stream_1.pipeline)(rawResponseStream, ...transformStreams, handoffStream, throughStream, onComplete);
1075 };
1076 // This is hooked to the `complete` event from the request stream. This is
1077 // our chance to validate the data and let the user know if anything went
1078 // wrong.
1079 let onCompleteCalled = false;
1080 const onComplete = async (err) => {
1081 if (onCompleteCalled) {
1082 return;
1083 }
1084 onCompleteCalled = true;
1085 if (err) {
1086 throughStream.destroy(err);
1087 return;
1088 }
1089 if (rangeRequest || !shouldRunValidation) {
1090 return;
1091 }
1092 // If we're doing validation, assume the worst-- a data integrity
1093 // mismatch. If not, these tests won't be performed, and we can assume
1094 // the best.
1095 // We must check if the server decompressed the data on serve because hash
1096 // validation is not possible in this case.
1097 let failed = (crc32c || md5) && safeToValidate;
1098 if (validateStream && safeToValidate) {
1099 if (crc32c && hashes.crc32c) {
1100 failed = !validateStream.test('crc32c', hashes.crc32c);
1101 }
1102 if (md5 && hashes.md5) {
1103 failed = !validateStream.test('md5', hashes.md5);
1104 }
1105 }
1106 if (md5 && !hashes.md5) {
1107 const hashError = new RequestError(FileExceptionMessages.MD5_NOT_AVAILABLE);
1108 hashError.code = 'MD5_NOT_AVAILABLE';
1109 throughStream.destroy(hashError);
1110 }
1111 else if (failed) {
1112 const mismatchError = new RequestError(FileExceptionMessages.DOWNLOAD_MISMATCH);
1113 mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH';
1114 throughStream.destroy(mismatchError);
1115 }
1116 else {
1117 return;
1118 }
1119 };
1120 };
1121 throughStream.on('reading', makeRequest);
1122 return throughStream;
1123 }
1124 /**
1125 * @callback CreateResumableUploadCallback
1126 * @param {?Error} err Request error, if any.
1127 * @param {string} uri The resumable upload's unique session URI.
1128 */
1129 /**
1130 * @typedef {array} CreateResumableUploadResponse
1131 * @property {string} 0 The resumable upload's unique session URI.
1132 */
1133 /**
1134 * @typedef {object} CreateResumableUploadOptions
1135 * @property {object} [metadata] Metadata to set on the file.
1136 * @property {number} [offset] The starting byte of the upload stream for resuming an interrupted upload.
1137 * @property {string} [origin] Origin header to set for the upload.
1138 * @property {string} [predefinedAcl] Apply a predefined set of access
1139 * controls to this object.
1140 *
1141 * Acceptable values are:
1142 * - **`authenticatedRead`** - Object owner gets `OWNER` access, and
1143 * `allAuthenticatedUsers` get `READER` access.
1144 *
1145 * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and
1146 * project team owners get `OWNER` access.
1147 *
1148 * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project
1149 * team owners get `READER` access.
1150 *
1151 * - **`private`** - Object owner gets `OWNER` access.
1152 *
1153 * - **`projectPrivate`** - Object owner gets `OWNER` access, and project
1154 * team members get access according to their roles.
1155 *
1156 * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers`
1157 * get `READER` access.
1158 * @property {boolean} [private] Make the uploaded file private. (Alias for
1159 * `options.predefinedAcl = 'private'`)
1160 * @property {boolean} [public] Make the uploaded file public. (Alias for
1161 * `options.predefinedAcl = 'publicRead'`)
1162 * @property {string} [userProject] The ID of the project which will be
1163 * billed for the request.
1164 * @property {number} [chunkSize] Create a separate request per chunk. This
1165 * value is in bytes and should be a multiple of 256 KiB (2^18).
1166 * {@link https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload| We recommend using at least 8 MiB for the chunk size.}
1167 */
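    /**
     * A minimal sketch (not part of the original source) of the chunk sizing
     * recommended above: 8 MiB expressed as a multiple of 256 KiB (2^18 bytes).
     * It assumes `chunkSize` is forwarded to the resumable upload performed by
     * {@link File#createWriteStream}.
     *
     * ```js
     * const {Storage} = require('@google-cloud/storage');
     * const file = new Storage().bucket('my-bucket').file('my-file');
     *
     * const CHUNK_SIZE = 32 * 256 * 1024; // 32 * 2^18 bytes = 8 MiB
     * file.createWriteStream({resumable: true, chunkSize: CHUNK_SIZE});
     * ```
     */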
1168 /**
1169 * Create a unique resumable upload session URI. This is the first step when
1170 * performing a resumable upload.
1171 *
1172 * See the {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide}
1173 * for more on how the entire process works.
1174 *
1175 * <h4>Note</h4>
1176 *
1177 * If you are just looking to perform a resumable upload without worrying
1178 * about any of the details, see {@link File#createWriteStream}. Resumable
1179 * uploads are performed by default.
1180 *
1181 * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide}
1182 *
1183 * @param {CreateResumableUploadOptions} [options] Configuration options.
1184 * @param {CreateResumableUploadCallback} [callback] Callback function.
1185 * @returns {Promise<CreateResumableUploadResponse>}
1186 *
1187 * @example
1188 * ```
1189 * const {Storage} = require('@google-cloud/storage');
1190 * const storage = new Storage();
1191 * const myBucket = storage.bucket('my-bucket');
1192 *
1193 * const file = myBucket.file('my-file');
1194 * file.createResumableUpload(function(err, uri) {
1195 * if (!err) {
1196 * // `uri` can be used to PUT data to.
1197 * }
1198 * });
1199 *
1200 * //-
1201 * // If the callback is omitted, we'll return a Promise.
1202 * //-
1203 * file.createResumableUpload().then(function(data) {
1204 * const uri = data[0];
1205 * });
1206 * ```
1207 */
1208 createResumableUpload(optionsOrCallback, callback) {
1209 var _a, _b;
1210 const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};
1211 callback =
1212 typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;
1213 const retryOptions = this.storage.retryOptions;
1214 if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined &&
1215 ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined &&
1216 this.storage.retryOptions.idempotencyStrategy ===
1217 storage_1.IdempotencyStrategy.RetryConditional) ||
1218 this.storage.retryOptions.idempotencyStrategy ===
1219 storage_1.IdempotencyStrategy.RetryNever) {
1220 retryOptions.autoRetry = false;
1221 }
1222 resumableUpload.createURI({
1223 authClient: this.storage.authClient,
1224 apiEndpoint: this.storage.apiEndpoint,
1225 bucket: this.bucket.name,
1226 customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}),
1227 file: this.name,
1228 generation: this.generation,
1229 key: this.encryptionKey,
1230 kmsKeyName: this.kmsKeyName,
1231 metadata: options.metadata,
1232 offset: options.offset,
1233 origin: options.origin,
1234 predefinedAcl: options.predefinedAcl,
1235 private: options.private,
1236 public: options.public,
1237 userProject: options.userProject || this.userProject,
1238 retryOptions: retryOptions,
1239 params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts,
1240 }, callback);
1241 this.storage.retryOptions.autoRetry = this.instanceRetryValue;
1242 }
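    /**
     * A minimal sketch (not part of the original source) of pairing the session
     * URI returned by `createResumableUpload()` with the `uri` option of
     * {@link File#createWriteStream} to perform (or later resume) the upload.
     *
     * ```js
     * const fs = require('fs');
     * const {Storage} = require('@google-cloud/storage');
     * const storage = new Storage();
     * const file = storage.bucket('my-bucket').file('my-file');
     *
     * file.createResumableUpload().then(([uri]) => {
     *   fs.createReadStream('/local/path/my-file')
     *     .pipe(file.createWriteStream({uri, resumable: true}))
     *     .on('error', err => {})
     *     .on('finish', () => {});
     * });
     * ```
     */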
1243 /**
1244 * @typedef {object} CreateWriteStreamOptions Configuration options for File#createWriteStream().
1245 * @property {string} [contentType] Alias for
1246 * `options.metadata.contentType`. If set to `auto`, the file name is used
1247 * to determine the contentType.
1248 * @property {string|boolean} [gzip] If true, automatically gzip the file.
1249 * If set to `auto`, the contentType is used to determine if the file
1250 * should be gzipped. This will set `options.metadata.contentEncoding` to
1251 * `gzip` if necessary.
1252 * @property {object} [metadata] See the examples below or
1253 * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}
1254 * for more details.
1255 * @property {number} [offset] The starting byte of the upload stream, for
1256 * resuming an interrupted upload. Defaults to 0.
1257 * @property {string} [predefinedAcl] Apply a predefined set of access
1258 * controls to this object.
1259 *
1260 * Acceptable values are:
1261 * - **`authenticatedRead`** - Object owner gets `OWNER` access, and
1262 * `allAuthenticatedUsers` get `READER` access.
1263 *
1264 * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and
1265 * project team owners get `OWNER` access.
1266 *
1267 * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project
1268 * team owners get `READER` access.
1269 *
1270 * - **`private`** - Object owner gets `OWNER` access.
1271 *
1272 * - **`projectPrivate`** - Object owner gets `OWNER` access, and project
1273 * team members get access according to their roles.
1274 *
1275 * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers`
1276 * get `READER` access.
1277 * @property {boolean} [private] Make the uploaded file private. (Alias for
1278 * `options.predefinedAcl = 'private'`)
1279 * @property {boolean} [public] Make the uploaded file public. (Alias for
1280 * `options.predefinedAcl = 'publicRead'`)
1281 * @property {boolean} [resumable] Force a resumable upload. NOTE: When
1282 * working with streams, the file format and size are unknown until the
1283 * stream is completely consumed. Because of this, it's best to be explicit
1284 * about what makes sense given your input.
1285 * @property {number} [timeout=60000] Set the HTTP request timeout in
1286 * milliseconds. This option is not available for resumable uploads.
1287 * Default: `60000`
1288 * @property {string} [uri] The URI for an already-created resumable
1289 * upload. See {@link File#createResumableUpload}.
1290 * @property {string} [userProject] The ID of the project which will be
1291 * billed for the request.
1292 * @property {string|boolean} [validation] Possible values: `"md5"`,
1293 * `"crc32c"`, or `false`. By default, data integrity is validated with a
1294 * CRC32c checksum. You may use MD5 if preferred, but that hash is not
1295 * supported for composite objects. An error will be raised if MD5 is
1296 * specified but is not available. You may also choose to skip validation
1297 * completely, however this is **not recommended**. In addition to specifying
1298 * validation type, providing `metadata.crc32c` or `metadata.md5Hash` will
1299 * cause the server to perform validation in addition to client validation.
1300 * NOTE: Validation is automatically skipped for objects that were
1301 * uploaded using the `gzip` option and have already compressed content.
1302 */
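    /**
     * A minimal sketch (not part of the original source) of the server-side
     * validation described above: supplying a precomputed, base64-encoded
     * `metadata.crc32c` so the server can reject the upload on mismatch. The
     * hash shown is the CRC32C of the string 'data' from the earlier example.
     *
     * ```js
     * const fs = require('fs');
     * const {Storage} = require('@google-cloud/storage');
     * const file = new Storage().bucket('my-bucket').file('my-file');
     *
     * fs.createReadStream('/local/path/my-file')
     *   .pipe(file.createWriteStream({
     *     validation: 'crc32c',
     *     metadata: {crc32c: 'rth90Q=='},
     *   }));
     * ```
     */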
1303 /**
1304 * Create a writable stream to overwrite the contents of the file in your
1305 * bucket.
1306 *
1307 * A File object can also be used to create files for the first time.
1308 *
1309 * Resumable uploads are automatically enabled and must be shut off explicitly
1310 * by setting `options.resumable` to `false`.
1311 *
1312 *
1313 * <p class="notice">
1314 * There is some overhead when using a resumable upload that can cause
1315 * noticeable performance degradation while uploading a series of small
1316 * files. When uploading files smaller than 10 MB, it is recommended that
1317 * the resumable feature be disabled.
1318 * </p>
1319 *
1320 * NOTE: Writable streams will emit the `finish` event when the file is fully
1321 * uploaded.
1322 *
1323 * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload| Upload Options (Simple or Resumable)}
1324 * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert| Objects: insert API Documentation}
1325 *
1326 * @param {CreateWriteStreamOptions} [options] Configuration options.
1327 * @returns {WritableStream}
1328 *
1329 * @example
1330 * ```
1331 * const fs = require('fs');
1332 * const {Storage} = require('@google-cloud/storage');
1333 * const storage = new Storage();
1334 * const myBucket = storage.bucket('my-bucket');
1335 *
1336 * const file = myBucket.file('my-file');
1337 *
1338 * //-
1339 * // <h4>Uploading a File</h4>
1340 * //
1341 * // Now, consider a case where you want to upload a file to your bucket. You
1342 * // have the option of using {@link Bucket#upload}, but that is just
1343 * // a convenience method which will do the following.
1344 * //-
1345 * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg')
1346 * .pipe(file.createWriteStream())
1347 * .on('error', function(err) {})
1348 * .on('finish', function() {
1349 * // The file upload is complete.
1350 * });
1351 *
1352 * //-
1353 * // <h4>Uploading a File with gzip compression</h4>
1354 * //-
1355 * fs.createReadStream('/Users/stephen/site/index.html')
1356 * .pipe(file.createWriteStream({ gzip: true }))
1357 * .on('error', function(err) {})
1358 * .on('finish', function() {
1359 * // The file upload is complete.
1360 * });
1361 *
1362 * //-
1363 * // Downloading the file with `createReadStream` will automatically decode
1364 * // the file.
1365 * //-
1366 *
1367 * //-
1368 * // <h4>Uploading a File with Metadata</h4>
1369 * //
1370 * // One last case you may run into is when you want to upload a file to your
1371 * // bucket and set its metadata at the same time. Like above, you can use
1372 * // {@link Bucket#upload} to do this, which is just a wrapper around
1373 * // the following.
1374 * //-
1375 * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg')
1376 * .pipe(file.createWriteStream({
1377 * metadata: {
1378 * contentType: 'image/jpeg',
1379 * metadata: {
1380 * custom: 'metadata'
1381 * }
1382 * }
1383 * }))
1384 * .on('error', function(err) {})
1385 * .on('finish', function() {
1386 * // The file upload is complete.
1387 * });
1388 * ```
1389 */
1390 // eslint-disable-next-line @typescript-eslint/no-explicit-any
1391 createWriteStream(options = {}) {
1392 options = extend(true, { metadata: {} }, options);
1393 if (options.contentType) {
1394 options.metadata.contentType = options.contentType;
1395 }
1396 if (!options.metadata.contentType ||
1397 options.metadata.contentType === 'auto') {
1398 const detectedContentType = mime.getType(this.name);
1399 if (detectedContentType) {
1400 options.metadata.contentType = detectedContentType;
1401 }
1402 }
1403 let gzip = options.gzip;
1404 if (gzip === 'auto') {
1405 gzip = compressible(options.metadata.contentType);
1406 }
1407 if (gzip) {
1408 options.metadata.contentEncoding = 'gzip';
1409 }
1410 let crc32c = true;
1411 let md5 = false;
1412 if (typeof options.validation === 'string') {
1413 options.validation = options.validation.toLowerCase();
1414 crc32c = options.validation === 'crc32c';
1415 md5 = options.validation === 'md5';
1416 }
1417 else if (options.validation === false) {
1418 crc32c = false;
1419 }
1420 /**
1421 * A callback for determining when the underlying pipeline is complete.
1422 * It's possible the pipeline callback could error before the write stream
1423 * calls `final` so by default this will destroy the write stream unless the
1424 * write stream sets this callback via its `final` handler.
1425 * @param error An optional error
1426 */
1427 let pipelineCallback = error => {
1428 writeStream.destroy(error || undefined);
1429 };
1430 // A stream for consumer to write to
1431 const writeStream = new stream_1.Writable({
1432 final(cb) {
1433 // Set the pipeline callback to this callback so the pipeline's results
1434 // can be populated to the consumer
1435 pipelineCallback = cb;
1436 emitStream.end();
1437 },
1438 write(chunk, encoding, cb) {
1439 emitStream.write(chunk, encoding, cb);
1440 },
1441 });
1442 const emitStream = new util_1.PassThroughShim();
1443 const hashCalculatingStream = new hash_stream_validator_1.HashStreamValidator({
1444 crc32c,
1445 md5,
1446 crc32cGenerator: this.crc32cGenerator,
1447 });
1448 const fileWriteStream = duplexify();
1449 let fileWriteStreamMetadataReceived = false;
1450 // Handing off emitted events to users
1451 emitStream.on('reading', () => writeStream.emit('reading'));
1452 emitStream.on('writing', () => writeStream.emit('writing'));
1453 fileWriteStream.on('progress', evt => writeStream.emit('progress', evt));
1454 fileWriteStream.on('response', resp => writeStream.emit('response', resp));
1455 fileWriteStream.once('metadata', () => {
1456 fileWriteStreamMetadataReceived = true;
1457 });
1458 writeStream.on('writing', () => {
1459 if (options.resumable === false) {
1460 this.startSimpleUpload_(fileWriteStream, options);
1461 }
1462 else {
1463 this.startResumableUpload_(fileWriteStream, options);
1464 }
1465 (0, stream_1.pipeline)(emitStream, gzip ? zlib.createGzip() : new stream_1.PassThrough(), hashCalculatingStream, fileWriteStream, async (e) => {
1466 if (e) {
1467 return pipelineCallback(e);
1468 }
1469 // We want to make sure we've received the metadata from the server in order
1470 // to properly validate the object's integrity. Depending on the type of upload,
1471 // the stream could close before the response is returned.
1472 if (!fileWriteStreamMetadataReceived) {
1473 try {
1474 await new Promise((resolve, reject) => {
1475 fileWriteStream.once('metadata', resolve);
1476 fileWriteStream.once('error', reject);
1477 });
1478 }
1479 catch (e) {
1480 return pipelineCallback(e);
1481 }
1482 }
1483 try {
1484 await __classPrivateFieldGet(this, _File_instances, "m", _File_validateIntegrity).call(this, hashCalculatingStream, { crc32c, md5 });
1485 pipelineCallback();
1486 }
1487 catch (e) {
1488 pipelineCallback(e);
1489 }
1490 });
1491 });
1492 return writeStream;
1493 }
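    /**
     * A minimal sketch (not part of the original source) of listening to the
     * `progress` and `response` events that the returned write stream re-emits
     * from the underlying upload (see the event wiring above).
     *
     * ```js
     * const fs = require('fs');
     * const {Storage} = require('@google-cloud/storage');
     * const file = new Storage().bucket('my-bucket').file('my-file');
     *
     * fs.createReadStream('/local/path/my-file')
     *   .pipe(file.createWriteStream({resumable: true}))
     *   .on('progress', evt => console.log(evt))
     *   .on('response', resp => {})
     *   .on('finish', () => {});
     * ```
     */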
1494 delete(optionsOrCallback, cb) {
1495 const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};
1496 cb = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb;
1497 this.disableAutoRetryConditionallyIdempotent_(this.methods.delete, bucket_1.AvailableServiceObjectMethods.delete, options);
1498 super
1499 .delete(options)
1500 .then(resp => cb(null, ...resp))
1501 .catch(cb)
1502 .finally(() => {
1503 this.storage.retryOptions.autoRetry = this.instanceRetryValue;
1504 });
1505 }
1506 /**
1507 * @typedef {array} DownloadResponse
1508 * @property [0] The contents of a File.
1509 */
1510 /**
1511 * @callback DownloadCallback
1512 * @param err Request error, if any.
1513 * @param contents The contents of a File.
1514 */
1515 /**
1516 * Convenience method to download a file into memory or to a local
1517 * destination.
1518 *
1519 * @param {object} [options] Configuration options. The arguments match those
1520 * passed to {@link File#createReadStream}.
1521 * @param {string} [options.destination] Local file path to write the file's
1522 * contents to.
1523 * @param {string} [options.userProject] The ID of the project which will be
1524 * billed for the request.
1525 * @param {DownloadCallback} [callback] Callback function.
1526 * @returns {Promise<DownloadResponse>}
1527 *
1528 * @example
1529 * ```
1530 * const {Storage} = require('@google-cloud/storage');
1531 * const storage = new Storage();
1532 * const myBucket = storage.bucket('my-bucket');
1533 *
1534 * const file = myBucket.file('my-file');
1535 *
1536 * //-
1537 * // Download a file into memory. The contents will be available as the
1538 * second
1539 * // argument in the demonstration below, `contents`.
1540 * //-
1541 * file.download(function(err, contents) {});
1542 *
1543 * //-
1544 * // Download a file to a local destination.
1545 * //-
1546 * file.download({
1547 * destination: '/Users/me/Desktop/file-backup.txt'
1548 * }, function(err) {});
1549 *
1550 * //-
1551 * // If the callback is omitted, we'll return a Promise.
1552 * //-
1553 * file.download().then(function(data) {
1554 * const contents = data[0];
1555 * });
1556 *
1557 * ```
1558 * @example <caption>include:samples/files.js</caption>
1559 * region_tag:storage_download_file
1560 * Another example:
1561 *
1562 * @example <caption>include:samples/encryption.js</caption>
1563 * region_tag:storage_download_encrypted_file
1564 * Example of downloading an encrypted file:
1565 *
1566 * @example <caption>include:samples/requesterPays.js</caption>
1567 * region_tag:storage_download_file_requester_pays
1568 * Example of downloading a file where the requester pays:
1569 */
1570 download(optionsOrCallback, cb) {
1571 let options;
1572 if (typeof optionsOrCallback === 'function') {
1573 cb = optionsOrCallback;
1574 options = {};
1575 }
1576 else {
1577 options = optionsOrCallback;
1578 }
1579 let called = false;
1580 const callback = ((...args) => {
1581 if (!called)
1582 cb(...args);
1583 called = true;
1584 });
1585 const destination = options.destination;
1586 delete options.destination;
1587 const fileStream = this.createReadStream(options);
1588 let receivedData = false;
1589 if (destination) {
1590 fileStream
1591 .on('error', callback)
1592 .once('data', data => {
1593 // We know that the file exists on the server - now we can truncate/write to a file
1594 receivedData = true;
1595 const writable = fs.createWriteStream(destination);
1596 writable.write(data);
1597 fileStream
1598 .pipe(writable)
1599 .on('error', callback)
1600 .on('finish', callback);
1601 })
1602 .on('end', () => {
1603 // In the case of an empty file, no data will be received before the end event fires
1604 if (!receivedData) {
1605 fs.openSync(destination, 'w');
1606 callback(null, Buffer.alloc(0));
1607 }
1608 });
1609 }
1610 else {
1611 this.getBufferFromReadable(fileStream)
1612 .then(contents => callback === null || callback === void 0 ? void 0 : callback(null, contents))
1613 .catch(callback);
1614 }
1615 }
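// Added sketch (not part of the original source): the same download() calls shown in
// the JSDoc above, written with async/await. The destination path is illustrative.
//
//   const [contents] = await file.download();
//   await file.download({destination: '/tmp/file-backup.txt'});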
1616 /**
1617 * The Storage API allows you to use a custom key for server-side encryption.
1618 *
1619 * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}
1620 *
1621 * @param {string|buffer} encryptionKey An AES-256 encryption key.
1622 * @returns {File}
1623 *
1624 * @example
1625 * ```
1626 * const crypto = require('crypto');
1627 * const {Storage} = require('@google-cloud/storage');
1628 * const storage = new Storage();
1629 * const myBucket = storage.bucket('my-bucket');
1630 *
1631 * const encryptionKey = crypto.randomBytes(32);
1632 *
1633 * const fileWithCustomEncryption = myBucket.file('my-file');
1634 * fileWithCustomEncryption.setEncryptionKey(encryptionKey);
1635 *
1636 * const fileWithoutCustomEncryption = myBucket.file('my-file');
1637 *
1638 * fileWithCustomEncryption.save('data', function(err) {
1639 * // Try to download with the File object that hasn't had
1640 * // `setEncryptionKey()` called:
1641 * fileWithoutCustomEncryption.download(function(err) {
1642 * // We will receive an error:
1643 * // err.message === 'Bad Request'
1644 *
1645 * // Try again with the File object we called `setEncryptionKey()` on:
1646 * fileWithCustomEncryption.download(function(err, contents) {
1647 * // contents.toString() === 'data'
1648 * });
1649 * });
1650 * });
1651 *
1652 * ```
1653 * @example <caption>include:samples/encryption.js</caption>
1654 * region_tag:storage_upload_encrypted_file
1655 * Example of uploading an encrypted file:
1656 *
1657 * @example <caption>include:samples/encryption.js</caption>
1658 * region_tag:storage_download_encrypted_file
1659 * Example of downloading an encrypted file:
1660 */
1661 setEncryptionKey(encryptionKey) {
1662 this.encryptionKey = encryptionKey;
1663 this.encryptionKeyBase64 = Buffer.from(encryptionKey).toString('base64');
1664 this.encryptionKeyHash = crypto
1665 .createHash('sha256')
1666 // eslint-disable-next-line @typescript-eslint/no-explicit-any
1667 .update(this.encryptionKeyBase64, 'base64')
1668 .digest('base64');
1669 this.encryptionKeyInterceptor = {
1670 request: reqOpts => {
1671 reqOpts.headers = reqOpts.headers || {};
1672 reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256';
1673 reqOpts.headers['x-goog-encryption-key'] = this.encryptionKeyBase64;
1674 reqOpts.headers['x-goog-encryption-key-sha256'] =
1675 this.encryptionKeyHash;
1676 return reqOpts;
1677 },
1678 };
1679 this.interceptors.push(this.encryptionKeyInterceptor);
1680 return this;
1681 }
1682 /**
1683 * @typedef {array} GetExpirationDateResponse
1684 * @property {date} 0 A Date object representing the earliest time this file's
1685 * retention policy will expire.
1686 */
1687 /**
1688 * @callback GetExpirationDateCallback
1689 * @param {?Error} err Request error, if any.
1690 * @param {date} expirationDate A Date object representing the earliest time
1691 * this file's retention policy will expire.
1692 */
1693 /**
1694 * If this bucket has a retention policy defined, use this method to get a
1695 * Date object representing the earliest time this file will expire.
1696 *
1697 * @param {GetExpirationDateCallback} [callback] Callback function.
1698 * @returns {Promise<GetExpirationDateResponse>}
1699 *
1700 * @example
1701 * ```
1702 * const {Storage} = require('@google-cloud/storage');
 * const storage = new Storage();
1703 * const myBucket = storage.bucket('my-bucket');
1704 *
1705 * const file = myBucket.file('my-file');
1706 *
1707 * file.getExpirationDate(function(err, expirationDate) {
1708 * // expirationDate is a Date object.
1709 * });
1710 * ```
1711 */
1712 getExpirationDate(callback) {
1713 this.getMetadata((err, metadata, apiResponse) => {
1714 if (err) {
1715 callback(err, null, apiResponse);
1716 return;
1717 }
1718 if (!metadata.retentionExpirationTime) {
1719 const error = new Error(FileExceptionMessages.EXPIRATION_TIME_NA);
1720 callback(error, null, apiResponse);
1721 return;
1722 }
1723 callback(null, new Date(metadata.retentionExpirationTime), apiResponse);
1724 });
1725 }
1726 /**
1727 * @typedef {array} GenerateSignedPostPolicyV2Response
1728 * @property {object} 0 The document policy.
1729 */
1730 /**
1731 * @callback GenerateSignedPostPolicyV2Callback
1732 * @param {?Error} err Request error, if any.
1733 * @param {object} policy The document policy.
1734 */
1735 /**
1736 * Get a signed policy document to allow a user to upload data with a POST
1737 * request.
1738 *
1739 * In Google Cloud Platform environments, such as Cloud Functions and App
1740 * Engine, you usually don't provide a `keyFilename` or `credentials` during
1741 * instantiation. In those environments, we call the
1742 * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API}
1743 * to create a signed policy. That API requires either the
1744 * `https://www.googleapis.com/auth/iam` or
1745 * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are
1746 * enabled.
1747 *
1748 * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-v2| POST Object with the V2 signing process}
1749 *
1750 * @throws {Error} If an expiration timestamp from the past is given.
1751 * @throws {Error} If options.equals has an array with fewer or more than two
1752 * members.
1753 * @throws {Error} If options.startsWith has an array with fewer or more than two
1754 * members.
1755 *
1756 * @param {object} options Configuration options.
1757 * @param {array|array[]} [options.equals] Array of request parameters and
1758 * their expected value (e.g. [['$<field>', '<value>']]). Values are
1759 * translated into equality constraints in the conditions field of the
1760 * policy document (e.g. ['eq', '$<field>', '<value>']). If only one
1761 * equality condition is to be specified, options.equals can be a one-
1762 * dimensional array (e.g. ['$<field>', '<value>']).
1763 * @param {*} options.expires - A timestamp when this policy will expire. Any
1764 * value given is passed to `new Date()`.
1765 * @param {array|array[]} [options.startsWith] Array of request parameters and
1766 * their expected prefixes (e.g. [['$<field>', '<value>']]). Values are
1767 * translated into starts-with constraints in the conditions field of the
1768 * policy document (e.g. ['starts-with', '$<field>', '<value>']). If only
1769 * one prefix condition is to be specified, options.startsWith can be a
1770 * one-dimensional array (e.g. ['$<field>', '<value>']).
1771 * @param {string} [options.acl] ACL for the object from possibly predefined
1772 * ACLs.
1773 * @param {string} [options.successRedirect] The URL to which the user client
1774 * is redirected if the upload is successful.
1775 * @param {string} [options.successStatus] - The status of the Google Storage
1776 * response if the upload is successful (must be string).
1777 * @param {object} [options.contentLengthRange]
1778 * @param {number} [options.contentLengthRange.min] Minimum value for the
1779 * request's content length.
1780 * @param {number} [options.contentLengthRange.max] Maximum value for the
1781 * request's content length.
1782 * @param {GenerateSignedPostPolicyV2Callback} [callback] Callback function.
1783 * @returns {Promise<GenerateSignedPostPolicyV2Response>}
1784 *
1785 * @example
1786 * ```
1787 * const {Storage} = require('@google-cloud/storage');
1788 * const storage = new Storage();
1789 * const myBucket = storage.bucket('my-bucket');
1790 *
1791 * const file = myBucket.file('my-file');
1792 * const options = {
1793 * equals: ['$Content-Type', 'image/jpeg'],
1794 * expires: '10-25-2022',
1795 * contentLengthRange: {
1796 * min: 0,
1797 * max: 1024
1798 * }
1799 * };
1800 *
1801 * file.generateSignedPostPolicyV2(options, function(err, policy) {
1802 * // policy.string: the policy document in plain text.
1803 * // policy.base64: the policy document in base64.
1804 * // policy.signature: the policy signature in base64.
1805 * });
1806 *
1807 * //-
1808 * // If the callback is omitted, we'll return a Promise.
1809 * //-
1810 * file.generateSignedPostPolicyV2(options).then(function(data) {
1811 * const policy = data[0];
1812 * });
1813 * ```
1814 */
1815 generateSignedPostPolicyV2(optionsOrCallback, cb) {
1816 const args = (0, util_1.normalize)(optionsOrCallback, cb);
1817 let options = args.options;
1818 const callback = args.callback;
1819 const expires = new Date(options.expires);
1820 if (isNaN(expires.getTime())) {
1821 throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_INVALID);
1822 }
1823 if (expires.valueOf() < Date.now()) {
1824 throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_PAST);
1825 }
1826 options = Object.assign({}, options);
1827 const conditions = [
1828 ['eq', '$key', this.name],
1829 {
1830 bucket: this.bucket.name,
1831 },
1832 ];
1833 if (Array.isArray(options.equals)) {
1834 if (!Array.isArray(options.equals[0])) {
1835 options.equals = [options.equals];
1836 }
1837 options.equals.forEach(condition => {
1838 if (!Array.isArray(condition) || condition.length !== 2) {
1839 throw new Error(FileExceptionMessages.EQUALS_CONDITION_TWO_ELEMENTS);
1840 }
1841 conditions.push(['eq', condition[0], condition[1]]);
1842 });
1843 }
1844 if (Array.isArray(options.startsWith)) {
1845 if (!Array.isArray(options.startsWith[0])) {
1846 options.startsWith = [options.startsWith];
1847 }
1848 options.startsWith.forEach(condition => {
1849 if (!Array.isArray(condition) || condition.length !== 2) {
1850 throw new Error(FileExceptionMessages.STARTS_WITH_TWO_ELEMENTS);
1851 }
1852 conditions.push(['starts-with', condition[0], condition[1]]);
1853 });
1854 }
1855 if (options.acl) {
1856 conditions.push({
1857 acl: options.acl,
1858 });
1859 }
1860 if (options.successRedirect) {
1861 conditions.push({
1862 success_action_redirect: options.successRedirect,
1863 });
1864 }
1865 if (options.successStatus) {
1866 conditions.push({
1867 success_action_status: options.successStatus,
1868 });
1869 }
1870 if (options.contentLengthRange) {
1871 const min = options.contentLengthRange.min;
1872 const max = options.contentLengthRange.max;
1873 if (typeof min !== 'number' || typeof max !== 'number') {
1874 throw new Error(FileExceptionMessages.CONTENT_LENGTH_RANGE_MIN_MAX);
1875 }
1876 conditions.push(['content-length-range', min, max]);
1877 }
1878 const policy = {
1879 expiration: expires.toISOString(),
1880 conditions,
1881 };
1882 const policyString = JSON.stringify(policy);
1883 const policyBase64 = Buffer.from(policyString).toString('base64');
1884 this.storage.authClient.sign(policyBase64).then(signature => {
1885 callback(null, {
1886 string: policyString,
1887 base64: policyBase64,
1888 signature,
1889 });
1890 }, err => {
1891 callback(new signer_1.SigningError(err.message));
1892 });
1893 }
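// Added illustration (not part of the original source): with options like those in
// the JSDoc above, the policy document built here contains roughly these conditions
// before signing (bucket/file names are placeholders and the exact expiration
// depends on the timezone in which the expires value is parsed):
//
//   {
//     expiration: '2022-10-25T00:00:00.000Z',
//     conditions: [
//       ['eq', '$key', 'my-file'],
//       {bucket: 'my-bucket'},
//       ['eq', '$Content-Type', 'image/jpeg'],
//       ['content-length-range', 0, 1024]
//     ]
//   }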
1894 /**
1895 * @typedef {object} SignedPostPolicyV4Output
1896 * @property {string} url The request URL.
1897 * @property {object} fields The form fields to include in the POST request.
1898 */
1899 /**
1900 * @typedef {array} GenerateSignedPostPolicyV4Response
1901 * @property {SignedPostPolicyV4Output} 0 An object containing the request URL and form fields.
1902 */
1903 /**
1904 * @callback GenerateSignedPostPolicyV4Callback
1905 * @param {?Error} err Request error, if any.
1906 * @param {SignedPostPolicyV4Output} output An object containing the request URL and form fields.
1907 */
1908 /**
1909 * Get a v4 signed policy document to allow a user to upload data with a POST
1910 * request.
1911 *
1912 * In Google Cloud Platform environments, such as Cloud Functions and App
1913 * Engine, you usually don't provide a `keyFilename` or `credentials` during
1914 * instantiation. In those environments, we call the
1915 * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API}
1916 * to create a signed policy. That API requires either the
1917 * `https://www.googleapis.com/auth/iam` or
1918 * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are
1919 * enabled.
1920 *
1921 * See {@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument| Policy Document Reference}
1922 *
1923 * @param {object} options Configuration options.
1924 * @param {Date|number|string} options.expires - A timestamp when this policy will expire. Any
1925 * value given is passed to `new Date()`.
1926 * @param {boolean} [options.virtualHostedStyle=false] Use virtual hosted-style
1927 * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style
1928 * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs
1929 * should generally be preferred instead of path-style URLs.
1930 * Currently defaults to `false` for path-style, although this may change in a
1931 * future major-version release.
1932 * @param {string} [options.bucketBoundHostname] The bucket-bound hostname to return in
1933 * the result, e.g. "https://cdn.example.com".
1934 * @param {object} [options.fields] [Form fields]{@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument}
1935 * to include in the signed policy. Any fields with a key beginning with 'x-ignore-'
1936 * will not be included in the policy to be signed.
1937 * @param {object[]} [options.conditions] [Conditions]{@link https://cloud.google.com/storage/docs/authentication/signatures#policy-document}
1938 * to include in the signed policy. All fields given in `options.fields` are
1939 * automatically included in the conditions array; adding the same entry
1940 * in both `fields` and `conditions` will result in duplicate entries.
1941 *
1942 * @param {GenerateSignedPostPolicyV4Callback} [callback] Callback function.
1943 * @returns {Promise<GenerateSignedPostPolicyV4Response>}
1944 *
1945 * @example
1946 * ```
1947 * const {Storage} = require('@google-cloud/storage');
1948 * const storage = new Storage();
1949 * const myBucket = storage.bucket('my-bucket');
1950 *
1951 * const file = myBucket.file('my-file');
1952 * const options = {
1953 * expires: '10-25-2022',
1954 * conditions: [
1955 * ['eq', '$Content-Type', 'image/jpeg'],
1956 * ['content-length-range', 0, 1024],
1957 * ],
1958 * fields: {
1959 * acl: 'public-read',
1960 * 'x-goog-meta-foo': 'bar',
1961 * 'x-ignore-mykey': 'data'
1962 * }
1963 * };
1964 *
1965 * file.generateSignedPostPolicyV4(options, function(err, response) {
1966 * // response.url The request URL
1967 * // response.fields The form fields (including the signature) to include
1968 * // to be used to upload objects by HTML forms.
1969 * });
1970 *
1971 * //-
1972 * // If the callback is omitted, we'll return a Promise.
1973 * //-
1974 * file.generateSignedPostPolicyV4(options).then(function(data) {
1975 * const response = data[0];
1976 * // response.url The request URL
1977 * // response.fields The form fields (including the signature) to include
1978 * // to be used to upload objects by HTML forms.
1979 * });
1980 * ```
1981 */
1982 generateSignedPostPolicyV4(optionsOrCallback, cb) {
1983 const args = (0, util_1.normalize)(optionsOrCallback, cb);
1984 let options = args.options;
1985 const callback = args.callback;
1986 const expires = new Date(options.expires);
1987 if (isNaN(expires.getTime())) {
1988 throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_INVALID);
1989 }
1990 if (expires.valueOf() < Date.now()) {
1991 throw new Error(storage_1.ExceptionMessages.EXPIRATION_DATE_PAST);
1992 }
1993 if (expires.valueOf() - Date.now() > SEVEN_DAYS * 1000) {
1994 throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`);
1995 }
1996 options = Object.assign({}, options);
1997 let fields = Object.assign({}, options.fields);
1998 const now = new Date();
1999 const nowISO = (0, util_1.formatAsUTCISO)(now, true);
2000 const todayISO = (0, util_1.formatAsUTCISO)(now);
2001 const sign = async () => {
2002 const { client_email } = await this.storage.authClient.getCredentials();
2003 const credential = `${client_email}/${todayISO}/auto/storage/goog4_request`;
2004 fields = {
2005 ...fields,
2006 bucket: this.bucket.name,
2007 key: this.name,
2008 'x-goog-date': nowISO,
2009 'x-goog-credential': credential,
2010 'x-goog-algorithm': 'GOOG4-RSA-SHA256',
2011 };
2012 const conditions = options.conditions || [];
2013 Object.entries(fields).forEach(([key, value]) => {
2014 if (!key.startsWith('x-ignore-')) {
2015 conditions.push({ [key]: value });
2016 }
2017 });
2018 delete fields.bucket;
2019 const expiration = (0, util_1.formatAsUTCISO)(expires, true, '-', ':');
2020 const policy = {
2021 conditions,
2022 expiration,
2023 };
2024 const policyString = (0, util_1.unicodeJSONStringify)(policy);
2025 const policyBase64 = Buffer.from(policyString).toString('base64');
2026 try {
2027 const signature = await this.storage.authClient.sign(policyBase64);
2028 const signatureHex = Buffer.from(signature, 'base64').toString('hex');
2029 fields['policy'] = policyBase64;
2030 fields['x-goog-signature'] = signatureHex;
2031 let url;
2032 if (options.virtualHostedStyle) {
2033 url = `https://${this.bucket.name}.storage.googleapis.com/`;
2034 }
2035 else if (options.bucketBoundHostname) {
2036 url = `${options.bucketBoundHostname}/`;
2037 }
2038 else {
2039 url = `${exports.STORAGE_POST_POLICY_BASE_URL}/${this.bucket.name}/`;
2040 }
2041 return {
2042 url,
2043 fields,
2044 };
2045 }
2046 catch (err) {
2047 throw new signer_1.SigningError(err.message);
2048 }
2049 };
2050 sign().then(res => callback(null, res), callback);
2051 }
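// Added sketch (not part of the original source): one way to consume the
// {url, fields} result is an HTTP multipart POST. The `form-data` and `node-fetch`
// packages below are assumed third-party dependencies, not part of this library.
//
//   const FormData = require('form-data');
//   const fetch = require('node-fetch');
//   const [policy] = await file.generateSignedPostPolicyV4({
//     expires: Date.now() + 10 * 60 * 1000, // 10 minutes
//   });
//   const form = new FormData();
//   for (const [name, value] of Object.entries(policy.fields)) {
//     form.append(name, value);
//   }
//   form.append('file', Buffer.from('hello world'), 'my-file');
//   const res = await fetch(policy.url, {method: 'POST', body: form});
//   // GCS typically replies 204 No Content unless success_action_status is set.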
2052 /**
2053 * @typedef {array} GetSignedUrlResponse
2054 * @property {object} 0 The signed URL.
2055 */
2056 /**
2057 * @callback GetSignedUrlCallback
2058 * @param {?Error} err Request error, if any.
2059 * @param {object} url The signed URL.
2060 */
2061 /**
2062 * Get a signed URL to allow limited time access to the file.
2063 *
2064 * In Google Cloud Platform environments, such as Cloud Functions and App
2065 * Engine, you usually don't provide a `keyFilename` or `credentials` during
2066 * instantiation. In those environments, we call the
2067 * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API}
2068 * to create a signed URL. That API requires either the
2069 * `https://www.googleapis.com/auth/iam` or
2070 * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are
2071 * enabled.
2072 *
2073 * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference}
2074 *
2075 * @throws {Error} if an expiration timestamp from the past is given.
2076 *
2077 * @param {object} config Configuration object.
2078 * @param {string} config.action "read" (HTTP: GET), "write" (HTTP: PUT),
2079 * "delete" (HTTP: DELETE), or "resumable" (HTTP: POST).
2080 * When using "resumable", the header `X-Goog-Resumable: start` has
2081 * to be sent when making a request with the signed URL.
2082 * @param {*} config.expires A timestamp when this link will expire. Any value
2083 * given is passed to `new Date()`.
2084 * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now.
2085 * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example}
2086 * @param {string} [config.version='v2'] The signing version to use, either
2087 * 'v2' or 'v4'.
2088 * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style
2089 * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style
2090 * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs
2091 * should generally be preferred instead of path-style URLs.
2092 * Currently defaults to `false` for path-style, although this may change in a
2093 * future major-version release.
2094 * @param {string} [config.cname] The cname for this bucket, i.e.,
2095 * "https://cdn.example.com".
2096 * @param {string} [config.contentMd5] The MD5 digest value in base64. If you
2097 * provide this, the client must send the `Content-MD5` HTTP header with this
2098 * same value in its request; if this parameter is not provided here, the
2099 * client must not provide any value for this HTTP header in its request.
2100 * @param {string} [config.contentType] If you provide this, the client must
2101 * send the `Content-Type` HTTP header with this same value in its request; if
2102 * this parameter is not provided here, the client must not provide any value
2103 * for this HTTP header in its request.
2104 * @param {object} [config.extensionHeaders] If these headers are used, the
2105 * server will check to make sure that the client provides matching
2106 * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers}
2107 * for the requirements of this feature, most notably:
2108 * - The header name must be prefixed with `x-goog-`
2109 * - The header name must be all lowercase
2110 *
2111 * Note: A multi-valued header passed as an array in the extensionHeaders
2112 * object is converted into a string, delimited by `,` with
2113 * no space. Requests made using the signed URL will need to
2114 * delimit multi-valued headers using a single `,` as well, or
2115 * else the server will report a mismatched signature.
2116 * @param {object} [config.queryParams] Additional query parameters to include
2117 * in the signed URL.
2118 * @param {string} [config.promptSaveAs] The filename to prompt the user to
2119 * save the file as when the signed url is accessed. This is ignored if
2120 * `config.responseDisposition` is set.
2121 * @param {string} [config.responseDisposition] The
2122 * {@link http://goo.gl/yMWxQV| response-content-disposition parameter} of the
2123 * signed url.
2124 * @param {*} [config.accessibleAt=Date.now()] A timestamp when this link became usable. Any value
2125 * given is passed to `new Date()`.
2126 * Note: Use for 'v4' only.
2127 * @param {string} [config.responseType] The response-content-type parameter
2128 * of the signed url.
2129 * @param {GetSignedUrlCallback} [callback] Callback function.
2130 * @returns {Promise<GetSignedUrlResponse>}
2131 *
2132 * @example
2133 * ```
2134 * const {Storage} = require('@google-cloud/storage');
2135 * const storage = new Storage();
2136 * const myBucket = storage.bucket('my-bucket');
2137 *
2138 * const file = myBucket.file('my-file');
2139 *
2140 * //-
2141 * // Generate a URL that allows temporary access to download your file.
2142 * //-
2143 * const request = require('request');
2144 *
2145 * const config = {
2146 * action: 'read',
2147 * expires: '03-17-2025',
2148 * };
2149 *
2150 * file.getSignedUrl(config, function(err, url) {
2151 * if (err) {
2152 * console.error(err);
2153 * return;
2154 * }
2155 *
2156 * // The file is now available to read from this URL.
2157 * request(url, function(err, resp) {
2158 * // resp.statusCode = 200
2159 * });
2160 * });
2161 *
2162 * //-
2163 * // Generate a URL that allows temporary access to download your file.
2164 * // Access will begin at accessibleAt and end at expires.
2165 * //-
2166 * const request = require('request');
2167 *
2168 * const config = {
2169 * action: 'read',
2170 * expires: '03-17-2025',
2171 * accessibleAt: '03-13-2025'
2172 * };
2173 *
2174 * file.getSignedUrl(config, function(err, url) {
2175 * if (err) {
2176 * console.error(err);
2177 * return;
2178 * }
2179 *
2180 * // The file will be available to read from this URL from 03-13-2025 to 03-17-2025.
2181 * request(url, function(err, resp) {
2182 * // resp.statusCode = 200
2183 * });
2184 * });
2185 *
2186 * //-
2187 * // Generate a URL to allow write permissions. This means anyone with
2188 * // this URL can send a PUT request with new data that will overwrite
2189 * // the file.
2190 * //-
2191 * file.getSignedUrl({
2192 * action: 'write',
2193 * expires: '03-17-2025'
2194 * }, function(err, url) {
2195 * if (err) {
2196 * console.error(err);
2197 * return;
2198 * }
2199 *
2200 * // The file is now available to be written to.
2201 * const writeStream = request.put(url);
2202 * writeStream.end('New data');
2203 *
2204 * writeStream.on('complete', function(resp) {
2205 * // Confirm the new content was saved.
2206 * file.download(function(err, fileContents) {
2207 * console.log('Contents:', fileContents.toString());
2208 * // Contents: New data
2209 * });
2210 * });
2211 * });
2212 *
2213 * //-
2214 * // If the callback is omitted, we'll return a Promise.
2215 * //-
2216 * file.getSignedUrl(config).then(function(data) {
2217 * const url = data[0];
2218 * });
2219 *
2220 * ```
2221 * @example <caption>include:samples/files.js</caption>
2222 * region_tag:storage_generate_signed_url
2223 * Another example:
2224 */
2225 getSignedUrl(cfg, callback) {
2226 const method = ActionToHTTPMethod[cfg.action];
2227 if (!method) {
2228 throw new Error(storage_1.ExceptionMessages.INVALID_ACTION);
2229 }
2230 const extensionHeaders = (0, util_1.objectKeyToLowercase)(cfg.extensionHeaders || {});
2231 if (cfg.action === 'resumable') {
2232 extensionHeaders['x-goog-resumable'] = 'start';
2233 }
2234 const queryParams = Object.assign({}, cfg.queryParams);
2235 if (typeof cfg.responseType === 'string') {
2236 queryParams['response-content-type'] = cfg.responseType;
2237 }
2238 if (typeof cfg.promptSaveAs === 'string') {
2239 queryParams['response-content-disposition'] =
2240 'attachment; filename="' + cfg.promptSaveAs + '"';
2241 }
2242 if (typeof cfg.responseDisposition === 'string') {
2243 queryParams['response-content-disposition'] = cfg.responseDisposition;
2244 }
2245 if (this.generation) {
2246 queryParams['generation'] = this.generation.toString();
2247 }
2248 const signConfig = {
2249 method,
2250 expires: cfg.expires,
2251 accessibleAt: cfg.accessibleAt,
2252 extensionHeaders,
2253 queryParams,
2254 contentMd5: cfg.contentMd5,
2255 contentType: cfg.contentType,
2256 };
2257 if (cfg.cname) {
2258 signConfig.cname = cfg.cname;
2259 }
2260 if (cfg.version) {
2261 signConfig.version = cfg.version;
2262 }
2263 if (cfg.virtualHostedStyle) {
2264 signConfig.virtualHostedStyle = cfg.virtualHostedStyle;
2265 }
2266 if (!this.signer) {
2267 this.signer = new signer_1.URLSigner(this.storage.authClient, this.bucket, this);
2268 }
2269 this.signer
2270 .getSignedUrl(signConfig)
2271 .then(signedUrl => callback(null, signedUrl), callback);
2272 }
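// Added sketch (not part of the original source): a v4 read URL with a response
// disposition override, using the Promise form documented above. The 15-minute
// expiry is arbitrary (v4 allows at most 7 days).
//
//   const [url] = await file.getSignedUrl({
//     version: 'v4',
//     action: 'read',
//     expires: Date.now() + 15 * 60 * 1000,
//     responseDisposition: 'attachment; filename="report.pdf"',
//   });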
2273 /**
2274 * @callback IsPublicCallback
2275 * @param {?Error} err Request error, if any.
2276 * @param {boolean} resp Whether file is public or not.
2277 */
2278 /**
2279 * @typedef {array} IsPublicResponse
2280 * @property {boolean} 0 Whether file is public or not.
2281 */
2282 /**
2283 * Check whether this file is public or not by sending
2284 * a GET request without credentials.
2285 * If the server returns no error, the current
2286 * file is public.
2287 * A 403-Forbidden error {@link https://cloud.google.com/storage/docs/json_api/v1/status-codes#403_Forbidden}
2288 * indicates that the file is private.
2289 * Any other non-403 error is propagated to the user.
2290 *
2291 * @param {IsPublicCallback} [callback] Callback function.
2292 * @returns {Promise<IsPublicResponse>}
2293 *
2294 * @example
2295 * ```
2296 * const {Storage} = require('@google-cloud/storage');
2297 * const storage = new Storage();
2298 * const myBucket = storage.bucket('my-bucket');
2299 *
2300 * const file = myBucket.file('my-file');
2301 *
2302 * //-
2303 * // Check whether the file is publicly accessible.
2304 * //-
2305 * file.isPublic(function(err, resp) {
2306 * if (err) {
2307 * console.error(err);
2308 * return;
2309 * }
2310 * console.log(`the file ${file.id} is public: ${resp}`);
2311 * })
2312 * //-
2313 * // If the callback is omitted, we'll return a Promise.
2314 * //-
2315 * file.isPublic().then(function(data) {
2316 * const resp = data[0];
2317 * });
2318 * ```
2319 */
2320 isPublic(callback) {
2321 var _a;
2322 // Build any custom headers based on the defined interceptors on the parent
2323 // storage object and this object
2324 const storageInterceptors = ((_a = this.storage) === null || _a === void 0 ? void 0 : _a.interceptors) || [];
2325 const fileInterceptors = this.interceptors || [];
2326 const allInterceptors = storageInterceptors.concat(fileInterceptors);
2327 const headers = allInterceptors.reduce((acc, curInterceptor) => {
2328 const currentHeaders = curInterceptor.request({
2329 uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`,
2330 });
2331 Object.assign(acc, currentHeaders.headers);
2332 return acc;
2333 }, {});
2334 nodejs_common_1.util.makeRequest({
2335 method: 'GET',
2336 uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`,
2337 headers,
2338 }, {
2339 retryOptions: this.storage.retryOptions,
2340 }, (err) => {
2341 if (err) {
2342 const apiError = err;
2343 if (apiError.code === 403) {
2344 callback(null, false);
2345 }
2346 else {
2347 callback(err);
2348 }
2349 }
2350 else {
2351 callback(null, true);
2352 }
2353 });
2354 }
2355 /**
2356 * @typedef {object} MakeFilePrivateOptions Configuration options for File#makePrivate().
2357 * @property {Metadata} [metadata] Custom metadata properties to set
2358 * along with the operation.
2359 * @property {boolean} [strict] If true, set the file to be private to
2360 * only the owner user. Otherwise, it will be private to the project.
2361 * @property {string} [userProject] The ID of the project which will be
2362 * billed for the request.
2363 */
2364 /**
2365 * @callback MakeFilePrivateCallback
2366 * @param {?Error} err Request error, if any.
2367 * @param {object} apiResponse The full API response.
2368 */
2369 /**
2370 * @typedef {array} MakeFilePrivateResponse
2371 * @property {object} 0 The full API response.
2372 */
2373 /**
2374 * Make a file private to the project and remove all other permissions.
2375 * Set `options.strict` to true to make the file private to only the owner.
2376 *
2377 * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation}
2378 *
2379 * @param {MakeFilePrivateOptions} [options] Configuration options.
2380 * @param {MakeFilePrivateCallback} [callback] Callback function.
2381 * @returns {Promise<MakeFilePrivateResponse>}
2382 *
2383 * @example
2384 * ```
2385 * const {Storage} = require('@google-cloud/storage');
2386 * const storage = new Storage();
2387 * const myBucket = storage.bucket('my-bucket');
2388 *
2389 * const file = myBucket.file('my-file');
2390 *
2391 * //-
2392 * // Set the file private so only project maintainers can see and modify it.
2393 * //-
2394 * file.makePrivate(function(err) {});
2395 *
2396 * //-
2397 * // Set the file private so only the owner can see and modify it.
2398 * //-
2399 * file.makePrivate({ strict: true }, function(err) {});
2400 *
2401 * //-
2402 * // If the callback is omitted, we'll return a Promise.
2403 * //-
2404 * file.makePrivate().then(function(data) {
2405 * const apiResponse = data[0];
2406 * });
2407 * ```
2408 */
2409 makePrivate(optionsOrCallback, callback) {
2410 var _a, _b;
2411 const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};
2412 callback =
2413 typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;
2414 const query = {
2415 predefinedAcl: options.strict ? 'private' : 'projectPrivate',
2416 // eslint-disable-next-line @typescript-eslint/no-explicit-any
2417 };
2418 if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifMetagenerationMatch) !== undefined) {
2419 query.ifMetagenerationMatch =
2420 (_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifMetagenerationMatch;
2421 delete options.preconditionOpts;
2422 }
2423 if (options.userProject) {
2424 query.userProject = options.userProject;
2425 }
2426 // You aren't allowed to set both predefinedAcl & acl properties on a file,
2427 // so acl must explicitly be nullified, destroying all previous acls on the
2428 // file.
2429 const metadata = extend({}, options.metadata, { acl: null });
2430 this.setMetadata(metadata, query, callback);
2431 }
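// Added sketch (not part of the original source): makePrivate() accepts extra
// metadata and, as handled above, an ifMetagenerationMatch precondition.
// `file.metadata.metageneration` assumes the metadata was fetched beforehand and
// the cacheControl value is illustrative.
//
//   file.makePrivate({
//     strict: true,
//     metadata: {cacheControl: 'private, max-age=0'},
//     preconditionOpts: {ifMetagenerationMatch: file.metadata.metageneration},
//   }, err => {});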
2432 /**
2433 * @typedef {array} MakeFilePublicResponse
2434 * @property {object} 0 The full API response.
2435 */
2436 /**
2437 * @callback MakeFilePublicCallback
2438 * @param {?Error} err Request error, if any.
2439 * @param {object} apiResponse The full API response.
2440 */
2441 /**
2442 * Set a file to be publicly readable and maintain all previous permissions.
2443 *
2444 * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation}
2445 *
2446 * @param {MakeFilePublicCallback} [callback] Callback function.
2447 * @returns {Promise<MakeFilePublicResponse>}
2448 *
2449 * @example
2450 * ```
2451 * const {Storage} = require('@google-cloud/storage');
2452 * const storage = new Storage();
2453 * const myBucket = storage.bucket('my-bucket');
2454 *
2455 * const file = myBucket.file('my-file');
2456 *
2457 * file.makePublic(function(err, apiResponse) {});
2458 *
2459 * //-
2460 * // If the callback is omitted, we'll return a Promise.
2461 * //-
2462 * file.makePublic().then(function(data) {
2463 * const apiResponse = data[0];
2464 * });
2465 *
2466 * ```
2467 * @example <caption>include:samples/files.js</caption>
2468 * region_tag:storage_make_public
2469 * Another example:
2470 */
2471 makePublic(callback) {
2472 callback = callback || nodejs_common_1.util.noop;
2473 this.acl.add({
2474 entity: 'allUsers',
2475 role: 'READER',
2476 }, (err, acl, resp) => {
2477 callback(err, resp);
2478 });
2479 }
2480 /**
2481 * The public URL of this File
2482 * Use {@link File#makePublic} to enable anonymous access via the returned URL.
2483 *
2484 * @returns {string}
2485 *
2486 * @example
2487 * ```
2488 * const {Storage} = require('@google-cloud/storage');
2489 * const storage = new Storage();
2490 * const bucket = storage.bucket('albums');
2491 * const file = bucket.file('my-file');
2492 *
2493 * // publicUrl will be "https://storage.googleapis.com/albums/my-file"
2494 * const publicUrl = file.publicUrl();
2495 * ```
2496 */
2497 publicUrl() {
2498 return `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`;
2499 }
2500 /**
2501 * @typedef {array} MoveResponse
2502 * @property {File} 0 The destination File.
2503 * @property {object} 1 The full API response.
2504 */
2505 /**
2506 * @callback MoveCallback
2507 * @param {?Error} err Request error, if any.
2508 * @param {?File} destinationFile The destination File.
2509 * @param {object} apiResponse The full API response.
2510 */
2511 /**
2512 * @typedef {object} MoveOptions Configuration options for File#move(). See an
2513 * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}.
2514 * @param {string} [userProject] The ID of the project which will be
2515 * billed for the request.
2516 */
2517 /**
2518 * Move this file to another location. By default, this will rename the file
2519 * and keep it in the same bucket, but you can choose to move it to another
2520 * Bucket by providing a Bucket or File object or a URL beginning with
2521 * "gs://".
2522 *
2523 * **Warning**:
2524 * There is currently no atomic `move` method in the Cloud Storage API,
2525 * so this method is a composition of {@link File#copy} (to the new
2526 * location) and {@link File#delete} (from the old location). While
2527 * unlikely, it is possible that an error returned to your callback could be
2528 * triggered from either one of these API calls failing, which could leave a
2529 * duplicate file lingering. The error message will indicate what operation
2530 * has failed.
2531 *
2532 * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/copy| Objects: copy API Documentation}
2533 *
2534 * @throws {Error} If the destination file is not provided.
2535 *
2536 * @param {string|Bucket|File} destination Destination file.
2537 * @param {MoveCallback} [callback] Callback function.
2538 * @returns {Promise<MoveResponse>}
2539 *
2540 * @example
2541 * ```
2542 * const {Storage} = require('@google-cloud/storage');
2543 * const storage = new Storage();
2544 * //-
2545 * // You can pass in a variety of types for the destination.
2546 * //
2547 * // For all of the below examples, assume we are working with the following
2548 * // Bucket and File objects.
2549 * //-
2550 * const bucket = storage.bucket('my-bucket');
2551 * const file = bucket.file('my-image.png');
2552 *
2553 * //-
2554 * // If you pass in a string for the destination, the file is moved to its
2555 * // current bucket, under the new name provided.
2556 * //-
2557 * file.move('my-image-new.png', function(err, destinationFile, apiResponse) {
2558 * // `my-bucket` no longer contains:
2559 * // - "my-image.png"
2560 * // but contains instead:
2561 * // - "my-image-new.png"
2562 *
2563 * // `destinationFile` is an instance of a File object that refers to your
2564 * // new file.
2565 * });
2566 *
2567 * //-
2568 * // If you pass in a string starting with "gs://" for the destination, the
2569 * // file is copied to the other bucket and under the new name provided.
2570 * //-
2571 * const newLocation = 'gs://another-bucket/my-image-new.png';
2572 * file.move(newLocation, function(err, destinationFile, apiResponse) {
2573 * // `my-bucket` no longer contains:
2574 * // - "my-image.png"
2575 * //
2576 * // `another-bucket` now contains:
2577 * // - "my-image-new.png"
2578 *
2579 * // `destinationFile` is an instance of a File object that refers to your
2580 * // new file.
2581 * });
2582 *
2583 * //-
2584 * // If you pass in a Bucket object, the file will be moved to that bucket
2585 * // using the same name.
2586 * //-
2587 * const anotherBucket = storage.bucket('another-bucket');
2588 *
2589 * file.move(anotherBucket, function(err, destinationFile, apiResponse) {
2590 * // `my-bucket` no longer contains:
2591 * // - "my-image.png"
2592 * //
2593 * // `another-bucket` now contains:
2594 * // - "my-image.png"
2595 *
2596 * // `destinationFile` is an instance of a File object that refers to your
2597 * // new file.
2598 * });
2599 *
2600 * //-
2601 * // If you pass in a File object, you have complete control over the new
2602 * // bucket and filename.
2603 * //-
2604 * const anotherFile = anotherBucket.file('my-awesome-image.png');
2605 *
2606 * file.move(anotherFile, function(err, destinationFile, apiResponse) {
2607 * // `my-bucket` no longer contains:
2608 * // - "my-image.png"
2609 * //
2610 * // `another-bucket` now contains:
2611 * // - "my-awesome-image.png"
2612 *
2613 * // Note:
2614 * // The `destinationFile` parameter is equal to `anotherFile`.
2615 * });
2616 *
2617 * //-
2618 * // If the callback is omitted, we'll return a Promise.
2619 * //-
2620 * file.move('my-image-new.png').then(function(data) {
2621 * const destinationFile = data[0];
2622 * const apiResponse = data[1];
2623 * });
2624 *
2625 * ```
2626 * @example <caption>include:samples/files.js</caption>
2627 * region_tag:storage_move_file
2628 * Another example:
2629 */
2630 move(destination, optionsOrCallback, callback) {
2631 const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};
2632 callback =
2633 typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;
2634 callback = callback || nodejs_common_1.util.noop;
2635 this.copy(destination, options, (err, destinationFile, copyApiResponse) => {
2636 if (err) {
2637 err.message = 'file#copy failed with an error - ' + err.message;
2638 callback(err, null, copyApiResponse);
2639 return;
2640 }
2641 if (this.name !== destinationFile.name ||
2642 this.bucket.name !== destinationFile.bucket.name) {
2643 this.delete(options, (err, apiResponse) => {
2644 if (err) {
2645 err.message = 'file#delete failed with an error - ' + err.message;
2646 callback(err, destinationFile, apiResponse);
2647 return;
2648 }
2649 callback(null, destinationFile, copyApiResponse);
2650 });
2651 }
2652 else {
2653 callback(null, destinationFile, copyApiResponse);
2654 }
2655 });
2656 }
2657 /**
2658 * @typedef {array} RenameResponse
2659 * @property {File} 0 The destination File.
2660 * @property {object} 1 The full API response.
2661 */
2662 /**
2663 * @callback RenameCallback
2664 * @param {?Error} err Request error, if any.
2665 * @param {?File} destinationFile The destination File.
2666 * @param {object} apiResponse The full API response.
2667 */
2668 /**
2669 * @typedef {object} RenameOptions Configuration options for File#rename(). See an
2670 * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}.
2671 * @param {string} [userProject] The ID of the project which will be
2672 * billed for the request.
2673 */
2674 /**
2675 * Rename this file.
2676 *
2677 * **Warning**:
2678 * There is currently no atomic `rename` method in the Cloud Storage API,
2679 * so this method is an alias of {@link File#move}, which in turn is a
2680 * composition of {@link File#copy} (to the new location) and
2681 * {@link File#delete} (from the old location). While
2682 * unlikely, it is possible that an error returned to your callback could be
2683 * triggered from either one of these API calls failing, which could leave a
2684 * duplicate file lingering. The error message will indicate what operation
2685 * has failed.
2686 *
2687 * @param {string|File} destinationFile Destination file.
2688 * @param {RenameCallback} [callback] Callback function.
2689 * @returns {Promise<RenameResponse>}
2690 *
2691 * @example
2692 * ```
2693 * const {Storage} = require('@google-cloud/storage');
2694 * const storage = new Storage();
2695 *
2696 * //-
2697 * // You can pass in a string or a File object.
2698 * //
2699 * // For all of the below examples, assume we are working with the following
2700 * // Bucket and File objects.
2701 * //-
2702 *
2703 * const bucket = storage.bucket('my-bucket');
2704 * const file = bucket.file('my-image.png');
2705 *
2706 * //-
2707 * // You can pass in a string for the destinationFile.
2708 * //-
2709 * file.rename('renamed-image.png', function(err, renamedFile, apiResponse) {
2710 * // `my-bucket` no longer contains:
2711 * // - "my-image.png"
2712 * // but contains instead:
2713 * // - "renamed-image.png"
2714 *
2715 * // `renamedFile` is an instance of a File object that refers to your
2716 * // renamed file.
2717 * });
2718 *
2719 * //-
2720 * // You can pass in a File object.
2721 * //-
2722 * const anotherBucket = storage.bucket('another-bucket');
 * const anotherFile = anotherBucket.file('my-awesome-image.png');
2723 *
2724 * file.rename(anotherFile, function(err, renamedFile, apiResponse) {
2725 * // `my-bucket` no longer contains:
2726 * // - "my-image.png"
2727 *
2728 * // Note:
2729 * // The `renamedFile` parameter is equal to `anotherFile`.
2730 * });
2731 *
2732 * //-
2733 * // If the callback is omitted, we'll return a Promise.
2734 * //-
2735 * file.rename('my-renamed-image.png').then(function(data) {
2736 * const renamedFile = data[0];
2737 * const apiResponse = data[1];
2738 * });
2739 * ```
2740 */
2741 rename(destinationFile, optionsOrCallback, callback) {
2742 const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};
2743 callback =
2744 typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;
2745 callback = callback || nodejs_common_1.util.noop;
2746 this.move(destinationFile, options, callback);
2747 }
2748 /**
2749 * Makes request and applies userProject query parameter if necessary.
2750 *
2751 * @private
2752 *
2753 * @param {object} reqOpts - The request options.
2754 * @param {function} callback - The callback function.
2755 */
2756 request(reqOpts, callback) {
2757 return this.parent.request.call(this, reqOpts, callback);
2758 }
2759 /**
2760 * @callback RotateEncryptionKeyCallback
2761 * @extends CopyCallback
2762 */
2763 /**
2764 * @typedef RotateEncryptionKeyResponse
2765 * @extends CopyResponse
2766 */
2767 /**
2768 * @typedef {string|buffer|object} RotateEncryptionKeyOptions Configuration options
2769 * for File#rotateEncryptionKey().
2770 * If a string or Buffer is provided, it is interpreted as an AES-256,
2771 * customer-supplied encryption key. If you'd like to use a Cloud KMS key
2772 * name, you must specify an options object with the property name:
2773 * `kmsKeyName`.
2774 * @param {string|buffer} [options.encryptionKey] An AES-256 encryption key.
2775 * @param {string} [options.kmsKeyName] A Cloud KMS key name.
2776 */
2777 /**
2778 * This method allows you to update the encryption key associated with this
2779 * file.
2780 *
2781 * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}
2782 *
2783 * @param {RotateEncryptionKeyOptions} [options] - Configuration options.
2784 * @param {RotateEncryptionKeyCallback} [callback]
2785 * @returns {Promise<File>}
2786 *
2787 * @example <caption>include:samples/encryption.js</caption>
2788 * region_tag:storage_rotate_encryption_key
2789 * Example of rotating the encryption key for this file:
2790 */
2791 rotateEncryptionKey(optionsOrCallback, callback) {
2792 var _a;
2793 callback =
2794 typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;
2795 let options = {};
2796 if (typeof optionsOrCallback === 'string' ||
2797 optionsOrCallback instanceof Buffer) {
2798 options = {
2799 encryptionKey: optionsOrCallback,
2800 };
2801 }
2802 else if (typeof optionsOrCallback === 'object') {
2803 options = optionsOrCallback;
2804 }
2805 const newFile = this.bucket.file(this.id, options);
2806 const copyOptions = ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined
2807 ? { preconditionOpts: options.preconditionOpts }
2808 : {};
2809 this.copy(newFile, copyOptions, callback);
2810 }
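// Added sketch (not part of the original source): rotating from one
// customer-supplied key to another. `currentKey` is assumed to hold the AES-256 key
// the object is currently encrypted with; rotateEncryptionKey() then copies the
// file onto itself using the new key.
//
//   const crypto = require('crypto');
//   const newKey = crypto.randomBytes(32);
//   file.setEncryptionKey(currentKey);
//   file.rotateEncryptionKey(newKey, err => {
//     // On success the object is readable only with `newKey`.
//   });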
2811 /**
2812 * @typedef {object} SaveOptions
2813 * @extends CreateWriteStreamOptions
2814 */
2815 /**
2816 * @callback SaveCallback
2817 * @param {?Error} err Request error, if any.
2818 */
2819 /**
2820 * Write strings or buffers to a file.
2821 *
2822 * *This is a convenience method which wraps {@link File#createWriteStream}.*
2823 * To upload arbitrary data to a file, please use {@link File#createWriteStream} directly.
2824 *
2825 * Resumable uploads are automatically enabled and must be shut off explicitly
2826 * by setting `options.resumable` to `false`.
2827 *
2828 * Multipart uploads with retryable error codes will be retried 3 times with exponential backoff.
2829 *
2830 * <p class="notice">
2831 * There is some overhead when using a resumable upload that can cause
2832 * noticeable performance degradation while uploading a series of small
2833 * files. When uploading files smaller than 10MB, it is recommended that the
2834 * resumable feature is disabled.
2835 * </p>
2836 *
2837 * @param {string | Buffer} data The data to write to a file.
2838 * @param {SaveOptions} [options] See {@link File#createWriteStream}'s `options`
2839 * parameter.
2840 * @param {SaveCallback} [callback] Callback function.
2841 * @returns {Promise}
2842 *
2843 * @example
2844 * ```
2845 * const {Storage} = require('@google-cloud/storage');
2846 * const storage = new Storage();
2847 * const myBucket = storage.bucket('my-bucket');
2848 *
2849 * const file = myBucket.file('my-file');
2850 * const contents = 'This is the contents of the file.';
2851 *
2852 * file.save(contents, function(err) {
2853 * if (!err) {
2854 * // File written successfully.
2855 * }
2856 * });
2857 *
2858 * //-
2859 * // If the callback is omitted, we'll return a Promise.
2860 * //-
2861 * file.save(contents).then(function() {});
2862 * ```
2863 */
2864 save(data, optionsOrCallback, callback) {
2865 // tslint:enable:no-any
2866 callback =
2867 typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;
2868 const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};
2869 let maxRetries = this.storage.retryOptions.maxRetries;
2870 if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) {
2871 maxRetries = 0;
2872 }
2873 const returnValue = retry(async (bail) => {
2874 await new Promise((resolve, reject) => {
2875 if (maxRetries === 0) {
2876 this.storage.retryOptions.autoRetry = false;
2877 }
2878 const writable = this.createWriteStream(options)
2879 .on('error', err => {
2880 if (this.storage.retryOptions.autoRetry &&
2881 this.storage.retryOptions.retryableErrorFn(err)) {
2882 return reject(err);
2883 }
2884 else {
2885 return bail(err);
2886 }
2887 })
2888 .on('finish', () => {
2889 return resolve();
2890 });
2891 if (options.onUploadProgress) {
2892 writable.on('progress', options.onUploadProgress);
2893 }
2894 writable.end(data);
2895 });
2896 }, {
2897 retries: maxRetries,
2898 factor: this.storage.retryOptions.retryDelayMultiplier,
2899 maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000,
2900 maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds
2901 });
2902 if (!callback) {
2903 return returnValue;
2904 }
2905 else {
2906 return returnValue
2907 .then(() => {
2908 if (callback) {
2909 return callback();
2910 }
2911 })
2912 .catch(callback);
2913 }
2914 }
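// Added sketch (not part of the original source): save() with the resumable
// behavior disabled for a small payload, as recommended in the JSDoc above. The
// payload and metadata values are illustrative.
//
//   await file.save(Buffer.from('{"ok":true}'), {
//     resumable: false,
//     metadata: {contentType: 'application/json'},
//   });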
2915 setMetadata(metadata, optionsOrCallback, cb) {
2916 const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};
2917 cb =
2918 typeof optionsOrCallback === 'function'
2919 ? optionsOrCallback
2920 : cb;
2921 this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, bucket_1.AvailableServiceObjectMethods.setMetadata, options);
2922 super
2923 .setMetadata(metadata, options)
2924 .then(resp => cb(null, ...resp))
2925 .catch(cb)
2926 .finally(() => {
2927 this.storage.retryOptions.autoRetry = this.instanceRetryValue;
2928 });
2929 }
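// Added sketch (not part of the original source): setMetadata() patches object
// metadata; fixed fields sit at the top level while custom key/value pairs go under
// `metadata`. The values shown are illustrative.
//
//   file.setMetadata({
//     cacheControl: 'public, max-age=3600',
//     contentType: 'text/plain',
//     metadata: {reviewedBy: 'someone@example.com'},
//   }, err => {});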
2930 /**
2931 * @typedef {array} SetStorageClassResponse
2932 * @property {object} 0 The full API response.
2933 */
2934 /**
2935 * @typedef {object} SetStorageClassOptions Configuration options for File#setStorageClass().
2936 * @property {string} [userProject] The ID of the project which will be
2937 * billed for the request.
2938 */
2939 /**
2940 * @callback SetStorageClassCallback
2941 * @param {?Error} err Request error, if any.
2942 * @param {object} apiResponse The full API response.
2943 */
2944 /**
2945 * Set the storage class for this file.
2946 *
2947 * See {@link https://cloud.google.com/storage/docs/per-object-storage-class| Per-Object Storage Class}
2948 * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes}
2949 *
2950 * @param {string} storageClass The new storage class. (`standard`,
2951 * `nearline`, `coldline`, or `archive`)
2952 * **Note:** The storage classes `multi_regional` and `regional`
2953 * are now legacy and will be deprecated in the future.
2954 * @param {SetStorageClassOptions} [options] Configuration options.
2955 * @param {string} [options.userProject] The ID of the project which will be
2956 * billed for the request.
2957 * @param {SetStorageClassCallback} [callback] Callback function.
2958 * @returns {Promise<SetStorageClassResponse>}
2959 *
2960 * @example
2961 * ```
2962 * file.setStorageClass('nearline', function(err, apiResponse) {
2963 * if (err) {
2964 * // Error handling omitted.
2965 * }
2966 *
2967 * // The storage class was updated successfully.
2968 * });
2969 *
2970 * //-
2971 * // If the callback is omitted, we'll return a Promise.
2972 * //-
2973 * file.setStorageClass('nearline').then(function() {});
2974 * ```
2975 */
2976 setStorageClass(storageClass, optionsOrCallback, callback) {
2977 callback =
2978 typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;
2979 const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};
2980 const req = extend(true, {}, options);
2981 // In case we get input like `storageClass`, convert to `storage_class`.
2982 req.storageClass = storageClass
2983 .replace(/-/g, '_')
2984 .replace(/([a-z])([A-Z])/g, (_, low, up) => {
2985 return low + '_' + up;
2986 })
2987 .toUpperCase();
2988 this.copy(this, req, (err, file, apiResponse) => {
2989 if (err) {
2990 callback(err, apiResponse);
2991 return;
2992 }
2993 this.metadata = file.metadata;
2994 callback(null, apiResponse);
2995 });
2996 }
2997 /**
2998 * Set a user project to be billed for all requests made from this File
2999 * object.
3000 *
3001 * @param {string} userProject The user project.
3002 *
3003 * @example
3004 * ```
3005 * const {Storage} = require('@google-cloud/storage');
3006 * const storage = new Storage();
3007 * const bucket = storage.bucket('albums');
3008 * const file = bucket.file('my-file');
3009 *
3010 * file.setUserProject('grape-spaceship-123');
3011 * ```
3012 */
3013 setUserProject(userProject) {
3014 this.bucket.setUserProject.call(this, userProject);
3015 }
3016 /**
3017 * This creates a resumable upload stream.
3018 *
3019 * @param {Duplexify} stream - Duplexify stream of data to pipe to the file.
3020 * @param {object=} options - Configuration object.
3021 *
3022 * @private
3023 */
3024 startResumableUpload_(dup, options) {
3025 options = extend(true, {
3026 metadata: {},
3027 }, options);
3028 const retryOptions = this.storage.retryOptions;
3029 if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) {
3030 retryOptions.autoRetry = false;
3031 }
3032 const uploadStream = resumableUpload.upload({
3033 authClient: this.storage.authClient,
3034 apiEndpoint: this.storage.apiEndpoint,
3035 bucket: this.bucket.name,
3036 customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}),
3037 file: this.name,
3038 generation: this.generation,
3039 key: this.encryptionKey,
3040 kmsKeyName: this.kmsKeyName,
3041 metadata: options.metadata,
3042 offset: options.offset,
3043 predefinedAcl: options.predefinedAcl,
3044 private: options.private,
3045 public: options.public,
3046 uri: options.uri,
3047 userProject: options.userProject || this.userProject,
3048 retryOptions: { ...retryOptions },
3049 params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts,
3050 chunkSize: options === null || options === void 0 ? void 0 : options.chunkSize,
3051 });
3052 uploadStream
3053 .on('response', resp => {
3054 dup.emit('response', resp);
3055 })
3056 .on('metadata', metadata => {
3057 this.metadata = metadata;
3058 dup.emit('metadata');
3059 })
3060 .on('finish', () => {
3061 dup.emit('complete');
3062 })
3063 .on('progress', evt => dup.emit('progress', evt));
3064 dup.setWritable(uploadStream);
3065 this.storage.retryOptions.autoRetry = this.instanceRetryValue;
3066 }
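/*
 * `startResumableUpload_` is private; it is reached through the public write
 * path. A minimal sketch, assuming `createWriteStream` with `resumable: true`
 * delegates to this method. The file names and local path are placeholders.
 *
 * ```
 * const fs = require('fs');
 * const {Storage} = require('@google-cloud/storage');
 * const storage = new Storage();
 * const file = storage.bucket('albums').file('large-video.mp4');
 *
 * fs.createReadStream('/local/path/large-video.mp4')
 *   .pipe(file.createWriteStream({
 *     resumable: true,
 *     metadata: {contentType: 'video/mp4'},
 *   }))
 *   // 'progress' events are forwarded from the underlying resumable upload.
 *   .on('progress', evt => console.log(evt.bytesWritten))
 *   .on('error', err => console.error(err))
 *   .on('finish', () => console.log('Upload complete.'));
 * ```
 */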
3067 /**
3068 * Takes a readable stream and pipes it to a remote file. Unlike
3069 * `startResumableUpload_`, which uses the resumable upload technique, this
3070 * method uses a simple upload (all or nothing).
3071 *
3072 * @param {Duplexify} dup - Duplexify stream of data to pipe to the file.
3073 * @param {object=} options - Configuration object.
3074 *
3075 * @private
3076 */
3077 startSimpleUpload_(dup, options) {
3078 options = extend(true, {
3079 metadata: {},
3080 }, options);
3081 const apiEndpoint = this.storage.apiEndpoint;
3082 const bucketName = this.bucket.name;
3083 const uri = `${apiEndpoint}/upload/storage/v1/b/${bucketName}/o`;
3084 const reqOpts = {
3085 qs: {
3086 name: this.name,
3087 },
3088 uri: uri,
3089 };
3090 if (this.generation !== undefined) {
3091 reqOpts.qs.ifGenerationMatch = this.generation;
3092 }
3093 if (this.kmsKeyName !== undefined) {
3094 reqOpts.qs.kmsKeyName = this.kmsKeyName;
3095 }
3096 if (typeof options.timeout === 'number') {
3097 reqOpts.timeout = options.timeout;
3098 }
3099 if (options.userProject || this.userProject) {
3100 reqOpts.qs.userProject = options.userProject || this.userProject;
3101 }
3102 if (options.predefinedAcl) {
3103 reqOpts.qs.predefinedAcl = options.predefinedAcl;
3104 }
3105 else if (options.private) {
3106 reqOpts.qs.predefinedAcl = 'private';
3107 }
3108 else if (options.public) {
3109 reqOpts.qs.predefinedAcl = 'publicRead';
3110 }
3111 Object.assign(reqOpts.qs, this.instancePreconditionOpts, options.preconditionOpts);
3112 nodejs_common_1.util.makeWritableStream(dup, {
3113 makeAuthenticatedRequest: (reqOpts) => {
3114 this.request(reqOpts, (err, body, resp) => {
3115 if (err) {
3116 dup.destroy(err);
3117 return;
3118 }
3119 this.metadata = body;
3120 dup.emit('metadata', body);
3121 dup.emit('response', resp);
3122 dup.emit('complete');
3123 });
3124 },
3125 metadata: options.metadata,
3126 request: reqOpts,
3127 });
3128 }
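/*
 * `startSimpleUpload_` is the all-or-nothing counterpart used when no
 * resumable session is wanted. A minimal sketch, assuming the public `save`
 * API accepts `resumable: false` to take this path. Names are placeholders.
 *
 * ```
 * const {Storage} = require('@google-cloud/storage');
 * const storage = new Storage();
 * const file = storage.bucket('albums').file('notes.txt');
 *
 * // Small payloads are a good fit for a single-request (simple) upload.
 * file.save('hello world', {
 *   resumable: false,
 *   metadata: {contentType: 'text/plain'},
 * }).then(() => console.log('Uploaded in one request.'));
 * ```
 */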
3129 disableAutoRetryConditionallyIdempotent_(
3130 // eslint-disable-next-line @typescript-eslint/no-explicit-any
3131 coreOpts, methodType, localPreconditionOptions) {
3132 var _a, _b, _c, _d;
3133 if ((typeof coreOpts === 'object' &&
3134 ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined &&
3135 (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifGenerationMatch) === undefined &&
3136 methodType === bucket_1.AvailableServiceObjectMethods.delete &&
3137 this.storage.retryOptions.idempotencyStrategy ===
3138 storage_1.IdempotencyStrategy.RetryConditional) ||
3139 this.storage.retryOptions.idempotencyStrategy ===
3140 storage_1.IdempotencyStrategy.RetryNever) {
3141 this.storage.retryOptions.autoRetry = false;
3142 }
3143 if ((typeof coreOpts === 'object' &&
3144 ((_d = (_c = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _c === void 0 ? void 0 : _c.qs) === null || _d === void 0 ? void 0 : _d.ifMetagenerationMatch) === undefined &&
3145 (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined &&
3146 methodType === bucket_1.AvailableServiceObjectMethods.setMetadata &&
3147 this.storage.retryOptions.idempotencyStrategy ===
3148 storage_1.IdempotencyStrategy.RetryConditional) ||
3149 this.storage.retryOptions.idempotencyStrategy ===
3150 storage_1.IdempotencyStrategy.RetryNever) {
3151 this.storage.retryOptions.autoRetry = false;
3152 }
3153 }
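/*
 * Sketch of the behavior this helper guards: under the `RetryConditional`
 * idempotency strategy, a mutation is only retried automatically when a
 * precondition such as `ifGenerationMatch` is supplied. This assumes
 * `IdempotencyStrategy` is exported from the package root and that per-file
 * preconditions are passed via `preconditionOpts`; names and the generation
 * value are placeholders.
 *
 * ```
 * const {Storage, IdempotencyStrategy} = require('@google-cloud/storage');
 * const storage = new Storage({
 *   retryOptions: {idempotencyStrategy: IdempotencyStrategy.RetryConditional},
 * });
 *
 * const file = storage.bucket('albums').file('my-file', {
 *   preconditionOpts: {ifGenerationMatch: 1234567890},
 * });
 *
 * // With a precondition supplied, this delete stays retryable; without one,
 * // autoRetry is disabled for the call under RetryConditional.
 * file.delete().then(() => console.log('Deleted.'));
 * ```
 */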
3154 async getBufferFromReadable(readable) {
3155 const buf = [];
3156 for await (const chunk of readable) {
3157 buf.push(chunk);
3158 }
3159 return Buffer.concat(buf);
3160 }
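/*
 * The same buffering pattern as `getBufferFromReadable`, shown against the
 * public `createReadStream` API (a sketch; the object name is a placeholder).
 * Node readable streams are async iterable, so chunks can be collected with
 * for-await and concatenated.
 *
 * ```
 * const {Storage} = require('@google-cloud/storage');
 * const storage = new Storage();
 * const file = storage.bucket('albums').file('my-file');
 *
 * async function readAll() {
 *   const chunks = [];
 *   for await (const chunk of file.createReadStream()) {
 *     chunks.push(chunk);
 *   }
 *   return Buffer.concat(chunks);
 * }
 *
 * readAll().then(buf => console.log(buf.length));
 * ```
 */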
3161}
3162exports.File = File;
3163_File_instances = new WeakSet(), _File_validateIntegrity =
3164/**
3165 * Verifies the uploaded data against the hashes computed while streaming.
3166 * @param hashCalculatingStream The hash stream validator holding the computed CRC32C/MD5 digests.
3167 * @param verify Which checks to run (`crc32c` and/or `md5`).
3168 * @returns {Promise<boolean>} Resolves to `true` if valid; on a mismatch the uploaded object is deleted and a RequestError is thrown.
3169 */
3170async function _File_validateIntegrity(hashCalculatingStream, verify = {}) {
3171 const metadata = this.metadata;
3172 // If we're doing validation, assume the worst
3173 let dataMismatch = !!(verify.crc32c || verify.md5);
3174 if (verify.crc32c && metadata.crc32c) {
3175 dataMismatch = !hashCalculatingStream.test('crc32c', metadata.crc32c);
3176 }
3177 if (verify.md5 && metadata.md5Hash) {
3178 dataMismatch = !hashCalculatingStream.test('md5', metadata.md5Hash);
3179 }
3180 if (dataMismatch) {
3181 const errors = [];
3182 let code = '';
3183 let message = '';
3184 try {
3185 await this.delete();
3186 if (verify.md5 && !metadata.md5Hash) {
3187 code = 'MD5_NOT_AVAILABLE';
3188 message = FileExceptionMessages.MD5_NOT_AVAILABLE;
3189 }
3190 else {
3191 code = 'FILE_NO_UPLOAD';
3192 message = FileExceptionMessages.UPLOAD_MISMATCH;
3193 }
3194 }
3195 catch (e) {
3196 const error = e;
3197 code = 'FILE_NO_UPLOAD_DELETE';
3198 message = `${FileExceptionMessages.UPLOAD_MISMATCH_DELETE_FAIL}${error.message}`;
3199 errors.push(error);
3200 }
3201 const error = new RequestError(message);
3202 error.code = code;
3203 error.errors = errors;
3204 throw error;
3205 }
3206 return true;
3207};
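/*
 * Sketch of how this integrity check surfaces to callers: when a write is
 * created with validation enabled and the computed hash does not match the
 * object's metadata, the upload is deleted and the stream errors with a
 * RequestError (code `FILE_NO_UPLOAD`, or `MD5_NOT_AVAILABLE` when MD5 was
 * requested for a composite object). This assumes `createWriteStream`
 * accepts a `validation` option; names and paths are placeholders.
 *
 * ```
 * const fs = require('fs');
 * const {Storage} = require('@google-cloud/storage');
 * const storage = new Storage();
 * const file = storage.bucket('albums').file('report.pdf');
 *
 * fs.createReadStream('/local/path/report.pdf')
 *   .pipe(file.createWriteStream({validation: 'crc32c'}))
 *   .on('error', err => {
 *     // err.code distinguishes a hash mismatch from a failed cleanup
 *     // (`FILE_NO_UPLOAD_DELETE`).
 *     console.error(err.code, err.message);
 *   })
 *   .on('finish', () => console.log('Upload verified.'));
 * ```
 */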
3208/*! Developer Documentation
3209 *
3210 * All async methods (except for streams) will return a Promise in the event
3211 * that a callback is omitted.
3212 */
3213(0, promisify_1.promisifyAll)(File, {
3214 exclude: [
3215 'cloudStorageURI',
3216 'publicUrl',
3217 'request',
3218 'save',
3219 'setEncryptionKey',
3220 'shouldRetryBasedOnPreconditionAndIdempotencyStrat',
3221 'getBufferFromReadable',
3222 ],
3223});
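/*
 * Consequence of the promisification above: any callback-style method not in
 * the exclude list can be awaited and resolves with the callback's non-error
 * arguments as an array. A small sketch; names are placeholders.
 *
 * ```
 * const {Storage} = require('@google-cloud/storage');
 * const file = new Storage().bucket('albums').file('my-file');
 *
 * async function inspect() {
 *   const [exists] = await file.exists();
 *   if (exists) {
 *     const [metadata] = await file.getMetadata();
 *     console.log(metadata.size);
 *   }
 * }
 *
 * inspect();
 * ```
 */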
3224//# sourceMappingURL=file.js.map