;
// Overload accepting options plus a callback, and a callback-only overload;
// see the primary `createResumableUpload` signature's documentation above.
createResumableUpload(options: CreateResumableUploadOptions, callback: CreateResumableUploadCallback): void;
createResumableUpload(callback: CreateResumableUploadCallback): void;
/**
* @typedef {object} CreateWriteStreamOptions Configuration options for File#createWriteStream().
* @property {string} [configPath] **This only applies to resumable
* uploads.** A full JSON file path to use with `gcs-resumable-upload`.
* This maps to the [configstore option by the same
* name](https://github.com/yeoman/configstore/tree/0df1ec950d952b1f0dfb39ce22af8e505dffc71a#configpath).
* @property {string} [contentType] Alias for
* `options.metadata.contentType`. If set to `auto`, the file name is used
* to determine the contentType.
* @property {string|boolean} [gzip] If true, automatically gzip the file.
* If set to `auto`, the contentType is used to determine if the file
* should be gzipped. This will set `options.metadata.contentEncoding` to
* `gzip` if necessary.
* @property {object} [metadata] See the examples below or
* [Objects: insert request
* body](https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON)
* for more details.
* @property {number} [offset] The starting byte of the upload stream, for
* resuming an interrupted upload. Defaults to 0.
* @property {string} [predefinedAcl] Apply a predefined set of access
* controls to this object.
*
* Acceptable values are:
* - **`authenticatedRead`** - Object owner gets `OWNER` access, and
* `allAuthenticatedUsers` get `READER` access.
*
* - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and
* project team owners get `OWNER` access.
*
* - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project
* team owners get `READER` access.
*
* - **`private`** - Object owner gets `OWNER` access.
*
* - **`projectPrivate`** - Object owner gets `OWNER` access, and project
* team members get access according to their roles.
*
* - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers`
* get `READER` access.
* @property {boolean} [private] Make the uploaded file private. (Alias for
* `options.predefinedAcl = 'private'`)
* @property {boolean} [public] Make the uploaded file public. (Alias for
* `options.predefinedAcl = 'publicRead'`)
* @property {boolean} [resumable] Force a resumable upload. NOTE: When
* working with streams, the file format and size are unknown until the
* stream is completely consumed. Because of this, it's best for you to be
* explicit about what makes sense given your input.
* @property {number} [timeout=60000] Set the HTTP request timeout in
* milliseconds. This option is not available for resumable uploads.
* Default: `60000`
* @property {string} [uri] The URI for an already-created resumable
* upload. See {@link File#createResumableUpload}.
* @property {string} [userProject] The ID of the project which will be
* billed for the request.
* @property {string|boolean} [validation] Possible values: `"md5"`,
* `"crc32c"`, or `false`. By default, data integrity is validated with a
* CRC32c checksum. You may use MD5 if preferred, but that hash is not
* supported for composite objects. An error will be raised if MD5 is
* specified but is not available. You may also choose to skip validation
* completely, however this is **not recommended**.
* NOTE: Validation is automatically skipped for objects that were
* uploaded using the `gzip` option and have already compressed content.
*/
/**
* Create a writable stream to overwrite the contents of the file in your
* bucket.
*
* A File object can also be used to create files for the first time.
*
* Resumable uploads are automatically enabled and must be shut off explicitly
* by setting `options.resumable` to `false`.
*
* Resumable uploads require write access to the $HOME directory. Through
* [`config-store`](https://www.npmjs.com/package/configstore), some metadata
* is stored. By default, if the directory is not writable, we will fall back
* to a simple upload. However, if you explicitly request a resumable upload,
* and we cannot write to the config directory, we will return a
* `ResumableUploadError`.
*
* There is some overhead when using a resumable upload that can cause
* noticeable performance degradation while uploading a series of small
* files. When uploading files less than 10MB, it is recommended that the
* resumable feature is disabled.
*
* For faster crc32c computation, you must manually install
* [`fast-crc32c`](https://www.npmjs.com/package/fast-crc32c):
*
*     $ npm install --save fast-crc32c
*
* NOTE: Writable streams will emit the `finish` event when the file is fully
* uploaded.
*
* @see [Upload Options (Simple or Resumable)]{@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload}
* @see [Objects: insert API Documentation]{@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert}
*
* @param {CreateWriteStreamOptions} [options] Configuration options.
* @returns {WritableStream} A writable stream; pipe the file contents into it.
*
* @example
* const fs = require('fs');
* const {Storage} = require('@google-cloud/storage');
* const storage = new Storage();
* const myBucket = storage.bucket('my-bucket');
*
* const file = myBucket.file('my-file');
*
* //-
* // Uploading a File
* //
* // Now, consider a case where we want to upload a file to your bucket. You
* // have the option of using {@link Bucket#upload}, but that is just
* // a convenience method which will do the following.
* //-
* fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg')
*   .pipe(file.createWriteStream())
*   .on('error', function(err) {})
*   .on('finish', function() {
*     // The file upload is complete.
*   });
*
* //-
* // Uploading a File with gzip compression
* //-
* fs.createReadStream('/Users/stephen/site/index.html')
*   .pipe(file.createWriteStream({ gzip: true }))
*   .on('error', function(err) {})
*   .on('finish', function() {
*     // The file upload is complete.
*   });
*
* //-
* // Downloading the file with `createReadStream` will automatically decode
* // the file.
* //-
*
* //-
* // Uploading a File with Metadata
* //
* // One last case you may run into is when you want to upload a file to your
* // bucket and set its metadata at the same time. Like above, you can use
* // {@link Bucket#upload} to do this, which is just a wrapper around
* // the following.
* //-
* fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg')
*   .pipe(file.createWriteStream({
*     metadata: {
*       contentType: 'image/jpeg',
*       metadata: {
*         custom: 'metadata'
*       }
*     }
*   }))
*   .on('error', function(err) {})
*   .on('finish', function() {
*     // The file upload is complete.
*   });
*/
createWriteStream(options?: CreateWriteStreamOptions): Writable;
/**
* Delete a failed resumable upload's file cache.
*
* Resumable file uploads cache a config file so that an interrupted upload
* can be restarted. In certain scenarios the resumable upload will not work
* and the cached upload file needs to be deleted before the same file can be
* uploaded again.
*
* Following are some of those scenarios:
*
* The resumable file upload failed even though the file was successfully
* saved to Google Cloud Storage, and the resumable file cache needs to be
* cleaned up in order to update the same file.
*
* The resumable file upload failed due to a pre-condition
* (i.e. the generation number did not match) and you want to upload the
* same file with a new generation number.
*
* @example
* const {Storage} = require('@google-cloud/storage');
* const storage = new Storage();
* const myBucket = storage.bucket('my-bucket');
*
* const file = myBucket.file('my-file', { generation: 0 });
* const contents = 'This is the contents of the file.';
*
* file.save(contents, function(err) {
*   if (err) {
*     file.deleteResumableCache();
*   }
* });
*
*/
deleteResumableCache(): void;
/**
* Convenience method to download the file's contents into memory.
*
* @param {DownloadOptions} [options] Configuration options.
* @param {DownloadCallback} [callback] Callback function.
* @returns {Promise<DownloadResponse>} Resolves with the file contents when
*     no callback is supplied.
*/
download(options?: DownloadOptions): Promise<DownloadResponse>;
download(options: DownloadOptions, callback: DownloadCallback): void;
download(callback: DownloadCallback): void;
/**
* The Storage API allows you to use a custom key for server-side encryption.
*
* @see [Customer-supplied Encryption Keys]{@link https://cloud.google.com/storage/docs/encryption#customer-supplied}
*
* @param {string|Buffer} encryptionKey An AES-256 encryption key.
* @returns {File} This File instance, for chaining.
*
* @example
* const crypto = require('crypto');
* const {Storage} = require('@google-cloud/storage');
* const storage = new Storage();
* const myBucket = storage.bucket('my-bucket');
*
* const encryptionKey = crypto.randomBytes(32);
*
* const fileWithCustomEncryption = myBucket.file('my-file');
* fileWithCustomEncryption.setEncryptionKey(encryptionKey);
*
* const fileWithoutCustomEncryption = myBucket.file('my-file');
*
* fileWithCustomEncryption.save('data', function(err) {
*   // Try to download with the File object that hasn't had
*   // `setEncryptionKey()` called:
*   fileWithoutCustomEncryption.download(function(err) {
*     // We will receive an error:
*     //   err.message === 'Bad Request'
*
*     // Try again with the File object we called `setEncryptionKey()` on:
*     fileWithCustomEncryption.download(function(err, contents) {
*       // contents.toString() === 'data'
*     });
*   });
* });
*
* @example include:samples/encryption.js
* region_tag:storage_upload_encrypted_file
* Example of uploading an encrypted file:
*
* @example include:samples/encryption.js
* region_tag:storage_download_encrypted_file
* Example of downloading an encrypted file:
*/
setEncryptionKey(encryptionKey: string | Buffer): this;
/**
* Look up this file's expiration date. NOTE(review): semantics inferred from
* the method name and the upstream library (retention-policy expiration) —
* confirm against the implementation.
*
* @param {GetExpirationDateCallback} [callback] Callback function.
* @returns {Promise<GetExpirationDateResponse>} When no callback is given.
*/
getExpirationDate(): Promise<GetExpirationDateResponse>;
getExpirationDate(callback: GetExpirationDateCallback): void;
/**
* Get a signed policy document to allow a user to upload data with a POST
* request. NOTE(review): the upstream library deprecates this method in
* favor of `generateSignedPostPolicyV2` — confirm the library version
* before documenting it as deprecated here.
*
* @param {GetSignedPolicyOptions} options Configuration options.
* @param {GetSignedPolicyCallback} [callback] Callback function.
* @returns {Promise<GetSignedPolicyResponse>} When no callback is given.
*/
getSignedPolicy(options: GetSignedPolicyOptions): Promise<GetSignedPolicyResponse>;
getSignedPolicy(options: GetSignedPolicyOptions, callback: GetSignedPolicyCallback): void;
getSignedPolicy(callback: GetSignedPolicyCallback): void;
/**
* Get a signed policy document (POST policy, v2) to allow a user to upload
* data with a POST request.
*
* @param {GenerateSignedPostPolicyV2Options} options Configuration options.
* @param {GenerateSignedPostPolicyV2Callback} [callback] Callback function.
* @returns {Promise<GenerateSignedPostPolicyV2Response>} When no callback is
*     given.
*/
generateSignedPostPolicyV2(options: GenerateSignedPostPolicyV2Options): Promise<GenerateSignedPostPolicyV2Response>;
generateSignedPostPolicyV2(options: GenerateSignedPostPolicyV2Options, callback: GenerateSignedPostPolicyV2Callback): void;
generateSignedPostPolicyV2(callback: GenerateSignedPostPolicyV2Callback): void;
/**
* Get a signed policy document (POST policy, v4) to allow a user to upload
* data with a POST request.
*
* @param {GenerateSignedPostPolicyV4Options} options Configuration options.
* @param {GenerateSignedPostPolicyV4Callback} [callback] Callback function.
* @returns {Promise<GenerateSignedPostPolicyV4Response>} When no callback is
*     given.
*/
generateSignedPostPolicyV4(options: GenerateSignedPostPolicyV4Options): Promise<GenerateSignedPostPolicyV4Response>;
generateSignedPostPolicyV4(options: GenerateSignedPostPolicyV4Options, callback: GenerateSignedPostPolicyV4Callback): void;
generateSignedPostPolicyV4(callback: GenerateSignedPostPolicyV4Callback): void;
/**
* Get a signed URL allowing limited-time access to the file.
*
* @param {GetSignedUrlConfig} cfg Configuration object.
* @param {GetSignedUrlCallback} [callback] Callback function.
* @returns {Promise<GetSignedUrlResponse>} When no callback is given.
*/
getSignedUrl(cfg: GetSignedUrlConfig): Promise<GetSignedUrlResponse>;
getSignedUrl(cfg: GetSignedUrlConfig, callback: GetSignedUrlCallback): void;
/**
* Check whether this file is publicly readable.
*
* @param {IsPublicCallback} [callback] Callback function.
* @returns {Promise<IsPublicResponse>} When no callback is given.
*/
isPublic(): Promise<IsPublicResponse>;
isPublic(callback: IsPublicCallback): void;
/**
* Make this file private — remove public access. NOTE(review): exact ACL
* semantics live in the implementation; confirm there.
*
* @param {MakeFilePrivateOptions} [options] Configuration options.
* @param {MakeFilePrivateCallback} [callback] Callback function.
* @returns {Promise<MakeFilePrivateResponse>} When no callback is given.
*/
makePrivate(options?: MakeFilePrivateOptions): Promise<MakeFilePrivateResponse>;
makePrivate(callback: MakeFilePrivateCallback): void;
makePrivate(options: MakeFilePrivateOptions, callback: MakeFilePrivateCallback): void;
/**
* Make this file publicly readable (see also {@link File#publicUrl}).
*
* @param {MakeFilePublicCallback} [callback] Callback function.
* @returns {Promise<MakeFilePublicResponse>} When no callback is given.
*/
makePublic(): Promise<MakeFilePublicResponse>;
makePublic(callback: MakeFilePublicCallback): void;
/**
* The public URL of this File.
* Use {@link File#makePublic} to enable anonymous access via the returned URL.
*
* @returns {string} The public URL of the file.
*
* @example
* const {Storage} = require('@google-cloud/storage');
* const storage = new Storage();
* const bucket = storage.bucket('albums');
* const file = bucket.file('my-file');
*
* // publicUrl will be "https://storage.googleapis.com/albums/my-file"
* const publicUrl = file.publicUrl();
*/
publicUrl(): string;
/**
* Move this file to another location. The destination may be a file name
* (same bucket), a Bucket, or another File object. NOTE(review): the
* upstream implementation performs a copy followed by a delete — confirm.
*
* @param {string|Bucket|File} destination Destination of the move.
* @param {MoveOptions} [options] Configuration options.
* @param {MoveCallback} [callback] Callback function.
* @returns {Promise<MoveResponse>} When no callback is given.
*/
move(destination: string | Bucket | File, options?: MoveOptions): Promise<MoveResponse>;
move(destination: string | Bucket | File, callback: MoveCallback): void;
move(destination: string | Bucket | File, options: MoveOptions, callback: MoveCallback): void;
/**
* Rename this file. The destination may be a file name or another File
* object.
*
* @param {string|File} destinationFile Destination file of the rename.
* @param {RenameOptions} [options] Configuration options.
* @param {RenameCallback} [callback] Callback function.
* @returns {Promise<RenameResponse>} When no callback is given.
*/
rename(destinationFile: string | File, options?: RenameOptions): Promise<RenameResponse>;
rename(destinationFile: string | File, callback: RenameCallback): void;
rename(destinationFile: string | File, options: RenameOptions, callback: RenameCallback): void;
/**
* Make an authenticated API request. NOTE(review): presumably delegates to
* the inherited ServiceObject request machinery — confirm against the
* implementation.
*
* @param {DecorateRequestOptions} reqOpts Request options.
* @param {BodyResponseCallback} [callback] Callback function.
* @returns {Promise<[ResponseBody, Metadata]>} When no callback is given.
*/
request(reqOpts: DecorateRequestOptions): Promise<[ResponseBody, Metadata]>;
request(reqOpts: DecorateRequestOptions, callback: BodyResponseCallback): void;
/**
* Rotate the customer-supplied encryption key for this file (see
* {@link File#setEncryptionKey}).
*
* @param {RotateEncryptionKeyOptions} [options] Configuration options.
* @param {RotateEncryptionKeyCallback} [callback] Callback function.
* @returns {Promise<RotateEncryptionKeyResponse>} When no callback is given.
*/
rotateEncryptionKey(options?: RotateEncryptionKeyOptions): Promise<RotateEncryptionKeyResponse>;
rotateEncryptionKey(callback: RotateEncryptionKeyCallback): void;
rotateEncryptionKey(options: RotateEncryptionKeyOptions, callback: RotateEncryptionKeyCallback): void;
/**
* Write arbitrary data to the file. NOTE(review): presumably a convenience
* wrapper around {@link File#createWriteStream} — confirm against the
* implementation.
*
* @param {string|Buffer} data The data to write.
* @param {SaveOptions} [options] Configuration options.
* @param {SaveCallback} [callback] Callback function.
* @returns {Promise<void>} When no callback is given.
*/
save(data: string | Buffer, options?: SaveOptions): Promise<void>;
save(data: string | Buffer, callback: SaveCallback): void;
save(data: string | Buffer, options: SaveOptions, callback: SaveCallback): void;
/**
* Set the storage class for this file.
*
* @param {string} storageClass The new storage class (e.g. `"nearline"`).
* @param {SetStorageClassOptions} [options] Configuration options.
* @param {SetStorageClassCallback} [callback] Callback function.
* @returns {Promise<SetStorageClassResponse>} When no callback is given.
*/
setStorageClass(storageClass: string, options?: SetStorageClassOptions): Promise<SetStorageClassResponse>;
setStorageClass(storageClass: string, options: SetStorageClassOptions, callback: SetStorageClassCallback): void;
// Callback is required here: if it were optional, this void overload would
// also match calls that omit it, which the Promise overload above handles.
setStorageClass(storageClass: string, callback: SetStorageClassCallback): void;
/**
* Set a user project to be billed for all requests made from this File
* object.
*
* @param {string} userProject The ID of the project to bill for all
*     requests made via this File object.
*
* @example
* const {Storage} = require('@google-cloud/storage');
* const storage = new Storage();
* const bucket = storage.bucket('albums');
* const file = bucket.file('my-file');
*
* file.setUserProject('grape-spaceship-123');
*/
setUserProject(userProject: string): void;
/**
* This creates a gcs-resumable-upload upload stream.
*
* @see [gcs-resumable-upload]{@link https://github.com/stephenplusplus/gcs-resumable-upload}
*
* @param {Duplexify} dup - Duplexify stream of data to pipe to the file.
* @param {CreateResumableUploadOptions} options - Configuration object.
*
* @private
*/
startResumableUpload_(dup: Duplexify, options: CreateResumableUploadOptions): void;
/**
* Takes a readable stream and pipes it to a remote file. Unlike
* `startResumableUpload_`, which uses the resumable upload technique, this
* method uses a simple upload (all or nothing).
*
* @param {Duplexify} dup - Duplexify stream of data to pipe to the file.
* @param {CreateWriteStreamOptions=} options - Configuration object.
*
* @private
*/
startSimpleUpload_(dup: Duplexify, options?: CreateWriteStreamOptions): void;
}
/**
* Reference to the {@link File} class, re-exported as a named export.
* @name module:@google-cloud/storage.File
* @see File
*/
export { File };