/*!
 * Copyright 2022 Google LLC. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
import { Bucket, UploadOptions, UploadResponse } from './bucket.js';
import { DownloadOptions, DownloadResponse, File } from './file.js';
import { GaxiosResponse } from 'gaxios';
export interface UploadManyFilesOptions {
    concurrencyLimit?: number;
    customDestinationBuilder?(path: string, options: UploadManyFilesOptions): string;
    skipIfExists?: boolean;
    prefix?: string;
    passthroughOptions?: Omit<UploadOptions, 'destination'>;
}
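/*
 * Illustrative sketch (not part of this declaration file): passing UploadManyFilesOptions
 * to TransferManager#uploadManyFiles. The bucket name and local paths are assumptions,
 * and the setup mirrors the documented examples on the class below.
 *
 * const {Storage, TransferManager} = require('@google-cloud/storage');
 * const transferManager = new TransferManager(new Storage().bucket('my-bucket'));
 * await transferManager.uploadManyFiles('/local/directory', {
 *   concurrencyLimit: 5,   // run at most 5 uploads at a time
 *   skipIfExists: true,    // sets the ifGenerationMatch = 0 precondition on each upload
 *   prefix: 'backups',     // prefix added to the uploaded object names
 * });
 *
 * // A customDestinationBuilder can instead rewrite each destination path; the rewrite
 * // rule shown here is purely hypothetical:
 * // customDestinationBuilder: (localPath) => `mirrored/${localPath}`,
 */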
export interface DownloadManyFilesOptions {
    concurrencyLimit?: number;
    prefix?: string;
    stripPrefix?: string;
    passthroughOptions?: DownloadOptions;
}
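/*
 * Illustrative sketch: passing DownloadManyFilesOptions to
 * TransferManager#downloadManyFiles, reusing the transferManager set up in the sketch
 * above. The prefix values are assumptions.
 *
 * await transferManager.downloadManyFiles('remote-folder', {
 *   concurrencyLimit: 10,
 *   stripPrefix: 'remote-folder/',  // prefix removed from the downloaded file paths
 *   prefix: 'local-copy',           // prefix added to the downloaded file paths
 * });
 */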
export interface DownloadFileInChunksOptions {
    concurrencyLimit?: number;
    chunkSizeBytes?: number;
    destination?: string;
    validation?: 'crc32c' | false;
    noReturnData?: boolean;
}
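/*
 * Illustrative sketch: downloading one large object in parallel chunks with
 * DownloadFileInChunksOptions, reusing the transferManager from the first sketch.
 * The object name, chunk size, and destination path are assumptions.
 *
 * await transferManager.downloadFileInChunks('large-file.txt', {
 *   concurrencyLimit: 4,
 *   chunkSizeBytes: 32 * 1024 * 1024,  // 32 MiB per chunk
 *   destination: '/tmp/large-file.txt',
 *   validation: 'crc32c',              // CRC32C check when the download completes
 *   noReturnData: true,                // avoid buffering the whole file in memory
 * });
 */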
export interface UploadFileInChunksOptions {
    concurrencyLimit?: number;
    chunkSizeBytes?: number;
    uploadName?: string;
    maxQueueSize?: number;
    uploadId?: string;
    autoAbortFailure?: boolean;
    partsMap?: Map<number, string>;
    validation?: 'md5' | false;
    headers?: {
        [key: string]: string;
    };
}
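/*
 * Illustrative sketch: uploading one large file as a chunked (multipart) upload with
 * UploadFileInChunksOptions, reusing the transferManager from the first sketch. The
 * sizes and names are assumptions; uploadId and partsMap are only needed when resuming
 * (see MultiPartUploadError below).
 *
 * await transferManager.uploadFileInChunks('/local/path/large-file.txt', {
 *   concurrencyLimit: 4,
 *   chunkSizeBytes: 64 * 1024 * 1024,  // 64 MiB parts
 *   uploadName: 'backups/large-file.txt',
 *   maxQueueSize: 8,                   // hold at most 8 parts in memory
 *   validation: 'md5',
 * });
 */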
export interface MultiPartUploadHelper {
    bucket: Bucket;
    fileName: string;
    uploadId?: string;
    partsMap?: Map<number, string>;
    initiateUpload(headers?: {
        [key: string]: string;
    }): Promise<void>;
    uploadPart(partNumber: number, chunk: Buffer, validation?: 'md5' | false): Promise<void>;
    completeUpload(): Promise<GaxiosResponse | undefined>;
    abortUpload(): Promise<void>;
}
export type MultiPartHelperGenerator = (bucket: Bucket, fileName: string, uploadId?: string, partsMap?: Map<number, string>) => MultiPartUploadHelper;
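/*
 * Illustrative skeleton (hypothetical; most callers never need this): the rough shape of
 * a custom MultiPartHelperGenerator and the MultiPartUploadHelper it returns, passed as
 * the third argument of uploadFileInChunks. All method bodies are placeholders only.
 *
 * const myGenerator = (bucket, fileName, uploadId, partsMap) => ({
 *   bucket,
 *   fileName,
 *   uploadId,
 *   partsMap: partsMap || new Map(),
 *   async initiateUpload(headers) {
 *     // start an upload session and record this.uploadId
 *   },
 *   async uploadPart(partNumber, chunk, validation) {
 *     // upload one chunk and record its identifier in this.partsMap
 *   },
 *   async completeUpload() {
 *     // finalize the upload and return the response (or undefined)
 *   },
 *   async abortUpload() {
 *     // cancel the upload session
 *   },
 * });
 * await transferManager.uploadFileInChunks('/local/path/large-file.txt', {}, myGenerator);
 */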
export declare class MultiPartUploadError extends Error {
    private uploadId;
    private partsMap;
    constructor(message: string, uploadId: string, partsMap: Map<number, string>);
}
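/*
 * Illustrative sketch: resuming a failed chunked upload with the state carried on a
 * MultiPartUploadError (the class declared above), reusing the transferManager from the
 * first sketch. Note that uploadId and partsMap are declared private here, so a
 * TypeScript caller may need a cast such as `(e as any).uploadId` to read them; the
 * file path is an assumption.
 *
 * try {
 *   await transferManager.uploadFileInChunks('/local/path/large-file.txt');
 * } catch (e) {
 *   if (e instanceof MultiPartUploadError) {
 *     await transferManager.uploadFileInChunks('/local/path/large-file.txt', {
 *       uploadId: e.uploadId,   // resume the same multipart upload session
 *       partsMap: e.partsMap,   // parts that were already uploaded successfully
 *     });
 *   }
 * }
 */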
/**
 * Create a TransferManager object to perform parallel transfer operations on a Cloud Storage bucket.
 *
 * @class
 * @hideconstructor
 *
 * @param {Bucket} bucket A {@link Bucket} instance
 *
 */
export declare class TransferManager {
    bucket: Bucket;
    constructor(bucket: Bucket);
    /**
     * @typedef {object} UploadManyFilesOptions
     * @property {number} [concurrencyLimit] The number of concurrently executing promises
     * to use when uploading the files.
     * @property {Function} [customDestinationBuilder] A function that will take the current path of a local file
     * and return a string representing a custom path to be used to upload the file to GCS.
     * @property {boolean} [skipIfExists] Do not upload the file if it already exists in
     * the bucket. This will set the precondition ifGenerationMatch = 0.
     * @property {string} [prefix] A prefix to append to all of the uploaded files.
     * @property {object} [passthroughOptions] {@link UploadOptions} Options to be passed through
     * to each individual upload operation.
     *
     */
    /**
     * Upload multiple files in parallel to the bucket. This is a convenience method
     * that utilizes {@link Bucket#upload} to perform the upload.
     *
     * @param {array | string} [filePathsOrDirectory] An array of fully qualified paths to the files to be uploaded to the bucket, or a directory name.
     * If a directory name is provided, the directory will be recursively walked and all files will be added to the upload list.
     * @param {UploadManyFilesOptions} [options] Configuration options.
     * @returns {Promise<UploadResponse[]>}
     *
     * @example
     * ```
     * const {Storage} = require('@google-cloud/storage');
     * const storage = new Storage();
     * const bucket = storage.bucket('my-bucket');
     * const transferManager = new TransferManager(bucket);
     *
     * //-
     * // Upload multiple files in parallel.
     * //-
     * const response = await transferManager.uploadManyFiles(['/local/path/file1.txt', '/local/path/file2.txt']);
     * // Your bucket now contains:
     * // - "local/path/file1.txt" (with the contents of '/local/path/file1.txt')
     * // - "local/path/file2.txt" (with the contents of '/local/path/file2.txt')
     * const response = await transferManager.uploadManyFiles('/local/directory');
     * // Your bucket will now contain all files contained in '/local/directory' maintaining the subdirectory structure.
     * ```
     *
     */
    uploadManyFiles(filePathsOrDirectory: string[] | string, options?: UploadManyFilesOptions): Promise<UploadResponse[]>;
    /**
     * @typedef {object} DownloadManyFilesOptions
     * @property {number} [concurrencyLimit] The number of concurrently executing promises
     * to use when downloading the files.
     * @property {string} [prefix] A prefix to append to all of the downloaded files.
     * @property {string} [stripPrefix] A prefix to remove from all of the downloaded files.
     * @property {object} [passthroughOptions] {@link DownloadOptions} Options to be passed through
     * to each individual download operation.
     *
     */
    /**
     * Download multiple files in parallel to the local filesystem. This is a convenience method
     * that utilizes {@link File#download} to perform the download.
     *
     * @param {array | string} [filesOrFolder] An array of file name strings or file objects to be downloaded. If
     * a string is provided this will be treated as a GCS prefix and all files with that prefix will be downloaded.
     * @param {DownloadManyFilesOptions} [options] Configuration options. Setting options.prefix or options.stripPrefix
     * or options.passthroughOptions.destination will cause the downloaded files to be written to the file system
     * instead of being returned as a buffer.
     * @returns {Promise<DownloadResponse[]>}
     *
     * @example
     * ```
     * const {Storage} = require('@google-cloud/storage');
     * const storage = new Storage();
     * const bucket = storage.bucket('my-bucket');
     * const transferManager = new TransferManager(bucket);
     *
     * //-
     * // Download multiple files in parallel.
     * //-
     * const response = await transferManager.downloadManyFiles(['file1.txt', 'file2.txt']);
     * // The following files have been downloaded:
     * // - "file1.txt" (with the contents from my-bucket.file1.txt)
     * // - "file2.txt" (with the contents from my-bucket.file2.txt)
     * const response = await transferManager.downloadManyFiles([bucket.file('file1.txt'), bucket.file('file2.txt')]);
     * // The following files have been downloaded:
     * // - "file1.txt" (with the contents from my-bucket.file1.txt)
     * // - "file2.txt" (with the contents from my-bucket.file2.txt)
     * const response = await transferManager.downloadManyFiles('test-folder');
     * // All files with GCS prefix of 'test-folder' have been downloaded.
     * ```
     *
     */
    downloadManyFiles(filesOrFolder: File[] | string[] | string, options?: DownloadManyFilesOptions): Promise<void | DownloadResponse[]>;
    /**
     * @typedef {object} DownloadFileInChunksOptions
     * @property {number} [concurrencyLimit] The number of concurrently executing promises
     * to use when downloading the file.
     * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be downloaded.
     * @property {string | boolean} [validation] Whether or not to perform a CRC32C validation check when download is complete.
     * @property {boolean} [noReturnData] Whether or not to return the downloaded data. Setting this to `true` is useful for files whose size will not fit into memory.
     *
     */
    /**
     * Download a large file in chunks utilizing parallel download operations. This is a convenience method
     * that utilizes {@link File#download} to perform the download.
     *
     * @param {File | string} fileOrName {@link File} to download.
     * @param {DownloadFileInChunksOptions} [options] Configuration options.
     * @returns {Promise<void | DownloadResponse>}
     *
     * @example
     * ```
     * const {Storage} = require('@google-cloud/storage');
     * const storage = new Storage();
     * const bucket = storage.bucket('my-bucket');
     * const transferManager = new TransferManager(bucket);
     *
     * //-
     * // Download a large file in chunks utilizing parallel operations.
     * //-
     * const response = await transferManager.downloadFileInChunks(bucket.file('large-file.txt'));
     * // Your local directory now contains:
     * // - "large-file.txt" (with the contents from my-bucket.large-file.txt)
     * ```
     *
     */
    downloadFileInChunks(fileOrName: File | string, options?: DownloadFileInChunksOptions): Promise<void | DownloadResponse>;
    /**
     * @typedef {object} UploadFileInChunksOptions
     * @property {number} [concurrencyLimit] The number of concurrently executing promises
     * to use when uploading the file.
     * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be uploaded.
     * @property {string} [uploadName] Name of the file when saving to GCS. If omitted the name is taken from the file path.
     * @property {number} [maxQueueSize] The number of chunks to be uploaded to hold in memory concurrently. If not specified
     * defaults to the specified concurrency limit.
     * @property {string} [uploadId] If specified attempts to resume a previous upload.
     * @property {Map} [partsMap] If specified alongside uploadId, attempts to resume a previous upload from the last chunk
     * specified in partsMap.
     * @property {object} [headers] Headers to be sent when initiating the multipart upload.
     * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-multipart#request_headers| Request Headers: Initiate a Multipart Upload}
     * @property {boolean} [autoAbortFailure] Whether an in-progress upload session will be automatically aborted upon failure. If not set,
     * failed uploads will be automatically aborted.
     *
     */
    /**
     * Upload a large file in chunks utilizing parallel upload operations. If the upload fails, an uploadId and
     * a map containing all the successfully uploaded parts will be returned to the caller. These arguments can be used to
     * resume the upload.
     *
     * @param {string} [filePath] The path of the file to be uploaded.
     * @param {UploadFileInChunksOptions} [options] Configuration options.
     * @param {MultiPartHelperGenerator} [generator] A function that will return a type that implements the MPU interface. Most users will not need to use this.
     * @returns {Promise<GaxiosResponse | undefined>} If successful, a promise resolving to the response of the completed upload; otherwise an error containing the message, uploadId, and parts map is thrown.
     *
     * @example
     * ```
     * const {Storage} = require('@google-cloud/storage');
     * const storage = new Storage();
     * const bucket = storage.bucket('my-bucket');
     * const transferManager = new TransferManager(bucket);
     *
     * //-
     * // Upload a large file in chunks utilizing parallel operations.
     * //-
     * const response = await transferManager.uploadFileInChunks('large-file.txt');
     * // Your bucket now contains:
     * // - "large-file.txt"
     * ```
     *
     */
    uploadFileInChunks(filePath: string, options?: UploadFileInChunksOptions, generator?: MultiPartHelperGenerator): Promise<GaxiosResponse | undefined>;
    private getPathsFromDirectory;
}