/*!
 * Copyright 2022 Google LLC. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/// <reference types="node" />
import { Bucket, UploadOptions, UploadResponse } from './bucket.js';
import { DownloadOptions, DownloadResponse, File } from './file.js';
import { GaxiosResponse } from 'gaxios';
export interface UploadManyFilesOptions {
    concurrencyLimit?: number;
    skipIfExists?: boolean;
    prefix?: string;
    passthroughOptions?: Omit<UploadOptions, 'destination'>;
}
export interface DownloadManyFilesOptions {
    concurrencyLimit?: number;
    prefix?: string;
    stripPrefix?: string;
    passthroughOptions?: DownloadOptions;
}
export interface DownloadFileInChunksOptions {
    concurrencyLimit?: number;
    chunkSizeBytes?: number;
    destination?: string;
    validation?: 'crc32c' | false;
    noReturnData?: boolean;
}
export interface UploadFileInChunksOptions {
    concurrencyLimit?: number;
    chunkSizeBytes?: number;
    uploadName?: string;
    maxQueueSize?: number;
    uploadId?: string;
    autoAbortFailure?: boolean;
    partsMap?: Map<number, string>;
    validation?: 'md5' | false;
    headers?: {
        [key: string]: string;
    };
}
export interface MultiPartUploadHelper {
    bucket: Bucket;
    fileName: string;
    uploadId?: string;
    partsMap?: Map<number, string>;
    initiateUpload(headers?: {
        [key: string]: string;
    }): Promise<void>;
    uploadPart(partNumber: number, chunk: Buffer, validation?: 'md5' | false): Promise<void>;
    completeUpload(): Promise<GaxiosResponse | undefined>;
    abortUpload(): Promise<void>;
}
export type MultiPartHelperGenerator = (bucket: Bucket, fileName: string, uploadId?: string, partsMap?: Map<number, string>) => MultiPartUploadHelper;
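/*
 * A minimal sketch of a custom MultiPartHelperGenerator, assuming it is passed as the
 * third argument to TransferManager#uploadFileInChunks (most users will not need this)
 * and that a transferManager has been constructed as in the examples below. The method
 * bodies are hypothetical placeholders, not the library's implementation.
 *
 * ```
 * const myGenerator: MultiPartHelperGenerator = (bucket, fileName, uploadId, partsMap) => ({
 *   bucket,
 *   fileName,
 *   uploadId,
 *   partsMap: partsMap ?? new Map<number, string>(),
 *   async initiateUpload(headers) {
 *     // e.g. start a multipart upload session and remember its uploadId
 *   },
 *   async uploadPart(partNumber, chunk, validation) {
 *     // e.g. upload `chunk` as part `partNumber` and record its ETag in the parts map
 *   },
 *   async completeUpload() {
 *     // e.g. finalize the upload from the recorded parts
 *     return undefined;
 *   },
 *   async abortUpload() {
 *     // e.g. discard any parts uploaded so far
 *   },
 * });
 * await transferManager.uploadFileInChunks('/local/large-file.txt', {}, myGenerator);
 * ```
 */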
export declare class MultiPartUploadError extends Error {
    private uploadId;
    private partsMap;
    constructor(message: string, uploadId: string, partsMap: Map<number, string>);
}
/**
 * Create a TransferManager object to perform parallel transfer operations on a Cloud Storage bucket.
 *
 * @class
 * @hideconstructor
 *
 * @param {Bucket} bucket A {@link Bucket} instance
 *
 */
export declare class TransferManager {
    bucket: Bucket;
    constructor(bucket: Bucket);
    /**
     * @typedef {object} UploadManyFilesOptions
     * @property {number} [concurrencyLimit] The number of concurrently executing promises
     * to use when uploading the files.
     * @property {boolean} [skipIfExists] Do not upload the file if it already exists in
     * the bucket. This will set the precondition ifGenerationMatch = 0.
     * @property {string} [prefix] A prefix to append to all of the uploaded files.
     * @property {object} [passthroughOptions] {@link UploadOptions} Options to be passed through
     * to each individual upload operation.
     *
     */
    /**
     * Upload multiple files in parallel to the bucket. This is a convenience method
     * that utilizes {@link Bucket#upload} to perform the upload.
     *
     * @param {array | string} [filePathsOrDirectory] An array of fully qualified paths to the files to be uploaded
     * to the bucket, or a directory name. If a directory name is provided, the directory will be recursively walked
     * and all files within it will be added to the upload list.
     * @param {UploadManyFilesOptions} [options] Configuration options.
     * @returns {Promise<UploadResponse[]>}
     *
     * @example
     * ```
     * const {Storage} = require('@google-cloud/storage');
     * const storage = new Storage();
     * const bucket = storage.bucket('my-bucket');
     * const transferManager = new TransferManager(bucket);
     *
     * //-
     * // Upload multiple files in parallel.
     * //-
     * const response = await transferManager.uploadManyFiles(['/local/path/file1.txt', '/local/path/file2.txt']);
     * // Your bucket now contains:
     * // - "local/path/file1.txt" (with the contents of '/local/path/file1.txt')
     * // - "local/path/file2.txt" (with the contents of '/local/path/file2.txt')
     * await transferManager.uploadManyFiles('/local/directory');
     * // Your bucket will now contain all files contained in '/local/directory' maintaining the subdirectory structure.
     * ```
     *
     */
    uploadManyFiles(filePathsOrDirectory: string[] | string, options?: UploadManyFilesOptions): Promise<UploadResponse[]>;
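    /*
     * A minimal sketch of uploadManyFiles with UploadManyFilesOptions, assuming the
     * transferManager from the example above; the option values are illustrative placeholders.
     *
     * ```
     * await transferManager.uploadManyFiles('/local/directory', {
     *   concurrencyLimit: 4,              // run at most 4 uploads at a time
     *   skipIfExists: true,               // sets ifGenerationMatch = 0 on each upload
     *   prefix: 'backups',                // prefix added to the uploaded file names
     *   passthroughOptions: {gzip: true}, // forwarded to each Bucket#upload call
     * });
     * ```
     */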
    /**
     * @typedef {object} DownloadManyFilesOptions
     * @property {number} [concurrencyLimit] The number of concurrently executing promises
     * to use when downloading the files.
     * @property {string} [prefix] A prefix to append to all of the downloaded files.
     * @property {string} [stripPrefix] A prefix to remove from all of the downloaded files.
     * @property {object} [passthroughOptions] {@link DownloadOptions} Options to be passed through
     * to each individual download operation.
     *
     */
    /**
     * Download multiple files in parallel to the local filesystem. This is a convenience method
     * that utilizes {@link File#download} to perform the download.
     *
     * @param {array | string} [filesOrFolder] An array of file name strings or file objects to be downloaded. If
     * a string is provided, it will be treated as a GCS prefix and all files with that prefix will be downloaded.
     * @param {DownloadManyFilesOptions} [options] Configuration options.
     * @returns {Promise<DownloadResponse[]>}
     *
     * @example
     * ```
     * const {Storage} = require('@google-cloud/storage');
     * const storage = new Storage();
     * const bucket = storage.bucket('my-bucket');
     * const transferManager = new TransferManager(bucket);
     *
     * //-
     * // Download multiple files in parallel.
     * //-
     * const response = await transferManager.downloadManyFiles(['file1.txt', 'file2.txt']);
     * // The following files have been downloaded:
     * // - "file1.txt" (with the contents from my-bucket.file1.txt)
     * // - "file2.txt" (with the contents from my-bucket.file2.txt)
     * await transferManager.downloadManyFiles([bucket.file('file1.txt'), bucket.file('file2.txt')]);
     * // The following files have been downloaded:
     * // - "file1.txt" (with the contents from my-bucket.file1.txt)
     * // - "file2.txt" (with the contents from my-bucket.file2.txt)
     * await transferManager.downloadManyFiles('test-folder');
     * // All files with the GCS prefix 'test-folder' have been downloaded.
     * ```
     *
     */
    downloadManyFiles(filesOrFolder: File[] | string[] | string, options?: DownloadManyFilesOptions): Promise<void | DownloadResponse[]>;
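    /*
     * A minimal sketch of downloadManyFiles with DownloadManyFilesOptions, assuming the
     * transferManager from the example above; names and option values are illustrative placeholders.
     *
     * ```
     * await transferManager.downloadManyFiles('test-folder', {
     *   concurrencyLimit: 8,         // run at most 8 downloads at a time
     *   stripPrefix: 'test-folder/', // remove this prefix from the local file names
     *   prefix: 'downloads',         // prefix added to the local file names
     *   passthroughOptions: {validation: 'crc32c'}, // forwarded to each File#download call
     * });
     * ```
     */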
    /**
     * @typedef {object} DownloadFileInChunksOptions
     * @property {number} [concurrencyLimit] The number of concurrently executing promises
     * to use when downloading the file.
     * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be downloaded.
     * @property {string} [destination] The path to which the downloaded file should be written.
     * @property {string | boolean} [validation] Whether or not to perform a CRC32C validation check when download is complete.
     * @property {boolean} [noReturnData] Whether or not to return the downloaded data. A `true` value here would be useful for files with a size that will not fit into memory.
     *
     */
    /**
     * Download a large file in chunks utilizing parallel download operations. This is a convenience method
     * that utilizes {@link File#download} to perform the download.
     *
     * @param {File | string} fileOrName {@link File} to download.
     * @param {DownloadFileInChunksOptions} [options] Configuration options.
     * @returns {Promise<void | DownloadResponse>}
     *
     * @example
     * ```
     * const {Storage} = require('@google-cloud/storage');
     * const storage = new Storage();
     * const bucket = storage.bucket('my-bucket');
     * const transferManager = new TransferManager(bucket);
     *
     * //-
     * // Download a large file in chunks utilizing parallel operations.
     * //-
     * const response = await transferManager.downloadFileInChunks(bucket.file('large-file.txt'));
     * // Your local directory now contains:
     * // - "large-file.txt" (with the contents from my-bucket.large-file.txt)
     * ```
     *
     */
    downloadFileInChunks(fileOrName: File | string, options?: DownloadFileInChunksOptions): Promise<void | DownloadResponse>;
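    /*
     * A minimal sketch of downloadFileInChunks with DownloadFileInChunksOptions, assuming the
     * transferManager from the example above; paths and values are illustrative placeholders.
     *
     * ```
     * await transferManager.downloadFileInChunks('large-file.txt', {
     *   chunkSizeBytes: 32 * 1024 * 1024,   // download in 32 MiB ranges
     *   concurrencyLimit: 4,                // run at most 4 ranged downloads at a time
     *   destination: '/tmp/large-file.txt', // where to write the assembled file
     *   validation: 'crc32c',               // CRC32C check once the download completes
     *   noReturnData: true,                 // do not buffer the file contents in memory
     * });
     * ```
     */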
    /**
     * @typedef {object} UploadFileInChunksOptions
     * @property {number} [concurrencyLimit] The number of concurrently executing promises
     * to use when uploading the file.
     * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be uploaded.
     * @property {string} [uploadName] Name of the file when saving to GCS. If omitted, the name is taken from the file path.
     * @property {number} [maxQueueSize] The maximum number of chunks to hold in memory concurrently while uploading. If not
     * specified, defaults to the specified concurrency limit.
     * @property {string} [uploadId] If specified, attempts to resume a previous upload.
     * @property {Map} [partsMap] If specified alongside uploadId, attempts to resume a previous upload from the last chunk
     * specified in partsMap.
     * @property {object} [headers] Headers to be sent when initiating the multipart upload.
     * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-multipart#request_headers| Request Headers: Initiate a Multipart Upload}
     * @property {boolean} [autoAbortFailure] Whether an in-progress upload session will be automatically aborted upon failure. If not set,
     * the upload will be automatically aborted upon failure.
     *
     */
    /**
     * Upload a large file in chunks utilizing parallel upload operations. If the upload fails, an uploadId and
     * a map containing all of the successfully uploaded parts will be returned to the caller. These arguments can be used to
     * resume the upload.
     *
     * @param {string} [filePath] The path of the file to be uploaded.
     * @param {UploadFileInChunksOptions} [options] Configuration options.
     * @param {MultiPartHelperGenerator} [generator] A function that returns an object implementing the MultiPartUploadHelper interface. Most users will not need to use this.
     * @returns {Promise<GaxiosResponse | undefined>} If successful, a promise resolving when the upload completes; otherwise an error containing the message, uploadId, and parts map.
     *
     * @example
     * ```
     * const {Storage} = require('@google-cloud/storage');
     * const storage = new Storage();
     * const bucket = storage.bucket('my-bucket');
     * const transferManager = new TransferManager(bucket);
     *
     * //-
     * // Upload a large file in chunks utilizing parallel operations.
     * //-
     * const response = await transferManager.uploadFileInChunks('large-file.txt');
     * // Your bucket now contains:
     * // - "large-file.txt"
     * ```
     *
     */
    uploadFileInChunks(filePath: string, options?: UploadFileInChunksOptions, generator?: MultiPartHelperGenerator): Promise<GaxiosResponse | undefined>;
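    /*
     * A minimal sketch of resuming a chunked upload with UploadFileInChunksOptions, assuming the
     * transferManager from the example above and an uploadId and parts map saved from a previously
     * failed attempt; savedUploadId and savedPartsMap are placeholders.
     *
     * ```
     * try {
     *   await transferManager.uploadFileInChunks('/local/large-file.txt', {
     *     chunkSizeBytes: 64 * 1024 * 1024,
     *     uploadName: 'large-file.txt',
     *     validation: 'md5',
     *   });
     * } catch (e) {
     *   if (e instanceof MultiPartUploadError) {
     *     // Retry, supplying the uploadId and partsMap associated with the failed attempt
     *     // to resume from the last successfully uploaded chunk.
     *     await transferManager.uploadFileInChunks('/local/large-file.txt', {
     *       uploadName: 'large-file.txt',
     *       uploadId: savedUploadId,   // placeholder
     *       partsMap: savedPartsMap,   // placeholder
     *     });
     *   }
     * }
     * ```
     */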
    private getPathsFromDirectory;
}