import '@smithy/md5-js';
import '@aws-amplify/core/internals/aws-client-utils';
import '../../utils/client/runtime/s3TransferHandler/fetch.mjs';
import 'fast-xml-parser';
import '../../utils/client/runtime/s3TransferHandler/xhr.mjs';
import 'buffer';
import '@aws-amplify/core/internals/utils';
import { assertValidationError } from '../../../../errors/utils/assertValidationError.mjs';
import { StorageValidationErrorCode } from '../../../../errors/types/validation.mjs';
import { MAX_OBJECT_SIZE, DEFAULT_PART_SIZE } from '../../utils/constants.mjs';
import { createUploadTask } from '../../utils/transferTask.mjs';
import { byteLength } from './byteLength.mjs';
import { putObjectJob } from './putObjectJob.mjs';
import { getMultipartUploadHandlers } from './multipart/uploadHandlers.mjs';

// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
/**
 * Uploads data to the specified S3 object. By default, it uses a single PUT operation when the data is no larger
 * than 5 MB; otherwise it uses a multipart upload. If the data length cannot be determined, it always uses a
 * multipart upload.
 *
 * Limitations:
 * * Maximum object size is 5 TB.
 * * Maximum object size is 50 GB if the size cannot be determined before the upload.
 *
 * @param input - The UploadDataInput object.
 * @returns A cancelable and resumable task exposing a result promise from its `result`
 * property.
 * @throws service: {@link S3Exception} - thrown when checking for existence of the object.
 * @throws validation: {@link StorageValidationErrorCode} - Validation errors.
 *
 * @example
 * ```ts
 * // Upload a file to an S3 bucket
 * await uploadData({ key, data: file, options: {
 *   onProgress, // Optional progress callback.
 * } }).result;
 * ```
 * @example
 * ```ts
 * // Cancel a task
 * const uploadTask = uploadData({ key, data: file });
 * //...
 * uploadTask.cancel();
 * try {
 *   await uploadTask.result;
 * } catch (error) {
 *   if (isCancelError(error)) {
 *     // Handle the error thrown by task cancellation.
 *   }
 * }
 * ```
 *
 * @example
 * ```ts
 * // Pause and resume a task
 * const uploadTask = uploadData({ key, data: file });
 * //...
 * uploadTask.pause();
 * //...
 * uploadTask.resume();
 * //...
 * await uploadTask.result;
 * ```
 */
const uploadData = (input) => {
    const { data } = input;
    const dataByteLength = byteLength(data);
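    // byteLength returns undefined when the payload size cannot be determined
    // ahead of time; such payloads pass the size check below unconditionally
    // and are always routed to the multipart path.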
    assertValidationError(dataByteLength === undefined || dataByteLength <= MAX_OBJECT_SIZE, StorageValidationErrorCode.ObjectIsTooLarge);
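    // Payloads with a known size of at most the default part size (5 MB) are
    // uploaded with a single PutObject request, cancelable via AbortController.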
    if (dataByteLength && dataByteLength <= DEFAULT_PART_SIZE) {
        const abortController = new AbortController();
        return createUploadTask({
            isMultipartUpload: false,
            job: putObjectJob(input, abortController.signal, dataByteLength),
            onCancel: (message) => {
                abortController.abort(message);
            },
        });
    }
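    // Larger or unknown-size payloads fall back to a multipart upload, whose
    // handlers also supply the pause/resume callbacks exposed on the task.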
    else {
        const { multipartUploadJob, onPause, onResume, onCancel } = getMultipartUploadHandlers(input, dataByteLength);
        return createUploadTask({
            isMultipartUpload: true,
            job: multipartUploadJob,
            onCancel: (message) => {
                onCancel(message);
            },
            onPause,
            onResume,
        });
    }
};

export { uploadData };
//# sourceMappingURL=index.mjs.map