1 | import middleware from "../middleware/middleware-component";
|
2 |
|
3 | const path = require ("path");
|
4 | const fs = require("fs");
|
5 |
|
6 | const LOCAL_ENDPOINT = 'http://localhost:3002';
|
7 |
|
8 | export const STORAGE_ACTION = {
|
9 | UPLOAD: "UPLOAD",
|
10 | LIST: "LIST"
|
11 |
|
12 | };
|
13 |
|
14 | const getBucket = () => process.env.BUCKET_ID;
|
15 |
|
16 | const isOffline = () => !(getBucket().startsWith("infrcomp"));
|
17 |
|
18 | const getS3 = () => {
|
19 |
|
20 | const AWS = require('aws-sdk');
|
21 |
|
22 | return new AWS.S3(Object.assign({
|
23 | apiVersion: '2006-03-01'
|
24 | },
|
25 | isOffline() ? {
|
26 | s3ForcePathStyle: true,
|
27 | accessKeyId: 'S3RVER',
|
28 | secretAccessKey: 'S3RVER',
|
29 | endpoint: new AWS.Endpoint(LOCAL_ENDPOINT),
|
30 | } : {}
|
31 | ));
|
32 | };
|
33 |
|
34 | function prepareLocalFs () {
|
35 | const getTempName = isOffline() ? () => {
|
36 |
|
37 |
|
38 | const targetFolder = ".s3";
|
39 |
|
40 | if ( !fs.existsSync( targetFolder ) ) {
|
41 | fs.mkdirSync( targetFolder, {recursive: true} );
|
42 |
|
43 | }
|
44 | fs.chmodSync( targetFolder, 0o777);
|
45 |
|
46 | return targetFolder;
|
47 | } : () => "/tmp";
|
48 |
|
49 | return getTempName();
|
50 | }
|
51 |
|
52 |
|
/**
 * Middleware handling UPLOAD requests for a storage component.
 *
 * Each request carries one base64 chunk ("part") of a file. The chunk is
 * written to a temp file, uploaded to S3 under a "<file>_ICPART_<idx>" key,
 * and — once the last part arrives — all parts are downloaded, concatenated,
 * uploaded as the final object, and the part objects deleted.
 *
 * Responds 200 with {uri} on completion, 200 with {part, total_parts} for
 * intermediate parts, 500 on final-upload failure.
 */
export const uploadMiddleware = (storageId) => middleware({
    callback: async function (req, res, next) {

        const parsedBody = JSON.parse(req.body);

        // only handle UPLOAD requests; pass everything else down the chain
        if (parsedBody.action !== STORAGE_ACTION.UPLOAD) {
            return next();
        }

        const s3 = getS3();

        // file_data is a data-URI ("data:<mime>;base64,<payload>"); group 2 is
        // the raw base64 payload. NOTE(review): assumes the regex always
        // matches — a malformed body would throw on [2]; confirm callers
        // always send data-URIs (uploadFile uses readAsDataURL, which does).
        const buffer = Buffer.from(parsedBody.file_data.match(/^data:.+\/(.+);base64,(.*)$/)[2], 'base64');
        const tmpName = path.join(prepareLocalFs(),parsedBody.file);

        // persist the decoded chunk locally before streaming it to S3
        await new Promise((resolve, reject) => {
            fs.writeFile(tmpName, buffer,
                (err) => {
                    if (err) {
                        reject(err);
                    } else {
                        resolve();
                    }
                });
        });

        // normalize the optional prefix: strip leading/trailing slashes and
        // append a single trailing slash when non-empty, so it can be
        // concatenated straight into S3 keys
        const prefix = parsedBody.prefix !== undefined && parsedBody.prefix.replace(/(^\/)|(\/$)/g, "").length > 0 ?
            parsedBody.prefix.replace(/(^\/)|(\/$)/g, "") + "/" : "";

        // temporary S3 key of an individual chunk of this file
        const getFilePartKey = (idx) => storageId + "/" + prefix + parsedBody.file + "_ICPART_" + idx;

        // upload this chunk under its part key.
        // NOTE(review): a failed part upload is only logged — the request
        // still reports success to the client; confirm this best-effort
        // behavior is intended.
        await s3.upload({
            Bucket: getBucket(),
            Key: getFilePartKey(parsedBody.part),
            Body: fs.createReadStream(tmpName),
        }).promise().then(
            function (data) {
            },
            function (error) {
                console.log("could not upload to s3 ", error);
            }
        );

        // last part received → assemble the final object
        if (parseInt(parsedBody.part) + 1 == parseInt(parsedBody.total_parts)) {

            // Array.apply(null, Array(n)) yields an n-element mappable array;
            // each element fetches (and then deletes) the matching part object
            const parts = Buffer.concat(await Promise.all(Array.apply(null, Array(parseInt(parsedBody.total_parts))).map(
                function (part, idx) {
                    return new Promise((resolve, reject) => {
                        const partParams = {
                            Bucket: getBucket(),
                            Key: getFilePartKey(idx)
                        };

                        return s3.getObject(partParams).promise().then(
                            async function (data) {

                                // best-effort cleanup of the part object; failure is only logged
                                await s3.deleteObject(partParams).promise().then(
                                    ok => ok,
                                    err => {
                                        console.log("could not delete part ", idx, err);
                                    }
                                );

                                // NOTE(review): getObject's Body is typically a Buffer, in
                                // which case the 'base64' argument is ignored by
                                // Buffer.from — confirm Body is never a base64 string here
                                resolve(Buffer.from(data.Body, 'base64'));

                            },
                            function (error) {
                                console.log("could not load part ", idx, error);
                                reject(error);
                            }
                        );

                    });

                }
            )));

            // final object: same key as the parts, without the _ICPART_ suffix
            const finalparams = {
                Bucket: getBucket(),
                Key: storageId + "/" +prefix + parsedBody.file,
                Body: parts,

            };

            await s3.upload(finalparams).promise().then(
                function (data) {

                    // success: return the public location of the assembled object
                    res.status(200)
                        .set({
                            "Access-Control-Allow-Origin" : "*",
                            "Access-Control-Allow-Credentials" : true
                        })
                        .send(JSON.stringify({uri: data.Location }));

                    return;

                },
                function (error) {
                    console.log("could not upload to s3 ", error);

                    res.status(500).set({
                        "Access-Control-Allow-Origin" : "*",
                        "Access-Control-Allow-Credentials" : true
                    }).send("error");

                    return
                }
            );

        } else {
            // intermediate part: acknowledge so the client sends the next one
            res.status(200).set({
                "Access-Control-Allow-Origin" : "*",
                "Access-Control-Allow-Credentials" : true
            }).send(JSON.stringify({part: parsedBody.part, total_parts: parsedBody.total_parts }));
        }

    }
});
|
184 |
|
185 |
|
186 |
|
187 |
|
188 |
|
189 |
|
190 |
|
191 |
|
192 |
|
193 |
|
194 | export const uploadFile = (
|
195 | storageId: string,
|
196 | prefix: string | undefined,
|
197 | file,
|
198 | data,
|
199 | onProgess: (uploaded: number) => Boolean,
|
200 | onComplete: (uri: string) => void,
|
201 | onError: (err: string) => void
|
202 | ) => {
|
203 |
|
204 |
|
205 | if (!file) {
|
206 | onError("not a valid file!");
|
207 | return;
|
208 | }
|
209 |
|
210 |
|
211 | const slice_size = 100 * 1024;
|
212 | const reader = new FileReader();
|
213 |
|
214 | function upload_file( start, part ) {
|
215 |
|
216 | const next_slice = start + slice_size + 1;
|
217 | const totalParts = Math.ceil(file.size / slice_size);
|
218 |
|
219 | const blob = file.slice( start, next_slice );
|
220 |
|
221 |
|
222 | reader.onload = function( event ) {
|
223 | if ( event.target.readyState !== FileReader.DONE ) {
|
224 | return;
|
225 | }
|
226 |
|
227 | require("infrastructure-components").callService(
|
228 | storageId,
|
229 | Object.assign({
|
230 | action: STORAGE_ACTION.UPLOAD,
|
231 | file_data: event.target.result,
|
232 | file: file.name,
|
233 | file_type: file.type,
|
234 | prefix: prefix,
|
235 | part: part,
|
236 | total_parts: totalParts,
|
237 | }, part +1 == totalParts ? {
|
238 | data: data
|
239 | } : {}),
|
240 | (data: any) => {
|
241 |
|
242 | data.json().then(parsedBody => {
|
243 |
|
244 |
|
245 | const size_done = start + slice_size;
|
246 |
|
247 | if ( next_slice < file.size ) {
|
248 | if (onProgess(size_done)) {
|
249 |
|
250 | upload_file( next_slice, part+1 );
|
251 | } else {
|
252 | onError("cancelled");
|
253 | }
|
254 |
|
255 | } else {
|
256 |
|
257 | onComplete(parsedBody.uri);
|
258 | }
|
259 | });
|
260 |
|
261 |
|
262 |
|
263 |
|
264 |
|
265 | },
|
266 | (error) => {
|
267 | onError(error);
|
268 | }
|
269 | );
|
270 |
|
271 | };
|
272 |
|
273 | reader.readAsDataURL( blob );
|
274 | }
|
275 |
|
276 | upload_file(0, 0);
|
277 |
|
278 | };
|
279 |
|
280 | export const LISTFILES_MODE = {
|
281 | FILES: "FILES",
|
282 | FOLDERS: "FOLDERS",
|
283 | ALL: "ALL"
|
284 | }
|
285 |
|
286 |
|
287 |
|
288 |
|
289 |
|
290 | export const listFiles = (
|
291 | storageId: string,
|
292 | prefix: string,
|
293 | listMode: string,
|
294 | data: any,
|
295 | onComplete: (result: any) => void,
|
296 | onError: (err: string) => void,
|
297 | config: any = undefined,
|
298 | isOffline: Boolean = false
|
299 | ) => {
|
300 |
|
301 |
|
302 | require("infrastructure-components").callService(
|
303 | storageId,
|
304 | {
|
305 | action: STORAGE_ACTION.LIST,
|
306 | prefix: prefix,
|
307 | listMode: listMode,
|
308 | data: data
|
309 | },
|
310 | (data) => {
|
311 | data.json().then(parsedBody => {
|
312 |
|
313 | onComplete({data: parsedBody.data, files: parsedBody.files, folders: parsedBody.folders})
|
314 | });
|
315 | },
|
316 | (error) => {
|
317 |
|
318 | onError(error);
|
319 | },
|
320 | config,
|
321 | isOffline
|
322 | );
|
323 | }
|
324 |
|
325 |
|
326 | export const listMiddleware = (storageId) => middleware({
|
327 | callback: async function (req, res, next) {
|
328 |
|
329 | const parsedBody = JSON.parse(req.body);
|
330 |
|
331 | if (parsedBody.action !== STORAGE_ACTION.LIST) {
|
332 | return next();
|
333 | }
|
334 |
|
335 | const s3 = getS3();
|
336 |
|
337 |
|
338 |
|
339 | await s3.listObjectsV2({
|
340 | Bucket: getBucket(),
|
341 | Prefix: storageId + "/" + (parsedBody.prefix ? parsedBody.prefix : "").replace(/(^\/)|(\/$)/g, "")
|
342 | }).promise().then(
|
343 | function (data) {
|
344 |
|
345 |
|
346 |
|
347 | const rawFilesList = data.Contents.map(item => ({
|
348 | file: item.Key.substring(item.Key.lastIndexOf("/")+1),
|
349 | url: (isOffline() ? LOCAL_ENDPOINT + "/"+data.Name+"/" : "https://"+data.Name+".s3.amazonaws.com/")+item.Key,
|
350 | lastModified: item.LastModified,
|
351 | itemKey: item.Key.substring(item.Key.indexOf(storageId)+storageId.length),
|
352 | }));
|
353 |
|
354 |
|
355 | const userPrefix = parsedBody.prefix.replace(/(^\/)|(\/$)/g, "");
|
356 | const baseFolder = userPrefix.length == 0 ? ["."] : [];
|
357 |
|
358 | res.status(200)
|
359 | .set({
|
360 | "Access-Control-Allow-Origin" : "*",
|
361 | "Access-Control-Allow-Credentials" : true
|
362 | })
|
363 | .send(Object.assign(
|
364 | {data: res.locals},
|
365 | parsedBody.listMode !== LISTFILES_MODE.FOLDERS ?
|
366 | {
|
367 | files: rawFilesList.filter(
|
368 | item => {
|
369 | if (parsedBody.listMode === LISTFILES_MODE.ALL) {
|
370 | return true;
|
371 | }
|
372 |
|
373 | const temp = path.join(storageId, parsedBody.prefix ? parsedBody.prefix : "").replace(/(^\/)|(\/$)/g, "");
|
374 | const isInThisFolder = item.url.indexOf(temp)+temp.length+1 == item.url.indexOf(item.file);
|
375 |
|
376 |
|
377 |
|
378 | return (parsedBody.listMode === LISTFILES_MODE.FILES && isInThisFolder) |
379 | ;
|
380 | }
|
381 | )
|
382 | } : {
|
383 | folders: Array.from(new Set(baseFolder.concat(rawFilesList.map(item => {
|
384 |
|
385 | const temp = path.join(storageId, parsedBody.prefix ? parsedBody.prefix : "").replace(/(^\/)|(\/$)/g, "");
|
386 | const isInThisFolder = item.url.indexOf(temp)+temp.length+1 == item.url.indexOf(item.file);
|
387 |
|
388 |
|
389 |
|
390 | const wf = item.itemKey.substring(0, item.itemKey.lastIndexOf("/")).replace(/(^\/)|(\/$)/g, "")+"/";
|
391 |
|
392 |
|
393 |
|
394 | if (wf == userPrefix+"/") {
|
395 | return "."
|
396 | };
|
397 |
|
398 |
|
399 |
|
400 |
|
401 |
|
402 | const folder = userPrefix && userPrefix.length > 0 ? (
|
403 | wf.startsWith(userPrefix+"/")? wf.substring(userPrefix.length).replace(/(^\/)|(\/$)/g, ""):""
|
404 | ): wf;
|
405 |
|
406 |
|
407 |
|
408 |
|
409 | return folder.indexOf("/") >= 0 ? folder.substring(0, folder.indexOf("/")) : folder
|
410 | }).filter(item => item.length > 0))))
|
411 | } |
412 |
|
413 |
|
414 |
|
415 |
|
416 |
|
417 |
|
418 |
|
419 |
|
420 |
|
421 |
|
422 |
|
423 |
|
424 |
|
425 |
|
426 |
|
427 |
|
428 | ));
|
429 |
|
430 | return;
|
431 | },
|
432 | function (error) {
|
433 | console.log("could not list s3 ", error);
|
434 |
|
435 | res.status(500).set({
|
436 | "Access-Control-Allow-Origin" : "*",
|
437 | "Access-Control-Allow-Credentials" : true
|
438 | }).send("error");
|
439 |
|
440 | return
|
441 | }
|
442 | );
|
443 |
|
444 | }
|
445 | }); |
\ | No newline at end of file |