// Source: infrastructure-components — storage service (retrieved via the unpkg raw view;
// the page header and per-line numbering of that view are not part of this file).
1import middleware from "../middleware/middleware-component";
2
3const path = require ("path");
4const fs = require("fs");
5
6const LOCAL_ENDPOINT = 'http://localhost:3002';
7
8export const STORAGE_ACTION = {
9 UPLOAD: "UPLOAD",
10 LIST: "LIST"
11
12};
13
14const getBucket = () => process.env.BUCKET_ID;
15
16const isOffline = () => !(getBucket().startsWith("infrcomp"));
17
18const getS3 = () => {
19 //AWS.config.update({region: 'eu-west-1'});
20 const AWS = require('aws-sdk');
21
22 return new AWS.S3(Object.assign({
23 apiVersion: '2006-03-01'
24 },
25 isOffline() ? {
26 s3ForcePathStyle: true,
27 accessKeyId: 'S3RVER', // This specific key is required when working offline
28 secretAccessKey: 'S3RVER',
29 endpoint: new AWS.Endpoint(LOCAL_ENDPOINT),
30 } : {}
31 ));
32};
33
34function prepareLocalFs () {
35 const getTempName = isOffline() ? () => {
36
37
38 const targetFolder = ".s3";
39 //check if folder needs to be created or integrated
40 if ( !fs.existsSync( targetFolder ) ) {
41 fs.mkdirSync( targetFolder, {recursive: true} );
42
43 }
44 fs.chmodSync( targetFolder, 0o777);
45
46 return targetFolder;
47 } : () => "/tmp";
48
49 return getTempName();
50}
51
52
/**
 * Express-style middleware handling chunked uploads (action === UPLOAD).
 *
 * The front-end (`uploadFile`) sends the file as base64 data-URL parts. Each
 * part is staged on the local filesystem and uploaded to S3 under a
 * "<file>_ICPART_<idx>" key. When the last part arrives
 * (part + 1 == total_parts), all part objects are downloaded, deleted, and
 * concatenated into the final object; its URL is returned to the client.
 *
 * @param storageId id of the storage component; used as the S3 key prefix
 */
export const uploadMiddleware = (storageId) => middleware({
    callback: async function (req, res, next) {

        const parsedBody = JSON.parse(req.body);

        // not an upload request: hand over to the next middleware
        if (parsedBody.action !== STORAGE_ACTION.UPLOAD) {
            return next();
        }

        const s3 = getS3();

        // prepare file data: the part arrives as a data-URL
        // ("data:<mime>;base64,<payload>") — extract and decode the payload.
        // NOTE(review): match() returns null for a malformed data-URL and would
        // throw here; presumably uploadFile always sends a valid one — verify.
        const buffer = Buffer.from(parsedBody.file_data.match(/^data:.+\/(.+);base64,(.*)$/)[2], 'base64');
        const tmpName = path.join(prepareLocalFs(),parsedBody.file);

        // stage the decoded part on disk so it can be streamed to S3 below
        await new Promise((resolve, reject) => {
            fs.writeFile(tmpName, buffer,
                (err) => {
                    if (err) {
                        reject(err);
                    } else {
                        resolve();
                    }
                });
        });

        // normalize the user prefix: strip leading/trailing slashes and append
        // exactly one "/", or use the empty string when no prefix was given
        const prefix = parsedBody.prefix !== undefined && parsedBody.prefix.replace(/(^\/)|(\/$)/g, "").length > 0 ?
            parsedBody.prefix.replace(/(^\/)|(\/$)/g, "") + "/" : "";

        // S3 key of an individual part object
        const getFilePartKey = (idx) => storageId + "/" + prefix + parsedBody.file + "_ICPART_" + idx;

        // upload this part. NOTE(review): a failed part upload is only logged
        // and processing continues; the final reassembly would then fail.
        await s3.upload({
            Bucket: getBucket(),
            Key: getFilePartKey(parsedBody.part),
            Body: fs.createReadStream(tmpName),
            //Expires:expiryDate
        }).promise().then(
            function (data) {
                // part stored successfully
            },
            function (error) {
                console.log("could not upload to s3 ", error);
            }
        );

        // last part received: reassemble the complete file from all parts
        if (parseInt(parsedBody.part) + 1 == parseInt(parsedBody.total_parts)) {

            // fetch every part (in parallel), delete each part object, and
            // concatenate the buffers in part order
            const parts = Buffer.concat(await Promise.all(Array.apply(null, Array(parseInt(parsedBody.total_parts))).map(
                function (part, idx) {
                    return new Promise((resolve, reject) => {
                        const partParams = {
                            Bucket: getBucket(),
                            Key: getFilePartKey(idx)
                        };

                        return s3.getObject(partParams).promise().then(
                            async function (data) {

                                // best-effort cleanup of the part object;
                                // a failed delete is logged but not fatal
                                await s3.deleteObject(partParams).promise().then(
                                    ok => ok,
                                    err => {
                                        console.log("could not delete part ", idx, err);
                                    }
                                );

                                resolve(Buffer.from(data.Body, 'base64'));

                            },
                            function (error) {
                                console.log("could not load part ", idx, error);
                                reject(error);
                            }
                        );

                    });

                }
            )));

            // upload the reassembled file under its final key
            const finalparams = {
                Bucket: getBucket(),
                Key: storageId + "/" +prefix + parsedBody.file,
                Body: parts,
                //Expires:expiryDate
            };

            await s3.upload(finalparams).promise().then(
                function (data) {

                    // respond with the location of the completed upload
                    res.status(200)
                        .set({
                            "Access-Control-Allow-Origin" : "*", // Required for CORS support to work
                            "Access-Control-Allow-Credentials" : true // Required for cookies, authorization headers with HTTPS
                        })
                        .send(JSON.stringify({uri: data.Location }));

                    return;

                },
                function (error) {
                    console.log("could not upload to s3 ", error);

                    res.status(500).set({
                        "Access-Control-Allow-Origin" : "*", // Required for CORS support to work
                        "Access-Control-Allow-Credentials" : true // Required for cookies, authorization headers with HTTPS
                    }).send("error");

                    return
                }
            );

        } else {
            // intermediate part: acknowledge progress to the client
            res.status(200).set({
                "Access-Control-Allow-Origin" : "*", // Required for CORS support to work
                "Access-Control-Allow-Credentials" : true // Required for cookies, authorization headers with HTTPS
            }).send(JSON.stringify({part: parsedBody.part, total_parts: parsedBody.total_parts }));
        }


    }
});
184
185/**
186 * User-function to upload a file from the front-end
187 *
188 * @param storageId
189 * @param file
190 * @param onProgess
191 * @param onComplete
192 * @param onError
193 */
194export const uploadFile = (
195 storageId: string,
196 prefix: string | undefined,
197 file,
198 data,
199 onProgess: (uploaded: number) => Boolean,
200 onComplete: (uri: string) => void,
201 onError: (err: string) => void
202) => {
203
204
205 if (!file) {
206 onError("not a valid file!");
207 return;
208 }
209
210
211 const slice_size = 100 * 1024;
212 const reader = new FileReader();
213
214 function upload_file( start, part ) {
215
216 const next_slice = start + slice_size + 1;
217 const totalParts = Math.ceil(file.size / slice_size);
218
219 const blob = file.slice( start, next_slice );
220
221
222 reader.onload = function( event ) {
223 if ( event.target.readyState !== FileReader.DONE ) {
224 return;
225 }
226
227 require("infrastructure-components").callService(
228 storageId,
229 Object.assign({
230 action: STORAGE_ACTION.UPLOAD,
231 file_data: event.target.result,
232 file: file.name,
233 file_type: file.type,
234 prefix: prefix,
235 part: part,
236 total_parts: totalParts,
237 }, part +1 == totalParts ? {
238 data: data
239 } : {}),
240 (data: any) => {
241
242 data.json().then(parsedBody => {
243 //console.log("parsedBody: ", parsedBody);
244
245 const size_done = start + slice_size;
246
247 if ( next_slice < file.size ) {
248 if (onProgess(size_done)) {
249 // More to upload, call function recursively
250 upload_file( next_slice, part+1 );
251 } else {
252 onError("cancelled");
253 }
254
255 } else {
256 // Update upload progress
257 onComplete(parsedBody.uri);
258 }
259 });
260
261
262
263
264
265 },
266 (error) => {
267 onError(error);
268 }
269 );
270
271 };
272
273 reader.readAsDataURL( blob );
274 }
275
276 upload_file(0, 0);
277
278};
279
/**
 * Modes for listFiles / listMiddleware: list only the files directly within
 * the prefix, only the direct child folders, or everything below the prefix.
 */
export const LISTFILES_MODE = {
    FILES: "FILES",
    FOLDERS: "FOLDERS",
    ALL: "ALL"
}
285
286/**
287 * User-function to get a list of the files. Call from the front-end
288 *
289 */
290export const listFiles = (
291 storageId: string,
292 prefix: string,
293 listMode: string,
294 data: any,
295 onComplete: (result: any) => void,
296 onError: (err: string) => void,
297 config: any = undefined,
298 isOffline: Boolean = false
299) => {
300
301 //console.log("listFiles")
302 require("infrastructure-components").callService(
303 storageId,
304 {
305 action: STORAGE_ACTION.LIST,
306 prefix: prefix,
307 listMode: listMode,
308 data: data
309 },
310 (data) => {
311 data.json().then(parsedBody => {
312 //console.log(parsedBody);
313 onComplete({data: parsedBody.data, files: parsedBody.files, folders: parsedBody.folders})
314 });
315 },
316 (error) => {
317 //console.log("error: ", error);
318 onError(error);
319 },
320 config,
321 isOffline
322 );
323}
324
325
/**
 * Express-style middleware handling list requests (action === LIST).
 *
 * Lists the objects of this storage component below the requested prefix and
 * responds — depending on parsedBody.listMode — with either the files of that
 * folder (FILES), everything below it (ALL), or the set of direct child
 * folders (FOLDERS). `res.locals` is passed through as `data`.
 *
 * @param storageId id of the storage component; used as the S3 key prefix
 */
export const listMiddleware = (storageId) => middleware({
    callback: async function (req, res, next) {

        const parsedBody = JSON.parse(req.body);

        // not a list request: hand over to the next middleware
        if (parsedBody.action !== STORAGE_ACTION.LIST) {
            return next();
        }

        const s3 = getS3();

        await s3.listObjectsV2({
            Bucket: getBucket(),
            Prefix: storageId + "/" + (parsedBody.prefix ? parsedBody.prefix : "").replace(/(^\/)|(\/$)/g, "")
        }).promise().then(
            function (data) {

                // map each S3 object to {file, url, lastModified, itemKey},
                // where itemKey is the key relative to this storage component
                const rawFilesList = data.Contents.map(item => ({
                    file: item.Key.substring(item.Key.lastIndexOf("/")+1),
                    url: (isOffline() ? LOCAL_ENDPOINT + "/"+data.Name+"/" : "https://"+data.Name+".s3.amazonaws.com/")+item.Key,
                    lastModified: item.LastModified,
                    itemKey: item.Key.substring(item.Key.indexOf(storageId)+storageId.length),
                }));


                // NOTE(review): unlike the Prefix above, this does NOT guard
                // against parsedBody.prefix being undefined and would throw
                // here — verify that callers always send a prefix string.
                const userPrefix = parsedBody.prefix.replace(/(^\/)|(\/$)/g, "");
                // at the root, "." represents the current folder
                const baseFolder = userPrefix.length == 0 ? ["."] : [];

                res.status(200)
                    .set({
                        "Access-Control-Allow-Origin" : "*", // Required for CORS support to work
                        "Access-Control-Allow-Credentials" : true // Required for cookies, authorization headers with HTTPS
                    })
                    .send(Object.assign(
                        {data: res.locals},
                        parsedBody.listMode !== LISTFILES_MODE.FOLDERS ?
                            {
                                // FILES / ALL: return matching file entries
                                files: rawFilesList.filter(
                                    item => {
                                        if (parsedBody.listMode === LISTFILES_MODE.ALL) {
                                            return true;
                                        }

                                        // an item lies directly in the requested folder when its
                                        // file name starts right after "<storageId>/<prefix>/"
                                        const temp = path.join(storageId, parsedBody.prefix ? parsedBody.prefix : "").replace(/(^\/)|(\/$)/g, "");
                                        const isInThisFolder = item.url.indexOf(temp)+temp.length+1 == item.url.indexOf(item.file);

                                        return (parsedBody.listMode === LISTFILES_MODE.FILES && isInThisFolder);
                                    }
                                )
                            } : {
                                // FOLDERS: derive the set of direct child folders
                                // from the object keys (deduplicated via Set)
                                folders: Array.from(new Set(baseFolder.concat(rawFilesList.map(item => {

                                    // NOTE(review): temp/isInThisFolder are computed but unused
                                    // in this branch — looks like a leftover from the FILES path
                                    const temp = path.join(storageId, parsedBody.prefix ? parsedBody.prefix : "").replace(/(^\/)|(\/$)/g, "");
                                    const isInThisFolder = item.url.indexOf(temp)+temp.length+1 == item.url.indexOf(item.file);


                                    // drop the file name: keep only the folder part of the key
                                    const wf = item.itemKey.substring(0, item.itemKey.lastIndexOf("/")).replace(/(^\/)|(\/$)/g, "")+"/";

                                    // the requested folder itself is reported as "."
                                    if (wf == userPrefix+"/") {
                                        return "."
                                    };

                                    // make the folder path relative to the requested prefix;
                                    // entries outside the prefix collapse to "" (filtered below)
                                    const folder = userPrefix && userPrefix.length > 0 ? (
                                        wf.startsWith(userPrefix+"/")? wf.substring(userPrefix.length).replace(/(^\/)|(\/$)/g, ""):""
                                    ): wf;

                                    // return only direct children (first path segment)
                                    return folder.indexOf("/") >= 0 ? folder.substring(0, folder.indexOf("/")) : folder
                                }).filter(item => item.length > 0))))
                            }

                    ));

                return;
            },
            function (error) {
                console.log("could not list s3 ", error);

                res.status(500).set({
                    "Access-Control-Allow-Origin" : "*", // Required for CORS support to work
                    "Access-Control-Allow-Credentials" : true // Required for cookies, authorization headers with HTTPS
                }).send("error");

                return
            }
        );

    }
});
\No newline at end of file