// processRequest.js — compiled output of the graphql-upload package (10.3 kB),
// retrieved via the UNPKG raw view.
1"use strict";
// TypeScript CommonJS-interop helper: wraps a module so it is always consumed
// through a `default` property, unless it is already an ES module. Reuses a
// previously defined shared helper on `this` when one exists.
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule)
        return mod;
    return { "default": mod };
};
// Mark this compiled module as an ES-module interop target and pre-declare exports.
Object.defineProperty(exports, "__esModule", { value: true });
exports.processRequest = exports.GRAPHQL_MULTIPART_REQUEST_SPEC_URL = void 0;
// Multipart form parser.
const busboy_1 = __importDefault(require("busboy"));
// fs-capacitor WriteStream: buffers upload streams so they can be re-read via
// `createReadStream` after parsing, and released/cleaned up later.
const fs_capacitor_1 = require("./fs-capacitor");
// Factory for HTTP errors carrying status codes (400/413/499 below).
const http_errors_1 = __importDefault(require("http-errors"));
// Sets values inside `operations` by the string paths given in the ‘map’ field.
const object_path_1 = __importDefault(require("object-path"));
// Drains and discards a stream (used for disallowed or extraneous files).
const ignoreStream_1 = require("./ignoreStream");
const Upload_1 = require("./Upload");
// Spec URL referenced in client-facing error messages.
exports.GRAPHQL_MULTIPART_REQUEST_SPEC_URL = 'https://github.com/jaydenseric/graphql-multipart-request-spec';
/**
 * Processes an incoming
 * [GraphQL multipart request](https://github.com/jaydenseric/graphql-multipart-request-spec):
 * parses the ‘operations’ and ‘map’ text fields, streams each uploaded file
 * into an fs-capacitor `WriteStream`, and resolves the `operations` value with
 * each mapped path replaced by an `Upload` instance.
 * @param {import("http").IncomingMessage} request Incoming HTTP request.
 * @param {import("http").ServerResponse} response HTTP response; its `close`
 *   event releases upload resources and cleans up temporary files.
 * @param {object} [options] Processing options.
 * @param {number} [options.maxFieldSize=1000000] Max multipart field value size in bytes.
 * @param {number} [options.maxFileSize=Infinity] Max upload file size in bytes.
 * @param {number} [options.maxFiles=Infinity] Max number of file uploads.
 * @returns {Promise<object | Array>} Resolves the parsed `operations` once the
 *   ‘map’ field has been processed (file streaming continues in the
 *   background); rejects with an `http-errors` error on malformed, disordered,
 *   or oversized multipart data.
 */
async function processRequest(request, response, options) {
    const { maxFieldSize = 1000000, maxFileSize = Infinity, maxFiles = Infinity } = options || {};
    return new Promise((resolve, reject) => {
        // Set once the response closes; pending `createReadStream` calls then
        // throw `exitError` (see the response ‘close’ handler below).
        let released;
        // First fatal error encountered; makes later `exit` calls no-ops.
        let exitError;
        // Parsed ‘operations’ multipart field value (object or array).
        let operations;
        // object-path wrapper around `operations`, used to set `Upload`
        // instances at the paths listed in the ‘map’ field.
        let operationsPath;
        // Multipart field name → `Upload`, built from the ‘map’ field.
        let map;
        const parser = (0, busboy_1.default)({
            headers: request.headers,
            defParamCharset: 'utf8',
            limits: {
                fieldSize: maxFieldSize,
                fields: 2, // Only operations and map.
                fileSize: maxFileSize,
                files: maxFiles,
            },
        });
        /**
         * Exits request processing with an error. Successive calls have no effect.
         * @param {Error} error Error instance.
         * @param {boolean} [isParserError] Is the error from the parser.
         */
        function exit(error, isParserError = false) {
            if (exitError)
                return;
            exitError = error;
            // Reject any uploads that haven’t received a file yet.
            if (map)
                for (const upload of map.values())
                    if (!upload.file)
                        upload.reject(exitError);
            // If the error came from the parser, don’t cause it to be emitted again.
            isParserError ? parser.destroy() : parser.destroy(exitError);
            request.unpipe(parser);
            // With a sufficiently large request body, subsequent events in the same
            // event frame cause the stream to pause after the parser is destroyed. To
            // ensure that the request resumes, the call to .resume() is scheduled for
            // later in the event loop.
            setImmediate(() => {
                request.resume();
            });
            reject(exitError);
        }
        // Handle the two permitted text fields: ‘operations’, then ‘map’.
        parser.on('field', (fieldName, value, { valueTruncated }) => {
            if (valueTruncated)
                return exit((0, http_errors_1.default)(413, `The ‘${fieldName}’ multipart field value exceeds the ${maxFieldSize} byte size limit.`));
            switch (fieldName) {
                case 'operations':
                    try {
                        operations = JSON.parse(value);
                    }
                    catch (error) {
                        return exit((0, http_errors_1.default)(400, `Invalid JSON in the ‘operations’ multipart field (${exports.GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`));
                    }
                    // `operations` should be an object or an array. Note that arrays
                    // and `null` have an `object` type.
                    if (typeof operations !== 'object' || !operations)
                        return exit((0, http_errors_1.default)(400, `Invalid type for the ‘operations’ multipart field (${exports.GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`));
                    operationsPath = (0, object_path_1.default)(operations);
                    break;
                case 'map': {
                    // ‘map’ is only usable once ‘operations’ has been parsed.
                    if (!operations)
                        return exit((0, http_errors_1.default)(400, `Disordered multipart fields; ‘map’ should follow ‘operations’ (${exports.GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`));
                    let parsedMap;
                    try {
                        parsedMap = JSON.parse(value);
                    }
                    catch (error) {
                        return exit((0, http_errors_1.default)(400, `Invalid JSON in the ‘map’ multipart field (${exports.GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`));
                    }
                    // `map` should be an object.
                    if (typeof parsedMap !== 'object' || !parsedMap || Array.isArray(parsedMap))
                        return exit((0, http_errors_1.default)(400, `Invalid type for the ‘map’ multipart field (${exports.GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`));
                    const mapEntries = Object.entries(parsedMap);
                    // Check max files is not exceeded, even though the number of files
                    // to parse might not match the map provided by the client.
                    if (mapEntries.length > maxFiles)
                        return exit((0, http_errors_1.default)(413, `${maxFiles} max file uploads exceeded.`));
                    map = new Map();
                    for (const [fieldName, paths] of mapEntries) {
                        if (!Array.isArray(paths))
                            return exit((0, http_errors_1.default)(400, `Invalid type for the ‘map’ multipart field entry key ‘${fieldName}’ array (${exports.GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`));
                        map.set(fieldName, new Upload_1.Upload());
                        // Place the same `Upload` at every operations path it maps to.
                        for (const [index, path] of paths.entries()) {
                            if (typeof path !== 'string')
                                return exit((0, http_errors_1.default)(400, `Invalid type for the ‘map’ multipart field entry key ‘${fieldName}’ array index ‘${index}’ value (${exports.GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`));
                            try {
                                operationsPath.set(path, map.get(fieldName));
                            }
                            catch (error) {
                                return exit((0, http_errors_1.default)(400, `Invalid object path for the ‘map’ multipart field entry key ‘${fieldName}’ array index ‘${index}’ value ‘${path}’ (${exports.GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`));
                            }
                        }
                    }
                    // Resolve early with the upload promises in place; the files
                    // themselves continue streaming in below.
                    resolve(operations);
                }
            }
        });
        // Stream each uploaded file into an fs-capacitor `WriteStream` so it
        // can be read after parsing moves on.
        parser.on('file', (fieldName, stream, { filename, encoding, mimeType: mimetype }) => {
            if (!map) {
                (0, ignoreStream_1.ignoreStream)(stream);
                return exit((0, http_errors_1.default)(400, `Disordered multipart fields; files should follow ‘map’ (${exports.GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`));
            }
            const upload = map.get(fieldName);
            if (!upload) {
                // The file is extraneous. As the rest can still be processed, just
                // ignore it and don’t exit with an error.
                (0, ignoreStream_1.ignoreStream)(stream);
                return;
            }
            let fileError;
            const capacitor = new fs_capacitor_1.WriteStream();
            // If the capacitor fails, stop piping into it but keep draining the
            // incoming stream so the rest of the request can be parsed.
            capacitor.on('error', () => {
                stream.unpipe();
                stream.resume();
            });
            // The parser emits ‘limit’ when the file exceeds the size limit.
            stream.on('limit', () => {
                fileError = (0, http_errors_1.default)(413, `File truncated as it exceeds the ${maxFileSize} byte size limit.`);
                stream.unpipe();
                capacitor.destroy(fileError);
            });
            stream.on('error', (error) => {
                fileError = error;
                stream.unpipe();
                capacitor.destroy(fileError);
            });
            const file = {
                fieldName,
                filename,
                mimetype,
                encoding,
                // @ts-ignore
                createReadStream(options) {
                    // Surface any file error — or the exit error if processing
                    // was already released — instead of returning a dead stream.
                    const error = fileError || (released ? exitError : null);
                    if (error)
                        throw error;
                    return capacitor.createReadStream(options);
                },
                capacitor,
            };
            // Hide `capacitor` from enumeration and prevent reassignment.
            Object.defineProperty(file, 'capacitor', {
                enumerable: false,
                configurable: false,
                writable: false,
            });
            stream.pipe(capacitor);
            upload.resolve(file);
        });
        // More files than `maxFiles` were sent.
        parser.once('filesLimit', () => exit((0, http_errors_1.default)(413, `${maxFiles} max file uploads exceeded.`)));
        // All parts parsed: validate required fields arrived and reject any
        // uploads whose files never appeared.
        parser.once('finish', () => {
            request.unpipe(parser);
            request.resume();
            if (!operations)
                return exit((0, http_errors_1.default)(400, `Missing multipart field ‘operations’ (${exports.GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`));
            if (!map)
                return exit((0, http_errors_1.default)(400, `Missing multipart field ‘map’ (${exports.GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`));
            for (const upload of map.values())
                if (!upload.file)
                    upload.reject((0, http_errors_1.default)(400, 'File missing in the request.'));
        });
        // Use the `on` method instead of `once` as in edge cases the same parser
        // could have multiple `error` events and all must be handled to prevent the
        // Node.js process exiting with an error. One edge case is if there is a
        // malformed part header as well as an unexpected end of the form.
        parser.on('error', (error) => {
            exit(error, true);
        });
        response.once('close', () => {
            released = true;
            if (map)
                for (const upload of map.values())
                    if (upload.file)
                        // Release resources and clean up temporary files.
                        upload.file.capacitor.release();
        });
        // Client disconnected before the request body fully arrived.
        request.once('close', () => {
            if (!request.readableEnded)
                exit((0, http_errors_1.default)(499, 'Request disconnected during file upload stream parsing.'));
        });
        // Start parsing.
        request.pipe(parser);
    });
}
// Public export (the constant is exported above where it is declared).
exports.processRequest = processRequest;
//# sourceMappingURL=processRequest.js.map