// Bundled build artifact of @adonisjs/bodyparser (~24.9 kB), as served via UNPKG.
1import {
2 MultipartFile,
3 computeFileTypeFromName,
4 getFileType,
5 supportMagicFileTypes
6} from "./chunk-NRCULWNL.js";
7
8// src/bodyparser_middleware.ts
9import { tmpdir } from "node:os";
10import { Exception as Exception4 } from "@poppinss/utils";
11import { join, isAbsolute } from "node:path";
12import { createId } from "@paralleldrive/cuid2";
13
14// src/debug.ts
15import { debuglog } from "node:util";
// Namespaced debug logger; output is enabled with NODE_DEBUG=adonisjs:bodyparser
var debug_default = debuglog("adonisjs:bodyparser");
17
18// src/parsers/form.ts
19import raw from "raw-body";
20import inflate from "inflation";
21import qs from "qs";
/**
 * Parses an url-encoded (form) request body using "qs".
 *
 * The raw body is read with "raw-body" (after inflating any
 * compressed content-encoding) and parsed with "qs". Returns
 * both the parsed object and the raw string payload.
 *
 * @param req HTTP request (IncomingMessage-like, with `headers`)
 * @param options raw-body options plus `queryString` (qs options)
 *   and `convertEmptyStringsToNull`
 * @returns {Promise<{ parsed: object, raw: string }>}
 */
async function parseForm(req, options) {
  const config = { encoding: "utf8", limit: "56kb", length: 0, ...options };
  const qsOptions = { ...config.queryString };

  // Dot-notation keys are enabled unless the user explicitly opted out
  if (qsOptions.allowDots === void 0) {
    qsOptions.allowDots = true;
  }

  // Use the content-length header as the expected body length, but only
  // when the body is not compressed (identity encoding), since the header
  // reports the compressed size otherwise.
  const reportedLength = req.headers["content-length"];
  const bodyEncoding = req.headers["content-encoding"] || "identity";
  if (reportedLength && bodyEncoding === "identity") {
    config.length = ~~reportedLength;
  }

  // Custom decoder converting empty string *values* (never keys) to null
  if (config.convertEmptyStringsToNull) {
    qsOptions.decoder = (str, defaultDecoder, charset, type) => {
      const decoded = defaultDecoder(str, defaultDecoder, charset);
      return type === "value" && decoded === "" ? null : decoded;
    };
  }

  const body = await raw(inflate(req), config);
  return { parsed: qs.parse(body, qsOptions), raw: body };
}
53
54// src/parsers/json.ts
55import raw2 from "raw-body";
56import inflate2 from "inflation";
57import json from "@poppinss/utils/json";
58import { Exception } from "@poppinss/utils";
// Strict-mode check: body must start (after optional spaces/tabs/newlines)
// with "{" or "[", i.e. a JSON object or array
var strictJSONReg = /^[\x20\x09\x0a\x0d]*(\[|\{)/;
/**
 * JSON.parse reviver that replaces empty string values with null.
 * The root value (reported with an empty key by JSON.parse) is
 * returned untouched.
 *
 * @param {string} key property name ("" for the root value)
 * @param {*} value parsed value
 * @returns {*} the value, or null when it is an empty string
 */
function convertEmptyStringsToNull(key, value) {
  if (key !== "" && value === "") {
    return null;
  }
  return value;
}
/**
 * Parses a JSON request body.
 *
 * In strict mode (default) only top-level objects and arrays are
 * accepted and an empty body parses to "{}". Parse failures are
 * rethrown with `status = 400` and the raw body attached.
 *
 * @param req HTTP request (IncomingMessage-like, with `headers`)
 * @param options raw-body options plus `strict` and
 *   `convertEmptyStringsToNull`
 * @returns {Promise<{ parsed: *, raw: string }>}
 */
async function parseJSON(req, options) {
  const config = { encoding: "utf8", limit: "1mb", length: 0, ...options };

  // Trust content-length only for uncompressed (identity) bodies
  const reportedLength = req.headers["content-length"];
  const bodyEncoding = req.headers["content-encoding"] || "identity";
  if (reportedLength && bodyEncoding === "identity") {
    config.length = ~~reportedLength;
  }

  const strict = config.strict !== false;
  const reviver = config.convertEmptyStringsToNull ? convertEmptyStringsToNull : void 0;
  const requestBody = await raw2(inflate2(req), config);

  // Empty body: "{}" in strict mode, returned as-is otherwise
  if (!requestBody) {
    const parsed = strict ? {} : requestBody;
    return { parsed, raw: requestBody };
  }

  if (strict && !strictJSONReg.test(requestBody)) {
    throw new Exception("Invalid JSON, only supports object and array", { status: 422 });
  }

  try {
    // safeParse guards against prototype pollution via __proto__ keys
    return { parsed: json.safeParse(requestBody, reviver), raw: requestBody };
  } catch (error) {
    error.status = 400;
    error.body = requestBody;
    throw error;
  }
}
109
110// src/multipart/main.ts
111import multiparty from "@poppinss/multiparty";
112import bytes from "bytes";
113import { Exception as Exception3 } from "@poppinss/utils";
114
115// src/form_fields.ts
116import lodash from "@poppinss/utils/lodash";
var FormFields = class {
  /** Accumulated key/value pairs (nested via lodash paths) */
  #fields = {};
  /** Behavior flags, currently only `convertEmptyStringsToNull` */
  #config;
  constructor(config) {
    this.#config = config;
  }
  /**
   * Add a new key/value pair. Keys using array-like expressions
   * are handled properly.
   *
   * @example
   * ```
   * formfields.add('username', 'virk')
   *
   * // array
   * formfields.add('username[]', 'virk')
   * formfields.add('username[]', 'nikk')
   *
   * // Indexed keys are ordered properly
   * formfields.add('username[1]', 'virk')
   * formfields.add('username[0]', 'nikk')
   * ```
   */
  add(key, value) {
    if (this.#config.convertEmptyStringsToNull && value === "") {
      value = null;
    }
    // A trailing "[]" means the value must be collected into an array,
    // even when only one value arrives
    let isArray = false;
    key = key.replace(/\[]$/, () => {
      isArray = true;
      return "";
    });
    const current = lodash.get(this.#fields, key);
    if (!current) {
      lodash.set(this.#fields, key, isArray ? [value] : value);
    } else if (Array.isArray(current)) {
      current.push(value);
    } else {
      // Second value for a scalar key: promote to an array
      lodash.set(this.#fields, key, [current, value]);
    }
  }
  /**
   * Returns the collected form fields
   */
  get() {
    return this.#fields;
  }
};
167
168// src/multipart/part_handler.ts
169import { extname } from "node:path";
170import { Exception as Exception2 } from "@poppinss/utils";
var PartHandler = class {
  /**
   * The multiparty part (a readable stream) this handler consumes
   */
  #part;
  /**
   * Validation options for the file (size limit, allowed extnames,
   * and the deferValidations flag)
   */
  #options;
  /**
   * The stream buffer reported by the stream consumer. We hold the buffer until we are
   * able to detect the file extension and then the buffered memory is released
   */
  #buff;
  /**
   * A boolean to know, if we have emitted the error event after one or
   * more validation errors. We need this flag, since the race conditions
   * between `data` and `error` events will trigger multiple `error`
   * emit.
   */
  #emittedValidationError = false;
  /**
   * A boolean to know if we can use the magic number to detect the file type. This is how it
   * works.
   *
   * - We begin by extracting the file extension from the file name
   * - If the file has no extension, we try to inspect the buffer
   * - If the extension is something we support via magic numbers, then we ignore the extension
   *   and inspect the buffer
   * - Otherwise, we have no other option than to trust the extension
   *
   * Think of this as using the optimal way for validating the file type
   */
  get #canFileTypeBeDetected() {
    const fileExtension = extname(this.#part.filename).replace(/^\./, "");
    return fileExtension ? supportMagicFileTypes.has(fileExtension) : true;
  }
  /**
   * Creating a new file object for each part inside the multipart
   * form data
   */
  file;
  constructor(part, options) {
    this.#part = part;
    this.#options = options;
    this.file = new MultipartFile(
      {
        clientName: part.filename,
        fieldName: part.name,
        headers: part.headers
      },
      {
        size: options.size,
        extnames: options.extnames
      }
    );
  }
  /**
   * Detects the file type and extension and also validates it when validations
   * are not deferred. No-op until at least one chunk has been buffered.
   */
  async #detectFileTypeAndExtension() {
    if (!this.#buff) {
      return;
    }
    const fileType = this.#canFileTypeBeDetected ? await getFileType(this.#buff) : computeFileTypeFromName(this.file.clientName, this.file.headers);
    if (fileType) {
      this.file.extname = fileType.ext;
      this.file.type = fileType.type;
      this.file.subtype = fileType.subtype;
    }
  }
  /**
   * Skip the stream or end it forcefully. This is invoked when the
   * streaming consumer reports an error
   */
  #skipEndStream() {
    this.#part.emit("close");
  }
  /**
   * Finish the process of listening for any more events and mark the
   * file state as consumed. Validations run here unless deferred.
   */
  #finish() {
    this.file.state = "consumed";
    if (!this.#options.deferValidations) {
      this.file.validate();
    }
  }
  /**
   * Start the process of updating the file state
   * to streaming mode.
   */
  begin() {
    this.file.state = "streaming";
  }
  /**
   * Handles the file upload progress by validating the file size and
   * extension.
   *
   * @param line the chunk just consumed from the part stream
   * @param bufferLength byte length of that chunk
   */
  async reportProgress(line, bufferLength) {
    // Ignore chunks that arrive after an error/success already settled the file
    if (this.file.state !== "streaming") {
      return;
    }
    if (this.file.extname === void 0) {
      // Keep accumulating chunks until the extension can be detected
      this.#buff = this.#buff ? Buffer.concat([this.#buff, line]) : line;
      await this.#detectFileTypeAndExtension();
    } else {
      // Extension known: release the buffered memory
      this.#buff = void 0;
    }
    this.file.size = this.file.size + bufferLength;
    if (this.#options.deferValidations) {
      return;
    }
    this.file.validate();
    // Emit the validation error only once (data/error event race otherwise
    // triggers multiple emits)
    if (!this.file.isValid && !this.#emittedValidationError) {
      this.#emittedValidationError = true;
      this.#part.emit(
        "error",
        new Exception2("one or more validations failed", {
          code: "E_STREAM_VALIDATION_FAILURE",
          status: 400
        })
      );
    }
  }
  /**
   * Report errors encountered while processing the stream. These can be errors
   * apart from the one reported by this class. For example: The `s3` failure
   * due to some bad credentials.
   */
  async reportError(error) {
    if (this.file.state !== "streaming") {
      return;
    }
    this.#skipEndStream();
    this.#finish();
    // Validation failures were already recorded on the file by validate();
    // only foreign errors are pushed as fatal errors
    if (error.code === "E_STREAM_VALIDATION_FAILURE") {
      return;
    }
    this.file.errors.push({
      fieldName: this.file.fieldName,
      clientName: this.file.clientName,
      type: "fatal",
      message: error.message
    });
  }
  /**
   * Report success data about the file. Optional `filePath`/`tmpPath`
   * are copied onto the file; all remaining keys become `file.meta`.
   */
  async reportSuccess(data) {
    if (this.file.state !== "streaming") {
      return;
    }
    // Last chance to detect the extension (e.g. for very small files
    // fully consumed before detection succeeded)
    if (this.file.extname === void 0) {
      await this.#detectFileTypeAndExtension();
    }
    if (data) {
      const { filePath, tmpPath, ...meta } = data;
      if (filePath) {
        this.file.filePath = filePath;
      }
      if (tmpPath) {
        this.file.tmpPath = tmpPath;
      }
      this.file.meta = meta || {};
    }
    this.#finish();
  }
};
335
336// src/multipart/main.ts
var Multipart = class {
  /** HTTP context owning the request being parsed */
  #ctx;
  /** Multipart config (limits, maxFields, convertEmptyStringsToNull, ...) */
  #config;
  /**
   * The registered handlers to handle the file uploads
   */
  #handlers = {};
  /**
   * Collected fields from the multipart stream
   */
  #fields;
  /**
   * Collected files from the multipart stream. Files are only collected
   * when there is an attached listener for a given file.
   */
  #files;
  /**
   * We track the finishing of `this.onFile` async handlers
   * to make sure that `process` promise resolves for all
   * handlers to finish.
   */
  #pendingHandlers = 0;
  /**
   * The reference to underlying multiparty form
   */
  #form;
  /**
   * Total size limit of the multipart stream. If it goes beyond
   * the limit, then an exception will be raised.
   */
  #upperLimit;
  /**
   * Total size in bytes for all the fields (not the files)
   */
  #maxFieldsSize;
  /**
   * A track of total number of file bytes processed so far
   */
  #processedBytes = 0;
  /**
   * The current state of the multipart form handler:
   * "idle" | "processing" | "success" | "error"
   */
  state = "idle";
  constructor(ctx, config = {}) {
    this.#ctx = ctx;
    this.#config = config;
    this.#fields = new FormFields({
      convertEmptyStringsToNull: this.#config.convertEmptyStringsToNull === true
    });
    this.#files = new FormFields({
      convertEmptyStringsToNull: this.#config.convertEmptyStringsToNull === true
    });
  }
  /**
   * Returns a boolean telling whether all streams have been
   * consumed along with all handlers execution.
   * "flushing" is multiparty's internal count of in-flight parts.
   */
  #isClosed() {
    return this.#form["flushing"] <= 0 && this.#pendingHandlers <= 0;
  }
  /**
   * Removes array like expression from the part name to
   * find the handler (e.g. "avatar[0]" -> "avatar")
   */
  #getHandlerName(name) {
    return name.replace(/\[\d*\]/, "");
  }
  /**
   * Validates and returns an error when upper limit is defined and
   * processed bytes is over the upper limit. Returns undefined when
   * within bounds or when no limit is configured.
   */
  #validateProcessedBytes(chunkLength) {
    if (!this.#upperLimit) {
      return;
    }
    this.#processedBytes += chunkLength;
    if (this.#processedBytes > this.#upperLimit) {
      return new Exception3("request entity too large", {
        code: "E_REQUEST_ENTITY_TOO_LARGE",
        status: 413
      });
    }
  }
  /**
   * Handles a given part by invoking it's handler or
   * by resuming the part, if there is no defined
   * handler
   */
  async #handlePart(part) {
    // Parts without a name or filename are not file uploads; drain them
    if (!part.name || !part.filename) {
      part.resume();
      return;
    }
    const name = this.#getHandlerName(part.name);
    // An exact-name handler wins over the wildcard "*" handler
    const handler = this.#handlers[name] || this.#handlers["*"];
    if (!handler) {
      debug_default('skipping multipart part as there are no handlers "%s"', name);
      part.resume();
      return;
    }
    debug_default('processing multipart part "%s"', name);
    this.#pendingHandlers++;
    const partHandler = new PartHandler(part, handler.options);
    partHandler.begin();
    this.#files.add(partHandler.file.fieldName, partHandler.file);
    part.file = partHandler.file;
    try {
      // The second argument is the per-chunk progress reporter handed to
      // the user handler; it enforces the total-size limit and feeds the
      // PartHandler state machine
      const response = await handler.handler(part, async (line) => {
        if (this.state !== "processing") {
          return;
        }
        const lineLength = line.length;
        const error = this.#validateProcessedBytes(lineLength);
        if (error) {
          part.emit("error", error);
          this.abort(error);
          return;
        }
        try {
          await partHandler.reportProgress(line, lineLength);
        } catch (err) {
          // Errors from reportProgress have nowhere safe to propagate;
          // surface them loudly via the app logger instead
          this.#ctx.logger.fatal(
            'Unhandled multipart stream error. Make sure to handle "error" events for all manually processed streams'
          );
        }
      });
      await partHandler.reportSuccess(response || {});
    } catch (error) {
      await partHandler.reportError(error);
    }
    this.#pendingHandlers--;
  }
  /**
   * Record the fields inside multipart contract
   */
  #handleField(key, value) {
    if (!key) {
      return;
    }
    this.#fields.add(key, value);
  }
  /**
   * Processes the user config and computes the `upperLimit` value from
   * it. String limits (e.g. "20mb") are converted to byte counts.
   */
  #processConfig(config) {
    this.#config = Object.assign(this.#config, config);
    this.#maxFieldsSize = typeof this.#config.fieldsLimit === "string" ? bytes(this.#config.fieldsLimit) : this.#config.fieldsLimit;
    this.#upperLimit = typeof this.#config.limit === "string" ? bytes(this.#config.limit) : this.#config.limit;
  }
  /**
   * Mark the process as finished. Only transitions from "idle" or
   * "processing"; once settled the state never changes again.
   */
  #finish(newState) {
    if (this.state === "idle" || this.state === "processing") {
      this.state = newState;
      this.#ctx.request["__raw_files"] = this.#files.get();
      this.#ctx.request.setInitialBody(this.#fields.get());
    }
  }
  /**
   * Attach handler for a given file. To handle all files, you
   * can attach a wildcard handler.
   *
   * @example
   * ```ts
   * multipart.onFile('package', {}, async (stream) => {
   * })
   *
   * multipart.onFile('*', {}, async (stream) => {
   * })
   * ```
   */
  onFile(name, options, handler) {
    this.#handlers[name] = { handler, options };
    return this;
  }
  /**
   * Abort request by emitting error
   */
  abort(error) {
    this.#form.emit("error", error);
  }
  /**
   * Process the request by going all the file and field
   * streams.
   */
  process(config) {
    return new Promise((resolve, reject) => {
      // The stream can be consumed exactly once
      if (this.state !== "idle") {
        reject(
          new Exception3("multipart stream has already been consumed", {
            code: "E_RUNTIME_EXCEPTION"
          })
        );
        return;
      }
      this.state = "processing";
      this.#processConfig(config);
      this.#form = new multiparty.Form({
        maxFields: this.#config.maxFields,
        maxFieldsSize: this.#maxFieldsSize
      });
      debug_default("processing multipart body");
      this.#form.on("error", (error) => {
        this.#finish("error");
        // Defer to next tick so the request stream can be drained before
        // the promise rejects; multiparty errors are translated into
        // Adonis-style exceptions by matching their messages
        process.nextTick(() => {
          if (this.#ctx.request.request.readable) {
            this.#ctx.request.request.resume();
          }
          if (error.message.match(/stream ended unexpectedly/)) {
            reject(
              new Exception3("Invalid multipart request", {
                status: 400,
                code: "E_INVALID_MULTIPART_REQUEST"
              })
            );
          } else if (error.message.match(/maxFields [0-9]+ exceeded/)) {
            reject(
              new Exception3("Fields length limit exceeded", {
                status: 413,
                code: "E_REQUEST_ENTITY_TOO_LARGE"
              })
            );
          } else if (error.message.match(/maxFieldsSize [0-9]+ exceeded/)) {
            reject(
              new Exception3("Fields size in bytes exceeded", {
                status: 413,
                code: "E_REQUEST_ENTITY_TOO_LARGE"
              })
            );
          } else {
            reject(error);
          }
        });
      });
      // Resolve either after the last part's handler completes or on
      // "close", whichever observes the fully-drained state last
      this.#form.on("part", async (part) => {
        await this.#handlePart(part);
        if (this.#isClosed()) {
          this.#finish("success");
          resolve();
        }
      });
      this.#form.on("field", (key, value) => {
        try {
          this.#handleField(key, value);
        } catch (error) {
          this.abort(error);
        }
      });
      this.#form.on("close", () => {
        if (this.#isClosed()) {
          this.#finish("success");
          resolve();
        }
      });
      this.#form.parse(this.#ctx.request.request);
    });
  }
};
597
598// src/multipart/stream_file.ts
599import { unlink } from "node:fs/promises";
600import { createWriteStream } from "node:fs";
601import { pipeline } from "node:stream/promises";
/**
 * Streams a readable stream to a file at the given location.
 *
 * When a `dataListener` function is provided, it is attached as a
 * "data" listener so the caller can observe every chunk (the stream
 * is paused first; pipeline resumes it).
 *
 * On failure the partially written file is removed before the error
 * is rethrown, so callers never observe a failure with a stale file
 * left on disk.
 *
 * @param readStream source readable stream
 * @param {string} location destination file path
 * @param {(chunk: any) => void} [dataListener] optional per-chunk observer
 * @throws rethrows any pipeline error after cleanup
 */
async function streamFile(readStream, location, dataListener) {
  if (typeof dataListener === "function") {
    readStream.pause();
    readStream.on("data", dataListener);
  }
  const writeStream = createWriteStream(location);
  try {
    await pipeline(readStream, writeStream);
  } catch (error) {
    // Await the cleanup (previously a floating promise) so the partial
    // file is gone before the caller sees the rejection. Unlink errors
    // are ignored — the file may never have been created.
    await unlink(writeStream.path).catch(() => {});
    throw error;
  }
}
616
617// src/bindings/request.ts
618import lodash2 from "@poppinss/utils/lodash";
619import { Request } from "@adonisjs/http-server";
620import { RuntimeException } from "@poppinss/utils";
/**
 * Copies user supplied validation options onto the file instance.
 * Options already present on the file (set during multipart
 * processing) are never overridden.
 *
 * @param file MultipartFile instance to configure
 * @param [options] optional `{ size, extnames }` validation options
 */
function setFileOptions(file, options) {
  if (file.sizeLimit === void 0 && options?.size) {
    file.sizeLimit = options.size;
  }
  if (file.allowedExtensions === void 0 && options?.extnames) {
    file.allowedExtensions = options.extnames;
  }
}
/**
 * Returns true when the given value is a MultipartFile instance.
 *
 * Always returns a boolean. The previous implementation returned the
 * falsy input itself (e.g. `null`/`undefined`/`0`) via `file && ...`,
 * leaking the value to callers; truthiness-based callers are unaffected.
 *
 * @param {*} file value to check
 * @returns {boolean}
 */
function isInstanceOfFile(file) {
  return Boolean(file) && file instanceof MultipartFile;
}
debug_default('extending request class with "file", "files" and "allFiles" macros');

// Include uploaded files in the request's JSON representation
Request.macro("toJSON", function() {
  return {
    ...this.serialize(),
    files: this["__raw_files"] || {}
  };
});

// request.file(key, options): returns a single validated file or null.
// When the key holds an array of files, the first one is returned.
Request.macro(
  "file",
  function getFile(key, options) {
    let file = lodash2.get(this.allFiles(), key);
    file = Array.isArray(file) ? file[0] : file;
    if (!isInstanceOfFile(file)) {
      return null;
    }
    setFileOptions(file, options);
    file.validate();
    return file;
  }
);

// request.files(key, options): always returns an array of validated
// files (possibly empty); non-file values are filtered out.
Request.macro(
  "files",
  function getFiles(key, options) {
    let files = lodash2.get(this.allFiles(), key);
    files = Array.isArray(files) ? files : files ? [files] : [];
    return files.filter(isInstanceOfFile).map((file) => {
      setFileOptions(file, options);
      file.validate();
      return file;
    });
  }
);

// request.allFiles(): raw files tree populated by the bodyparser
// middleware; throws when the middleware never ran for this request.
Request.macro("allFiles", function allFiles() {
  if (!this.__raw_files) {
    throw new RuntimeException(
      "Cannot read files. Make sure the bodyparser middleware is registered"
    );
  }
  return this["__raw_files"];
});
672
673// src/parsers/text.ts
674import raw3 from "raw-body";
675import inflate3 from "inflation";
/**
 * Reads the request body as a plain text string, inflating any
 * compressed content-encoding first.
 *
 * @param req HTTP request (IncomingMessage-like, with `headers`)
 * @param options raw-body options (encoding/limit/length overrides)
 * @returns {Promise<string>} the raw body text
 */
function parseText(req, options) {
  const config = { encoding: "utf8", limit: "1mb", length: 0, ...options };

  // Trust content-length only for uncompressed (identity) bodies
  const reportedLength = req.headers["content-length"];
  const bodyEncoding = req.headers["content-encoding"] || "identity";
  if (reportedLength && bodyEncoding === "identity") {
    config.length = ~~reportedLength;
  }

  return raw3(inflate3(req), config);
}
692
693// src/bodyparser_middleware.ts
var BodyParserMiddleware = class {
  /**
   * Bodyparser config
   */
  #config;
  constructor(config) {
    this.#config = config;
    debug_default("using config %O", this.#config);
  }
  /**
   * Returns config for a given type ("multipart" | "form" | "json" | "raw")
   */
  #getConfigFor(type) {
    return this.#config[type];
  }
  /**
   * Ensures that types exists and have length
   */
  #ensureTypes(types) {
    return !!(types && types.length);
  }
  /**
   * Returns a boolean telling if request `content-type` header
   * matches the expected types or not
   */
  #isType(request, types) {
    return !!(this.#ensureTypes(types) && request.is(types));
  }
  /**
   * Returns a proper Adonis style exception for popular error codes
   * returned by https://github.com/stream-utils/raw-body#readme.
   * Unknown errors are returned unchanged.
   */
  #getExceptionFor(error) {
    switch (error.type) {
      case "encoding.unsupported":
        return new Exception4(error.message, {
          status: error.status,
          code: "E_ENCODING_UNSUPPORTED"
        });
      case "entity.too.large":
        return new Exception4(error.message, {
          status: error.status,
          code: "E_REQUEST_ENTITY_TOO_LARGE"
        });
      case "request.aborted":
        return new Exception4(error.message, { status: error.status, code: "E_REQUEST_ABORTED" });
      default:
        return error;
    }
  }
  /**
   * Returns the tmp path for storing the files temporarily.
   * A user-provided `tmpFileName` function may return an absolute
   * path; relative names are resolved inside the OS tmp directory.
   */
  #getTmpPath(config) {
    if (typeof config.tmpFileName === "function") {
      const tmpPath = config.tmpFileName();
      return isAbsolute(tmpPath) ? tmpPath : join(tmpdir(), tmpPath);
    }
    return join(tmpdir(), createId());
  }
  /**
   * Handle HTTP request body by parsing it as per the user
   * config. Tries multipart, urlencoded, JSON and raw-text
   * parsers in that order; falls through to `next()` when no
   * content-type matches.
   */
  async handle(ctx, next) {
    ctx.request["__raw_files"] = {};
    const requestUrl = ctx.request.url();
    const requestMethod = ctx.request.method();
    /**
     * Only parse bodies for configured methods and when a body exists
     */
    if (!this.#config.allowedMethods.includes(requestMethod)) {
      debug_default('skipping HTTP request "%s:%s"', requestMethod, requestUrl);
      return next();
    }
    if (!ctx.request.hasBody()) {
      debug_default('skipping as request has no body "%s:%s"', requestMethod, requestUrl);
      return next();
    }
    const multipartConfig = this.#getConfigFor("multipart");
    if (this.#isType(ctx.request, multipartConfig.types)) {
      debug_default('detected multipart request "%s:%s"', requestMethod, requestUrl);
      ctx.request.multipart = new Multipart(ctx, {
        maxFields: multipartConfig.maxFields,
        limit: multipartConfig.limit,
        fieldsLimit: multipartConfig.fieldsLimit,
        convertEmptyStringsToNull: multipartConfig.convertEmptyStringsToNull
      });
      /**
       * Auto-processing can be disabled globally, per-route via
       * `processManually`, or via an `autoProcess` route allowlist
       */
      if (multipartConfig.autoProcess === false) {
        debug_default('skipping auto processing of multipart request "%s:%s"', requestMethod, requestUrl);
        return next();
      }
      if (ctx.route && multipartConfig.processManually.includes(ctx.route.pattern)) {
        debug_default('skipping auto processing of multipart request "%s:%s"', requestMethod, requestUrl);
        return next();
      }
      if (ctx.route && Array.isArray(multipartConfig.autoProcess) && !multipartConfig.autoProcess.includes(ctx.route.pattern)) {
        debug_default('skipping auto processing of multipart request "%s:%s"', requestMethod, requestUrl);
        return next();
      }
      debug_default('auto processing multipart request "%s:%s"', requestMethod, requestUrl);
      /**
       * Wildcard handler streams every file to a tmp path; validations
       * are deferred so user code can validate via request.file(s)
       */
      ctx.request.multipart.onFile("*", { deferValidations: true }, async (part, reporter) => {
        try {
          const tmpPath = this.#getTmpPath(multipartConfig);
          await streamFile(part, tmpPath, reporter);
          return { tmpPath };
        } catch (error) {
          ctx.request.multipart.abort(error);
        }
      });
      // NOTE: the previous `try { ... } catch (error) { throw error; }`
      // wrapper was a no-op and has been removed; rejections still
      // propagate to the caller unchanged
      await ctx.request.multipart.process();
      return next();
    }
    const formConfig = this.#getConfigFor("form");
    if (this.#isType(ctx.request, formConfig.types)) {
      debug_default('detected urlencoded request "%s:%s"', requestMethod, requestUrl);
      try {
        const { parsed, raw: raw4 } = await parseForm(ctx.request.request, formConfig);
        ctx.request.setInitialBody(parsed);
        ctx.request.updateRawBody(raw4);
        return next();
      } catch (error) {
        throw this.#getExceptionFor(error);
      }
    }
    const jsonConfig = this.#getConfigFor("json");
    if (this.#isType(ctx.request, jsonConfig.types)) {
      debug_default('detected JSON request "%s:%s"', requestMethod, requestUrl);
      try {
        const { parsed, raw: raw4 } = await parseJSON(ctx.request.request, jsonConfig);
        ctx.request.setInitialBody(parsed);
        ctx.request.updateRawBody(raw4);
        return next();
      } catch (error) {
        throw this.#getExceptionFor(error);
      }
    }
    const rawConfig = this.#getConfigFor("raw");
    if (this.#isType(ctx.request, rawConfig.types)) {
      debug_default('parsing raw body "%s:%s"', requestMethod, requestUrl);
      try {
        ctx.request.setInitialBody({});
        ctx.request.updateRawBody(await parseText(ctx.request.request, rawConfig));
        return next();
      } catch (error) {
        throw this.#getExceptionFor(error);
      }
    }
    await next();
  }
};
847
848export {
849 BodyParserMiddleware
850};
851//# sourceMappingURL=chunk-XVMZ4GI3.js.map
\No newline at end of file