require("./json") // loaded for side effects (JSON.setForm used below)

var fs = require("fs")
, Writable = require("stream").Writable
, accept = require("./accept.js").accept
, util = require("./util")
, rnrn = Buffer.from("\r\n\r\n")
// content negotiation: map supported media types to body parsers
// ("multipart/*" has no string parser; it is streamed by multipart() below)
, negotiateContent = accept({
	"application/json": function(str) {
		return JSON.parse(str || "{}")
	},
	"application/x-www-form-urlencoded": querystring,
	"multipart/*;boundary=": null,
	"text/csv;br=\"\r\n\";delimiter=\",\";fields=;header=;NULL=;select=": require("./csv.js").decode
})
, negotiateDisposition = accept([
	"form-data;name=;filename="
])
, seq = 0
// Content-Encoding tokens mapped to their zlib decompressor factories
, decompress = {
	br: "createBrotliDecompress",
	gzip: "createUnzip",
	deflate: "createUnzip"
}

// precompute the Boyer-Moore-Horspool jump table for the header/body separator
makeTable(rnrn)

module.exports = getContent
getContent.querystring = querystring

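// Usage sketch (illustrative, not part of this module): getContent expects the
// request as `this` and reads fallbacks from `req.opts`, so a server might do
//
//     getContent.call(req, function(err, body, files, negod) {
//         if (err) return sendError(err)  // sendError is hypothetical
//         // body is parsed according to the negotiated Content-Type
//     }, { maxBodySize: 2e6 })
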
function getContent(next, reqOpts) {
	var i, tmp
	, req = this
	, head = req.headers
	, negod = negotiateContent(head["content-type"] || head.accept)
	, stream = req
	, maxBodySize = util.num(reqOpts && reqOpts.maxBodySize, req.opts.maxBodySize, 1e6)

	if (!negod.match) {
		return handleEnd("Unsupported Media Type")
	}

	req.body = {}

	if (head["content-encoding"]) {
		tmp = head["content-encoding"].split(/\W+/)
		// decode in reverse: the encoding applied last must be undone first
		for (i = tmp.length; i--; ) {
			if (req.opts.compress && decompress[tmp[i]]) {
				stream = stream.pipe(require("zlib")[decompress[tmp[i]]]({
					maxOutputLength: maxBodySize
				}))
			} else if (tmp[i] && tmp[i] !== "identity") {
				return handleEnd("Unsupported Media Type")
			}
		}
	}

	if (negod.type === "multipart") {
		stream = stream
		.pipe(multipart(negod.boundary, reqOpts || {}, req))
		.on("finish", handleEnd)
	} else {
		tmp = ""
		stream.on("data", function handleData(data) {
			tmp += data
			// FLOOD ATTACK OR FAULTY CLIENT, NUKE REQUEST
			if (tmp.length > maxBodySize) {
				handleEnd("Payload Too Large") // 413 Payload Too Large
				stream.destroy()
			}
		})
		.on("end", handleEnd)
	}

	stream.on("error", handleEnd)

	function handleEnd(err) {
		if (next) {
			if (err) next(err)
			else try {
				// negod.o holds the body parser selected during negotiation
				if (negod.o) req.body = negod.o(tmp, negod)
				next(null, req.body, req.files, negod)
			} catch (e) {
				next(e)
			}
			next = null
			// remove temporary upload files once the callback has run
			if (req.files) {
				for (var i = req.files.length; i--; ) {
					if (req.files[i].tmp) fs.unlink(req.files[i].tmp, util.nop)
				}
			}
		}
	}
}

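// For example, "Content-Encoding: gzip, br" is undone right to left (when
// req.opts.compress is set): the raw request is piped through
// createBrotliDecompress() first, then through createUnzip().
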
function querystring(str) {
	var step, map = {}
	if (typeof str === "string" && str !== "") {
		var arr = str.split("&")
		, i = 0
		, l = arr.length
		for (; i < l; ) {
			step = arr[i++].replace(/\+/g, " ").split("=")
			JSON.setForm(map, unescape(step[0]), unescape(step[1] || ""))
		}
	}
	return map
}

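// For example (assuming JSON.setForm from ./json assigns plain keys directly):
//
//     querystring("a=1&b=hello+world")  // -> { a: "1", b: "hello world" }
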
function multipart(boundary, reqOpts, req) {
	// the boundary needle includes the CRLF that terminates the preceding part
	makeTable(boundary = Buffer.from("\r\n--" + boundary))

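	// The parser consumes a stream of this shape (boundary "AaB03x" is
	// illustrative, per RFC 2046 / RFC 7578):
	//
	//     --AaB03x\r\n
	//     Content-Disposition: form-data; name="field1"\r\n
	//     \r\n
	//     value1\r\n
	//     --AaB03x\r\n
	//     Content-Disposition: form-data; name="file1"; filename="a.txt"\r\n
	//     Content-Type: text/plain\r\n
	//     \r\n
	//     ...file content...\r\n
	//     --AaB03x--
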
	var headers, fileStream
	, negod = reqOpts.preamble && { preamble: true }
	, needle = boundary
	// seeded with CRLF because the first boundary on the wire lacks one
	, bufs = [rnrn.slice(2)]
	, bufsBytes = 2
	// earliest offset in the next chunk where the needle's last byte can fall
	, nextPos = needle.length - 3
	, remainingFields = util.num(reqOpts.maxFields, req.opts.maxFields, 1000)
	, remainingFiles = util.num(reqOpts.maxFiles, req.opts.maxFiles, 1000)
	, savePath = (reqOpts.tmp || req.opts.tmp) + "-" + (seq++)
	, writable = {
		write: function(chunk, enc, cb) {
			var buf, bufNum, i, j
			, writable = this
			, pos = nextPos
			, len = chunk.length
			, last = needle[needle.length - 1]
			, cut = 0

			// the needle cannot end inside this chunk yet, so just buffer it
			if (pos > len) {
				bufs.push(chunk)
				bufsBytes += len
				nextPos -= len
				return cb()
			}

			// Horspool scan: test the needle's last byte first, then compare
			// backwards, possibly reaching into previously buffered chunks
			jump:for (; pos < len; ) {
				if (chunk[pos] === last) {
					buf = chunk
					bufNum = bufs.length
					i = needle.length - 1
					j = pos
					for (; i > 0; ) {
						if (j < 1 && bufNum > 0) {
							buf = bufs[--bufNum]
							j = buf.length
						}
						if (needle[--i] !== buf[--j]) {
							pos += needle.jump[last]
							continue jump
						}
					}
					// match found
					if (bufsBytes > 0) {
						bufs.push(chunk)
						buf = Buffer.concat(bufs, pos + bufsBytes - needle.length + 1)
						bufsBytes = bufs.length = 0
					} else if (cut > 0 || pos < len - 1) {
						buf = buf.slice(cut, pos - needle.length + 1)
					}
					if (needle === boundary) {
						// boundary hit: `buf` holds a field value or a file tail
						if (negod) {
							if (remainingFields-- < 1) return writable.destroy({ code: 413, message: "maxFields exceeded" })
							if (negod.preamble) {
								// toString(_, 2) skips the two seeded CRLF bytes
								req.emit("preamble", req.preamble = buf.toString("utf8", 2))
							} else {
								JSON.setForm(req.body, negod.name, buf.toString())
							}
							negod = null
						} else if (fileStream) {
							fileStream.end(buf)
							fileStream = null
						}
						// a part header block terminated by CRLFCRLF follows
						needle = rnrn
					} else {
						// content start: `buf` holds the part's header block
						headers = parseHeaders(buf.toString())
						negod = negotiateDisposition(headers["content-disposition"])
						negod.headers = headers

						if (negod.filename) {
							if (remainingFiles-- < 1) return writable.destroy({ code: 413, message: "maxFiles exceeded" })
							if (!req.files) req.files = []
							req.files.push(negod)
							negod.tmp = savePath + "-" + remainingFiles
							req.emit("file", negod, saveTo)
							if (!fileStream) {
								saveTo(negod.tmp)
							}
						}
						needle = boundary
					}
					cut = pos + 1
					last = needle[needle.length - 1]
					pos += needle.length
				} else {
					pos += needle.jump[chunk[pos]]
				}
			}

			nextPos = pos - len

			if (cut < len) {
				// keep the unconsumed tail for the next write
				bufs.push(cut ? chunk.slice(cut) : chunk)
				bufsBytes += bufs[bufs.length - 1].length
			}

			if (fileStream) {
				// flush buffered chunks to the file oldest-first, always retaining
				// more than one needle length in case a boundary straddles chunks
				for (; bufs.length > 1 && bufsBytes - bufs[0].length > needle.length; ) {
					bufsBytes -= bufs[0].length
					if (!fileStream.write(bufs.shift())) {
						return fileStream.once("drain", cb)
					}
				}
			}

			cb()
		}
	}

	if (reqOpts && reqOpts.epilogue) {
		writable.final = function(cb) {
			// whatever trailed the closing boundary, minus its leading "--\r\n"
			req.epilogue = Buffer.concat(bufs).toString("utf8", 4)
			cb()
		}
	}

	return new Writable(writable)

	// a "file" listener may call back with a path or a Writable to divert the upload
	function saveTo(stream) {
		fileStream = (
			typeof stream === "string" ?
			fs.createWriteStream(negod.tmp = stream) :
			stream
		)
		negod = null
	}
}

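// A consumer may claim an upload before the default tmp file is created
// (illustrative listener; the target path is hypothetical):
//
//     req.on("file", function(part, saveTo) {
//         saveTo("/uploads/" + part.filename)  // a path or a Writable stream
//     })
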
// A multipart/form-data part accepts only Content-Type, Content-Disposition,
// and (in limited circumstances) Content-Transfer-Encoding.
// Other header fields MUST NOT be included and MUST be ignored.

// Content-Transfer-Encoding: 7bit / 8bit / binary / quoted-printable / base64 / ietf-token / x-token
//
// User Agents that recognize Multipart/Related will ignore the Content-Disposition header's disposition type.
// Other User Agents will process the Multipart/Related as Multipart/Mixed and may make use of that header's information.

function parseHeaders(str) {
	// RFC 7578, 4.8. Other "Content-" Header Fields
	var i
	, headers = {}
	, lines = str.split("\r\n")
	, len = lines.length
	for (; len; ) {
		i = lines[--len].indexOf(":")
		if (i > 0) {
			headers[
				lines[len].slice(0, i).toLowerCase()
			] = lines[len].slice(i + 1).trim()
		}
	}
	return headers
}

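// For example:
//
//     parseHeaders('Content-Disposition: form-data; name="a"\r\nContent-Type: text/plain')
//     // -> { "content-disposition": 'form-data; name="a"', "content-type": "text/plain" }
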
function makeTable(buf) {
	// Boyer-Moore-Horspool bad-character table: how far the search may jump
	// when the byte at the window's end does not complete a match
	var len = buf.length
	, i = 0
	, pos = len - 1
	, jump = buf.jump = new Uint8Array(256).fill(len)

	for (; i < pos; ++i) {
		jump[buf[i]] = pos - i
	}
}

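// Worked example: for the needle "\r\n--AaB03x" (length 10) the table maps
// each needle byte except the last to its distance from the final index:
// jump["\r"] = 9, jump["\n"] = 8, jump["-"] = 6 (the later of the two dashes
// wins), jump["A"] = 5, ..., jump["3"] = 1; every other byte maps to 10.
// On a mismatch the window slides forward by the table value of the byte
// under its last position.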