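// Request body reader: negotiates Content-Type (JSON, urlencoded, CSV,
// streaming multipart/form-data), inflates br/gzip/deflate content
// encodings, and enforces maxBodySize/maxFields/maxFiles limits.
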
require("./json") // side effect: provides JSON.setForm used below

var fs = require("fs")
, Writable = require("stream").Writable
, accept = require("./accept.js").accept
, util = require("./util")
, rnrn = Buffer.from("\r\n\r\n")
, negotiateContent = accept({
	"application/json": function(str) {
		return JSON.parse(str || "{}")
	},
	"application/x-www-form-urlencoded": querystring,
	"multipart/*;boundary=": null,
	"text/csv;br=\"\r\n\";delimiter=\",\";fields=;header=;NULL=;select=": require("./csv.js").decode
})
, negotiateDisposition = accept([
	"form-data;name=;filename="
])
, seq = 0
// Content-Encoding token -> zlib factory name
, decompress = {
	br: "createBrotliDecompress",
	gzip: "createUnzip",
	deflate: "createUnzip"
}

// Precompute the Boyer-Moore-Horspool jump table for the "\r\n\r\n" header terminator.
makeTable(rnrn)

module.exports = getContent
getContent.querystring = querystring

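// getContent(next[, reqOpts]) -- body reader; call with `this` bound to the
// request. Invokes next(err) or next(null, body, files, negod).
//
// Minimal usage sketch (assumes an http.IncomingMessage that the surrounding
// framework has decorated with an `opts` object, plus a paired `res`):
//
//   getContent.call(req, function(err, body, files) {
//     if (err) return res.end(String(err))
//     res.end(JSON.stringify(body))
//   }, { maxBodySize: 1e6 })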
function getContent(next, reqOpts) {
	var i, tmp
	, req = this
	, head = req.headers
	, negod = negotiateContent(head["content-type"] || head.accept)
	, stream = req
	, maxBodySize = util.num(reqOpts && reqOpts.maxBodySize, req.opts.maxBodySize, 1e6)

	if (!negod.match) {
		return handleEnd("Unsupported Media Type")
	}

	req.body = {}

	if (head["content-encoding"]) {
		// Unwind encodings right to left: the last one listed was applied last.
		tmp = head["content-encoding"].split(/\W+/)
		for (i = tmp.length; i--; ) {
			if (req.opts.compress && decompress[tmp[i]]) {
				stream = stream.pipe(require("zlib")[decompress[tmp[i]]]({
					maxOutputLength: maxBodySize
				}))
			} else if (tmp[i] && tmp[i] !== "identity") {
				return handleEnd("Unsupported Media Type")
			}
		}
	}

	if (negod.type === "multipart") {
		// Remove temporary upload files once the response is closed.
		;(req.res || req).on("close", function() {
			if (req.files) for (var i = req.files.length; i--; ) {
				if (req.files[i].tmp) fs.unlink(req.files[i].tmp, util.nop)
			}
		})
		stream = stream
		.pipe(multipart(negod.boundary, reqOpts || {}, req))
		.on("finish", handleEnd)
	} else {
		tmp = ""
		stream.on("data", function handleData(data) {
			tmp += data
			// FLOOD ATTACK OR FAULTY CLIENT, NUKE REQUEST
			if (tmp.length > maxBodySize) {
				handleEnd("Payload Too Large") // 413 Payload Too Large
				stream.destroy()
			}
		})
		.on("end", handleEnd)
	}

	stream.on("error", handleEnd)

	function handleEnd(err) {
		if (next) {
			if (err) next(err)
			else try {
				// negod.o is the body decoder selected by negotiateContent, if any.
				if (negod.o) req.body = negod.o(tmp, negod)
				next(null, req.body, req.files, negod)
			} catch (e) {
				next(e)
			}
			next = null
		}
	}
}

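// Example (flat keys; assumes JSON.setForm from ./json assigns plain keys
// directly, which is how it is used here):
//   getContent.querystring("a=1&b=two+words")  // -> { a: "1", b: "two words" }
// Note decoding uses the legacy global unescape(), not decodeURIComponent().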
function querystring(str) {
	var step, map = {}
	if (typeof str === "string" && str !== "") {
		var arr = str.split("&")
		, i = 0
		, l = arr.length
		for (; i < l; ) {
			// "+" decodes to a space before unescaping key and value.
			step = arr[i++].replace(/\+/g, " ").split("=")
			JSON.setForm(map, unescape(step[0]), unescape(step[1] || ""))
		}
	}
	return map
}

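// multipart(boundary, reqOpts, req) returns a Writable that scans the
// incoming stream for "\r\n--boundary" with a Boyer-Moore-Horspool jump
// table, emits "file" events for file parts (saved under reqOpts.tmp or
// req.opts.tmp by default), and collects plain fields into req.body.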
function multipart(boundary, reqOpts, req) {
	// The full part delimiter is CRLF + "--" + boundary.
	makeTable(boundary = Buffer.from("\r\n--" + boundary))

	var headers, fileStream
	, negod = reqOpts.preamble && { preamble: true }
	, needle = boundary
	// Seed with "\r\n" so the leading "--boundary" matches like any later delimiter.
	, bufs = [rnrn.slice(2)]
	, bufsBytes = 2
	, nextPos = needle.length - 3
	, remainingFields = util.num(reqOpts.maxFields, req.opts.maxFields, 1000)
	, remainingFiles = util.num(reqOpts.maxFiles, req.opts.maxFiles, 1000)
	, savePath = (reqOpts.tmp || req.opts.tmp) + "-" + (seq++)
	, writable = {
		write: function(chunk, enc, cb) {
			var buf, bufNum, i, j
			, writable = this
			, pos = nextPos
			, len = chunk.length
			, last = needle.readUInt8(needle.length - 1)
			, cut = 0

			// Chunk too short to complete the needle: buffer it whole.
			if (pos > len) {
				bufs.push(chunk)
				bufsBytes += len
				nextPos -= len
				return cb()
			}

			// Horspool scan: test the needle's last byte first, then verify
			// backwards, walking into buffered chunks when needed.
			jump:for (; pos < len; ) {
				if (chunk.readUInt8(pos) === last) {
					buf = chunk
					bufNum = bufs.length
					i = needle.length - 1
					j = pos
					for (; i > 0; ) {
						if (j < 1 && bufNum > 0) {
							buf = bufs[--bufNum]
							j = buf.length
						}
						if (needle.readUInt8(--i) !== buf.readUInt8(--j)) {
							pos += needle.jump[last]
							continue jump
						}
					}
					// match found: collect everything before the needle
					if (bufsBytes > 0) {
						bufs.push(chunk)
						buf = Buffer.concat(bufs, pos + bufsBytes - needle.length + 1)
						bufsBytes = bufs.length = 0
					} else if (cut > 0) {
						buf = buf.slice(cut, pos - needle.length + 1)
					}
					if (needle === boundary) {
						// Part body complete: a plain field, the preamble, or a file.
						if (negod) {
							if (remainingFields-- < 1) return cb({ code: 413, message: "maxFields exceeded"})
							if (negod.preamble) {
								req.emit("preamble", req.preamble = buf.toString("utf8", 2))
							} else {
								JSON.setForm(req.body, negod.name, buf.toString())
							}
							negod = null
						} else if (fileStream) {
							fileStream.end(buf)
							fileStream = null
						}
						// Next stop: the blank line that terminates the part headers.
						needle = rnrn
					} else {
						// content start: part headers are complete
						headers = parseHeaders(buf.toString())
						negod = negotiateDisposition(headers["content-disposition"])
						negod.headers = headers

						if (negod.filename) {
							if (remainingFiles-- < 1) return cb({ code: 413, message: "maxFiles exceeded"})
							if (!req.files) req.files = []
							req.files.push(negod)
							negod.tmp = savePath + "-" + remainingFiles
							// A "file" listener may call saveTo with its own path or stream.
							req.emit("file", negod, saveTo)
							if (!fileStream) {
								saveTo(negod.tmp)
							}
						}
						needle = boundary
					}
					cut = pos + 1
					last = needle.readUInt8(needle.length - 1)
					pos += needle.length
				} else {
					pos += needle.jump[chunk.readUInt8(pos)]
				}
			}

			nextPos = pos - len

			if (cut < len) {
				bufs.push(cut ? chunk.slice(cut) : chunk)
				bufsBytes += bufs[bufs.length - 1].length
			}

			writeChunk()

			// Flush buffered data to the file stream, but always hold back
			// enough bytes to span a delimiter split across chunks.
			function writeChunk() {
				if (fileStream && bufs[1] && bufsBytes - bufs[0].length > needle.length) {
					bufsBytes -= bufs[0].length
					fileStream.write(bufs.shift(), writeChunk)
				} else {
					process.nextTick(cb)
				}
			}
		}
	}

	if (reqOpts && reqOpts.epilogue) {
		writable.final = function(cb) {
			// Skip the "--\r\n" that closes the final delimiter.
			req.epilogue = Buffer.concat(bufs).toString("utf8", 4)
			cb()
		}
	}

	var stream = new Writable(writable)
	// Non-standard hook: lets a patched Writable start flushing, if present.
	if (stream._readyToWrite) stream._readyToWrite()
	return stream

	function saveTo(stream) {
		fileStream = (
			typeof stream === "string" ?
			fs.createWriteStream(negod.tmp = stream) :
			stream
		)
		negod = null
	}
}

// A multipart/form-data part accepts only Content-Type, Content-Disposition,
// and (in limited circumstances) Content-Transfer-Encoding.
// Other header fields MUST NOT be included and MUST be ignored.

// Content-Transfer-Encoding: 7bit / 8bit / binary / quoted-printable / base64 / ietf-token / x-token
//
// User Agents that recognize Multipart/Related will ignore the Content-Disposition header's disposition type.
// Other User Agents will process the Multipart/Related as Multipart/Mixed and may make use of that header's information.

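// Example part head as it reaches parseHeaders below:
//   Content-Disposition: form-data; name="avatar"; filename="me.png"
//   Content-Type: image/png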
function parseHeaders(str) {
	// RFC 7578 4.8. Other "Content-" Header Fields
	var i
	, headers = {}
	, lines = str.split("\r\n")
	, len = lines.length
	for (; len; ) {
		i = lines[--len].indexOf(":")
		if (i > 0) {
			headers[
				lines[len].slice(0, i).toLowerCase()
			] = lines[len].slice(i + 1).trim()
		}
	}
	return headers
}

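// makeTable attaches a Boyer-Moore-Horspool bad-character table to `buf`:
// jump[byte] = how far the scan position may advance when `byte` is seen.
// Worked example for rnrn ("\r\n\r\n", length 4):
//   jump["\r"] = 1, jump["\n"] = 2, every other byte = 4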
function makeTable(buf) {
	var len = buf.length
	, i = 0
	, pos = len - 1
	// Uint8Array caps shifts at 255; fine here, since a boundary is at most
	// 70 characters (RFC 2046) and the "\r\n\r\n" needle is 4 bytes.
	, jump = buf.jump = new Uint8Array(256).fill(len)

	for (; i < pos; ++i) {
		jump[buf.readUInt8(i)] = pos - i
	}
}