var fs = require("fs")
, os = require("os")
, path = require("path")
, qs = require("querystring")
, zlib = require("zlib")
, Writable = require("stream").Writable
, accept = require("./accept.js").accept
, rnrn = Buffer.from("\r\n\r\n")
, negotiateContent = accept({
	'application/json': function(str) {
		return JSON.parse(str || "{}")
	},
	'application/x-www-form-urlencoded': function(str) {
		return qs.parse(str)
	},
	// Subtypes:
	// - alternative
	// - byterange https://tools.ietf.org/html/rfc7233#section-5.4.1
	// - digest
	// - encrypted
	// - form-data https://tools.ietf.org/html/rfc7578
	// - mixed
	// - related https://tools.ietf.org/html/rfc2387
	// - report
	// - signed
	// - x-mixed-replace
	'multipart/*;boundary=': null
})
, negotiateDisposition = accept([
	'form-data;name="";filename=""'
])
, seq = 0


module.exports = getContent
getContent.multipart = multipart
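
// Usage sketch (assumed wiring, not shown in this file): getContent is meant to
// be invoked with the incoming request as `this`, and it reads limits such as
// req.opts.maxBodySize, req.opts.maxFields and req.opts.maxFiles from the
// surrounding framework.
//
//   getContent.call(req, function(err, body, files) {
//     // body: parsed JSON / urlencoded / multipart fields
//     // files: array of multipart file descriptors (name, filename, headers, tmp)
//   })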

// Precompute the Horspool-style skip table for the default "\r\n\r\n" needle
makeTable(rnrn)

function getContent(next, reqOpts) {
	var i, tmp
	, req = this
	, head = req.headers
	, negod = negotiateContent(head["content-type"] || head.accept || "*")
	, stream = req

	req.body = {}

	if (head["content-encoding"]) {
		tmp = head["content-encoding"].split(/\W+/)
		for (i = tmp.length; i--; ) {
			if (tmp[i] === "gzip" || tmp[i] === "deflate") {
				// Decompress Gzip or Deflate by auto-detecting the header
				stream = stream.pipe(zlib.createUnzip())
			} else if (tmp[i] && tmp[i] !== "identity") {
				throw "Unsupported Media Type" // 415 Unsupported Media Type
			}
		}
	}

	if (negod.type === "multipart") {
		stream = stream.pipe(multipart(negod.boundary, reqOpts || {}, req))

		stream.on("field", function(negod) {
			var step = req.body
			, key = negod.name
			// Expand bracket notation like a[b][] into nested objects/arrays
			key.replace(/\[(.*?)\]/g, function(_, _key, offset) {
				if (step == req.body) key = key.slice(0, offset)
				step = step[key] || (step[key] = _key && +_key != _key ? {} : [])
				key = _key
			})
			step[key || step.length] = negod.content.toString()
		})
		stream.on("file", function(negod) {
			if (!req.files) req.files = []
			req.files.push(negod)
		})
		stream.on("finish", function() {
			next(null, req.body, req.files)
			// Remove temporary upload files once the handler has run
			if (req.files) {
				for (var i = req.files.length; i--; ) {
					if (req.files[i].tmp) fs.unlink(req.files[i].tmp, nop)
				}
			}
		})
	} else {
		tmp = ""
		stream.on("data", function handleData(data) {
			tmp += data
			// FLOOD ATTACK OR FAULTY CLIENT, NUKE REQUEST
			if (tmp.length > req.opts.maxBodySize) {
				stream.destroy("Payload Too Large") // 413 Payload Too Large
			}
		})
		.on("end", handleEnd)
	}

	stream.on("error", next)

	function handleEnd() {
		try {
			req.body = typeof negod.o === "function" ? negod.o(tmp) : tmp
			next(null, req.body, req.files, negod)
		} catch (e) {
			next(e)
		}
	}
}
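
// Illustration of the bracket-notation expansion above: multipart fields named
// user[name], tags[] and tags[] end up on req.body as
//   { user: { name: "…" }, tags: ["…", "…"] }
// Numeric keys (items[0]) create arrays, non-numeric keys create objects.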

function multipart(boundary, reqOpts, req) {
	// nodejs HTTP_MAX_HEADER_SIZE (80*1024)
	// { "name": "in[]", filename: "a.jpg", tmp: "/tmp/a.123", size: 123 }
	if (typeof boundary === "string") {
		boundary = Buffer.from("\r\n--" + boundary)
	}

	makeTable(boundary)

	// The write() scanner alternates `needle` between the part boundary
	// ("\r\n--" + boundary) and the "\r\n\r\n" blank line that ends a part's
	// headers, using the precomputed needle.jump table (Horspool-style search)
	// to skip ahead on mismatches.
	var headers, fileStream
	, negod = reqOpts.preamble && { preamble: true }
	, needle = boundary
	, bufs = [rnrn.slice(2)]
	, bufsBytes = 2
	, nextPos = needle.length - 3
	, remainingFields = number(reqOpts.maxFields, req.opts.maxFields, 1000)
	, remainingFiles = number(reqOpts.maxFiles, req.opts.maxFiles, 1000)
	, savePath = (reqOpts.path || os.tmpdir() + "/up-") + process.pid + "-" + (seq++) + "-"

	return new Writable({
		write: function(chunk, enc, cb) {
			var buf, bufNum, i, j
			, writable = this
			, pos = nextPos
			, len = chunk.length
			, last = needle[needle.length - 1]
			, cut = 0

			// The needle cannot end inside this chunk yet, so buffer it whole
			if (pos > len) {
				bufs.push(chunk)
				bufsBytes += len
				nextPos -= len
				return cb()
			}

			jump:for (; pos < len; ) {
				if (chunk[pos] === last) {
					// Last needle byte matched, compare the rest backwards,
					// walking into previously buffered chunks when necessary
					buf = chunk
					bufNum = bufs.length
					i = needle.length - 1
					j = pos
					for (; i > 0; ) {
						if (j < 1 && bufNum > 0) {
							buf = bufs[--bufNum]
							j = buf.length
						}
						if (needle[--i] !== buf[--j]) {
							pos += needle.jump[last]
							continue jump
						}
					}
					// Match found, collect everything up to the needle
					if (bufsBytes > 0) {
						bufs.push(chunk)
						buf = Buffer.concat(bufs, pos + bufsBytes - needle.length + 1)
						bufsBytes = bufs.length = 0
					} else if (cut > 0 || pos < len - 1) {
						buf = buf.slice(cut, pos - needle.length + 1)
					}
					if (needle === boundary) {
						// Boundary reached: emit the buffered field or close the file stream
						if (negod) {
							if (!remainingFields--) return writable.destroy({ code: 413, message: "maxFields exceeded" })
							negod.content = buf
							writable.emit(negod.filename === "" ? "field" : "file", negod)
							negod = null
						} else if (fileStream) {
							fileStream.end(buf)
							fileStream = null
						}
						needle = rnrn
					} else {
						// Header block ended: content starts here
						headers = parseHeaders(buf.toString())
						negod = negotiateDisposition(headers["content-disposition"])
						negod.headers = headers

						if (negod.filename && reqOpts.path !== null) {
							if (!remainingFiles--) return writable.destroy({ code: 413, message: "maxFiles exceeded" })
							writable.emit("file", negod, saveTo)
							if (!fileStream) {
								saveTo(savePath + remainingFiles)
							}
						}
						needle = boundary
					}
					cut = pos + 1
					last = needle[needle.length - 1]
					pos += needle.length
				} else {
					pos += needle.jump[chunk[pos]]
				}
			}

			nextPos = pos - len

			if (cut < len) {
				bufs.push(cut ? chunk.slice(cut) : chunk)
				bufsBytes += bufs[bufs.length - 1].length
			}

			if (fileStream) {
				// Flush buffered data to the file oldest-first, keeping at least
				// needle.length bytes in memory in case a boundary spans chunks
				for (; bufs.length > 1 && bufsBytes - bufs[0].length > needle.length; ) {
					bufsBytes -= bufs[0].length
					if (!fileStream.write(bufs.shift())) {
						return fileStream.once("drain", cb)
					}
				}
			}

			cb()
		}
	})

	function saveTo(stream) {
		// Accepts either a destination path (a write stream is created and the
		// path recorded as negod.tmp) or an already opened writable stream
		fileStream = (
			typeof stream === "string" ?
			fs.createWriteStream(negod.tmp = stream) :
			stream
		)
		negod = null
	}
}
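
// Standalone usage sketch (assumed, not part of this file): the parser can be
// used directly when the boundary is known. The third argument only needs an
// `opts` object here because the field/file limits are read from it.
//
//   var parser = multipart(boundary, {}, { opts: {} })
//   parser.on("field", function(part) {
//     // part.name and part.content (Buffer) for plain form fields
//   })
//   parser.on("file", function(part, saveTo) {
//     // saveTo(path or writable stream) picks the destination;
//     // left alone, the part is written to a file under os.tmpdir()
//   })
//   req.pipe(parser)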

function nop() {}

function number(a, b, c) {
	return (
		typeof a === "number" ? a :
		typeof b === "number" ? b :
		c
	)
}

// A multipart/form-data part accepts only Content-Type, Content-Disposition, and (in limited circumstances) Content-Transfer-Encoding.
// Other header fields MUST NOT be included and MUST be ignored.

// Content-Transfer-Encoding: 7bit / 8bit / binary / quoted-printable / base64 / ietf-token / x-token
//
// User Agents that recognize Multipart/Related will ignore the Content-Disposition header's disposition type.
// Other User Agents will process the Multipart/Related as Multipart/Mixed and may make use of that header's information.

function parseHeaders(str) {
	// RFC 7578 4.8. Other "Content-" Header Fields
	var i
	, headers = {}
	, lines = str.split("\r\n")
	, len = lines.length
	for (; len; ) {
		i = lines[--len].indexOf(":")
		if (i > 0) {
			headers[
				lines[len].slice(0, i).toLowerCase()
			] = lines[len].slice(i + 1).trim()
		}
	}
	return headers
}
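
// Illustration: parseHeaders('content-disposition: form-data; name="a"\r\nContent-Type: text/plain')
// returns { "content-disposition": 'form-data; name="a"', "content-type": "text/plain" }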

function makeTable(buf) {
	var len = buf.length
	, i = 0
	, pos = len - 1
	, jump = buf.jump = new Uint8Array(256).fill(len)

	for (; i < pos; ++i) {
		jump[buf[i]] = pos - i
	}
}
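
// makeTable attaches a Horspool-style bad-character table to the needle itself
// (buf.jump): every byte defaults to a full-length skip, and bytes occurring in
// the needle (except its last position) get the distance from their last
// occurrence to the end. For the "\r\n\r\n" needle this yields jump['\r'] = 1,
// jump['\n'] = 2 and 4 for every other byte, which is what the scanner in
// multipart() adds to `pos` on a mismatch.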