processRequest.js

// @ts-check

"use strict";

const busboy = require("busboy");
const { WriteStream } = require("fs-capacitor");
const createError = require("http-errors");
const objectPath = require("object-path");

const GRAPHQL_MULTIPART_REQUEST_SPEC_URL = require("./GRAPHQL_MULTIPART_REQUEST_SPEC_URL.js");
const ignoreStream = require("./ignoreStream.js");
const Upload = require("./Upload.js");

/** @typedef {import("./GraphQLUpload.js")} GraphQLUpload */
/**
 * Processes an incoming
 * [GraphQL multipart request](https://github.com/jaydenseric/graphql-multipart-request-spec).
 * It parses the `operations` and `map` fields to create an {@linkcode Upload}
 * instance for each expected file upload, placing references wherever the file
 * is expected in the GraphQL operation for the {@linkcode GraphQLUpload} scalar
 * to derive its value. Errors are created with
 * [`http-errors`](https://npm.im/http-errors) to assist in sending responses
 * with appropriate HTTP status codes. Used to create custom middleware.
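 * @example
 * A rough sketch of Express-style custom middleware; the `(request, response,
 * next)` handler shape, the `request.is` check, and the error handling are
 * illustrative assumptions, not part of this module:
 * ```js
 * function graphqlUploadMiddleware(request, response, next) {
 *   if (!request.is("multipart/form-data")) return next();
 *   processRequest(request, response)
 *     .then((body) => {
 *       request.body = body;
 *       next();
 *     })
 *     .catch((error) => {
 *       if (error.status && error.expose) response.statusCode = error.status;
 *       next(error);
 *     });
 * }
 * ```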
 * @type {ProcessRequestFunction}
 */
function processRequest(
  request,
  response,
  {
    maxFieldSize = 1000000, // 1 MB
    maxFileSize = Infinity,
    maxFiles = Infinity,
  } = {}
) {
  return new Promise((resolve, reject) => {
    /** @type {boolean} */
    let released;

    /** @type {Error} */
    let exitError;

    /**
     * @type {{ [key: string]: unknown } | Array<
     *   { [key: string]: unknown }
     * >}
     */
    let operations;

    /**
     * @type {import("object-path").ObjectPathBound<
     *   { [key: string]: unknown } | Array<{ [key: string]: unknown }>
     * >}
     */
    let operationsPath;

    /** @type {Map<string, Upload>} */
    let map;
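
    // For orientation, an illustrative sketch (per the multipart request
    // spec) of the two JSON fields this parser expects to arrive before any
    // file parts:
    //
    //   operations: {"query": "mutation ($file: Upload!) { … }", "variables": {"file": null}}
    //   map:        {"1": ["variables.file"]}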

    const parser = busboy({
      // @ts-ignore This is about to change with `busboy` v1 types.
      headers: request.headers,
      limits: {
        fieldSize: maxFieldSize,
        fields: 2, // Only operations and map.
        fileSize: maxFileSize,
        files: maxFiles,
      },
    });

    /**
     * Exits request processing with an error. Successive calls have no effect.
     * @param {Error} error Error instance.
     * @param {boolean} [isParserError] Is the error from the parser.
     */
    function exit(error, isParserError = false) {
      if (exitError) return;

      exitError = error;

      if (map)
        for (const upload of map.values())
          if (!upload.file) upload.reject(exitError);

      // If the error came from the parser, don’t cause it to be emitted again.
      isParserError ? parser.destroy() : parser.destroy(exitError);

      request.unpipe(parser);

      // With a sufficiently large request body, subsequent events in the same
      // event frame cause the stream to pause after the parser is destroyed. To
      // ensure that the request resumes, the call to .resume() is scheduled for
      // later in the event loop.
      setImmediate(() => {
        request.resume();
      });

      reject(exitError);
    }

    parser.on("field", (fieldName, value, { valueTruncated }) => {
      if (valueTruncated)
        return exit(
          createError(
            413,
            `The ‘${fieldName}’ multipart field value exceeds the ${maxFieldSize} byte size limit.`
          )
        );

      switch (fieldName) {
        case "operations":
          try {
            operations = JSON.parse(value);
          } catch (error) {
            return exit(
              createError(
                400,
                `Invalid JSON in the ‘operations’ multipart field (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`
              )
            );
          }

          // `operations` should be an object or an array. Note that arrays
          // and `null` have an `object` type.
          if (typeof operations !== "object" || !operations)
            return exit(
              createError(
                400,
                `Invalid type for the ‘operations’ multipart field (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`
              )
            );

          operationsPath = objectPath(operations);

          break;
        case "map": {
          if (!operations)
            return exit(
              createError(
                400,
                `Misordered multipart fields; ‘map’ should follow ‘operations’ (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`
              )
            );

          let parsedMap;
          try {
            parsedMap = JSON.parse(value);
          } catch (error) {
            return exit(
              createError(
                400,
                `Invalid JSON in the ‘map’ multipart field (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`
              )
            );
          }

          // `map` should be an object.
          if (
            typeof parsedMap !== "object" ||
            !parsedMap ||
            Array.isArray(parsedMap)
          )
            return exit(
              createError(
                400,
                `Invalid type for the ‘map’ multipart field (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`
              )
            );

          const mapEntries = Object.entries(parsedMap);

          // Check max files is not exceeded, even though the number of files
          // to parse might not match the map provided by the client.
          if (mapEntries.length > maxFiles)
            return exit(
              createError(413, `${maxFiles} max file uploads exceeded.`)
            );

          map = new Map();
          for (const [fieldName, paths] of mapEntries) {
            if (!Array.isArray(paths))
              return exit(
                createError(
                  400,
                  `Invalid type for the ‘map’ multipart field entry key ‘${fieldName}’ array (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`
                )
              );

            map.set(fieldName, new Upload());

            for (const [index, path] of paths.entries()) {
              if (typeof path !== "string")
                return exit(
                  createError(
                    400,
                    `Invalid type for the ‘map’ multipart field entry key ‘${fieldName}’ array index ‘${index}’ value (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`
                  )
                );

              try {
                operationsPath.set(path, map.get(fieldName));
              } catch (error) {
                return exit(
                  createError(
                    400,
                    `Invalid object path for the ‘map’ multipart field entry key ‘${fieldName}’ array index ‘${index}’ value ‘${path}’ (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`
                  )
                );
              }
            }
          }

          resolve(operations);
        }
      }
    });

    parser.on(
      "file",
      (fieldName, stream, { filename, encoding, mimeType: mimetype }) => {
        if (!map) {
          ignoreStream(stream);
          return exit(
            createError(
              400,
              `Misordered multipart fields; files should follow ‘map’ (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`
            )
          );
        }

        const upload = map.get(fieldName);

        if (!upload) {
          // The file is extraneous. As the rest can still be processed, just
          // ignore it and don’t exit with an error.
          ignoreStream(stream);
          return;
        }

        /** @type {Error} */
        let fileError;

        const capacitor = new WriteStream();
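
        // If the capacitor (the temporary file buffer) errors, stop piping
        // into it and drain the rest of this file part so parsing of the
        // request can continue.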
        capacitor.on("error", () => {
          stream.unpipe();
          stream.resume();
        });

        stream.on("limit", () => {
          fileError = createError(
            413,
            `File truncated as it exceeds the ${maxFileSize} byte size limit.`
          );
          stream.unpipe();
          capacitor.destroy(fileError);
        });

        stream.on("error", (error) => {
          fileError = error;
          stream.unpipe();
          capacitor.destroy(fileError);
        });

        /** @type {FileUpload} */
        const file = {
          filename,
          mimetype,
          encoding,
          createReadStream(options) {
            const error = fileError || (released ? exitError : null);
            if (error) throw error;
            return capacitor.createReadStream(options);
          },
          capacitor,
        };
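
        // Hide the private `capacitor` property so it isn’t enumerated or
        // serialized if the resolved upload is logged or JSON stringified.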
        Object.defineProperty(file, "capacitor", {
          enumerable: false,
          configurable: false,
          writable: false,
        });

        stream.pipe(capacitor);
        upload.resolve(file);
      }
    );

    parser.once("filesLimit", () =>
      exit(createError(413, `${maxFiles} max file uploads exceeded.`))
    );

    parser.once("finish", () => {
      request.unpipe(parser);
      request.resume();

      if (!operations)
        return exit(
          createError(
            400,
            `Missing multipart field ‘operations’ (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`
          )
        );

      if (!map)
        return exit(
          createError(
            400,
            `Missing multipart field ‘map’ (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).`
          )
        );

      for (const upload of map.values())
        if (!upload.file)
          upload.reject(createError(400, "File missing in the request."));
    });

    // Use the `on` method instead of `once` as in edge cases the same parser
    // could have multiple `error` events and all must be handled to prevent the
    // Node.js process exiting with an error. One edge case is if there is a
    // malformed part header as well as an unexpected end of the form.
    parser.on("error", (/** @type {Error} */ error) => {
      exit(error, true);
    });

    response.once("close", () => {
      released = true;

      if (map)
        for (const upload of map.values())
          if (upload.file)
            // Release resources and clean up temporary files.
            upload.file.capacitor.release();
    });

    request.once("close", () => {
      if (!request.readableEnded)
        exit(
          createError(
            499,
            "Request disconnected during file upload stream parsing."
          )
        );
    });

    request.pipe(parser);
  });
}

module.exports = processRequest;
/**
 * File upload details that are only available after the file’s field in the
 * [GraphQL multipart request](https://github.com/jaydenseric/graphql-multipart-request-spec)
 * has begun streaming in.
 * @typedef {object} FileUpload
 * @prop {string} filename File name.
 * @prop {string} mimetype File MIME type. Provided by the client and can’t be
 *   trusted.
 * @prop {string} encoding File stream transfer encoding.
 * @prop {import("fs-capacitor").WriteStream} capacitor A private implementation
 *   detail that shouldn’t be used outside
 *   [`graphql-upload`](https://npm.im/graphql-upload).
 * @prop {FileUploadCreateReadStream} createReadStream Creates a
 *   [Node.js readable stream](https://nodejs.org/api/stream.html#readable-streams)
 *   of the file’s contents, for processing and storage.
 */
/**
 * Creates a
 * [Node.js readable stream](https://nodejs.org/api/stream.html#readable-streams)
 * of an {@link FileUpload uploading file’s} contents, for processing and
 * storage. Multiple calls create independent streams. Throws if called after
 * all resolvers have resolved, or after an error has interrupted the request.
 * @callback FileUploadCreateReadStream
 * @param {FileUploadCreateReadStreamOptions} [options] Options.
 * @returns {import("stream").Readable}
 *   [Node.js readable stream](https://nodejs.org/api/stream.html#readable-streams)
 *   of the file’s contents.
 * @see [Node.js `Readable` stream constructor docs](https://nodejs.org/api/stream.html#new-streamreadableoptions).
 * @see [Node.js stream backpressure guide](https://nodejs.org/en/docs/guides/backpressuring-in-streams).
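 * @example
 * A hedged sketch of consuming an upload in a resolver; the `saveUpload`
 * name, its signature, and the `/tmp` destination are illustrative
 * assumptions, and a real implementation shouldn’t trust the client supplied
 * `filename` when building paths:
 * ```js
 * const { createWriteStream } = require("fs");
 *
 * async function saveUpload(upload) {
 *   const { filename, mimetype, createReadStream } = await upload;
 *   await new Promise((resolve, reject) => {
 *     createReadStream()
 *       .on("error", reject)
 *       .pipe(createWriteStream(`/tmp/${filename}`))
 *       .on("error", reject)
 *       .on("finish", resolve);
 *   });
 *   return { filename, mimetype };
 * }
 * ```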
 */

/**
 * {@linkcode FileUploadCreateReadStream} options.
 * @typedef {object} FileUploadCreateReadStreamOptions
 * @prop {string} [encoding] Specify an encoding for the
 *   [`data`](https://nodejs.org/api/stream.html#event-data) chunks to be
 *   strings (without splitting multi-byte characters across chunks) instead of
 *   Node.js [`Buffer`](https://nodejs.org/api/buffer.html#buffer) instances.
 *   Supported values depend on the
 *   [`Buffer` implementation](https://github.com/nodejs/node/blob/v18.1.0/lib/buffer.js#L590-L680)
 *   and include `utf8`, `ucs2`, `utf16le`, `latin1`, `ascii`, `base64`,
 *   `base64url`, or `hex`. Defaults to `utf8`.
 * @prop {number} [highWaterMark] Maximum number of bytes to store in
 *   the internal buffer before ceasing to read from the underlying resource.
 *   Defaults to `16384`.
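 * @example
 * Reading an upload as UTF-8 text (a usage sketch, where `createReadStream`
 * is the {@linkcode FileUpload} method):
 * ```js
 * let text = "";
 * for await (const chunk of createReadStream({ encoding: "utf8" }))
 *   text += chunk;
 * ```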
 */

/**
 * Processes an incoming
 * [GraphQL multipart request](https://github.com/jaydenseric/graphql-multipart-request-spec).
 * @callback ProcessRequestFunction
 * @param {import("http").IncomingMessage} request
 *   [Node.js HTTP server request instance](https://nodejs.org/api/http.html#http_class_http_incomingmessage).
 * @param {import("http").ServerResponse} response
 *   [Node.js HTTP server response instance](https://nodejs.org/api/http.html#http_class_http_serverresponse).
 * @param {ProcessRequestOptions} [options] Options.
 * @returns {Promise<
 *   { [key: string]: unknown } | Array<{ [key: string]: unknown }>
 * >} GraphQL operation or batch of operations for a GraphQL server to consume
 *   (usually as the request body). A GraphQL operation typically has the
 *   properties `query` and `variables`.
 */
/**
 * {@linkcode ProcessRequestFunction} options.
 * @typedef {object} ProcessRequestOptions
 * @prop {number} [maxFieldSize] Maximum allowed non-file multipart form field
 *   size in bytes; enough for your queries. Defaults to `1000000` (1 MB).
 * @prop {number} [maxFileSize] Maximum allowed file size in bytes. Defaults to
 *   `Infinity`.
 * @prop {number} [maxFiles] Maximum allowed number of files. Defaults to
 *   `Infinity`.
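 * @example
 * Overriding the defaults (the limit values are arbitrary illustrations):
 * ```js
 * processRequest(request, response, {
 *   maxFieldSize: 1000000, // 1 MB for the `operations` and `map` JSON.
 *   maxFileSize: 10000000, // 10 MB per file.
 *   maxFiles: 5,
 * });
 * ```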
 */