From a5771c34992e0ae4e1c2ae5c296523676a33f0cf Mon Sep 17 00:00:00 2001 From: Rim Date: Wed, 16 Apr 2025 09:33:10 -0400 Subject: [PATCH] chore: update deps --- node_modules/.bin/node-which | 16 + node_modules/.bin/node-which.cmd | 17 + node_modules/.bin/node-which.ps1 | 28 ++ node_modules/.package-lock.json | 36 --- .../@fastify/busboy/deps/dicer/lib/Dicer.js | 213 ------------ .../busboy/deps/dicer/lib/HeaderParser.js | 100 ------ .../busboy/deps/dicer/lib/PartStream.js | 13 - .../@fastify/busboy/deps/dicer/lib/dicer.d.ts | 164 ---------- .../@fastify/busboy/deps/streamsearch/sbmh.js | 228 ------------- node_modules/@fastify/busboy/lib/main.d.ts | 196 ----------- node_modules/@fastify/busboy/lib/main.js | 85 ----- .../@fastify/busboy/lib/types/multipart.js | 306 ------------------ .../@fastify/busboy/lib/types/urlencoded.js | 190 ----------- .../@fastify/busboy/lib/utils/Decoder.js | 54 ---- .../@fastify/busboy/lib/utils/basename.js | 14 - .../@fastify/busboy/lib/utils/decodeText.js | 114 ------- .../@fastify/busboy/lib/utils/getLimit.js | 16 - .../@fastify/busboy/lib/utils/parseParams.js | 196 ----------- node_modules/@fastify/busboy/package.json | 86 ----- node_modules/which/bin/node-which | 68 ++-- node_modules/which/which.js | 159 ++++----- package-lock.json | 37 --- package.json | 1 - 23 files changed, 176 insertions(+), 2161 deletions(-) create mode 100644 node_modules/.bin/node-which create mode 100644 node_modules/.bin/node-which.cmd create mode 100644 node_modules/.bin/node-which.ps1 delete mode 100644 node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js delete mode 100644 node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js delete mode 100644 node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js delete mode 100644 node_modules/@fastify/busboy/deps/dicer/lib/dicer.d.ts delete mode 100644 node_modules/@fastify/busboy/deps/streamsearch/sbmh.js delete mode 100644 node_modules/@fastify/busboy/lib/main.d.ts delete mode 100644 node_modules/@fastify/busboy/lib/main.js delete mode 100644 node_modules/@fastify/busboy/lib/types/multipart.js delete mode 100644 node_modules/@fastify/busboy/lib/types/urlencoded.js delete mode 100644 node_modules/@fastify/busboy/lib/utils/Decoder.js delete mode 100644 node_modules/@fastify/busboy/lib/utils/basename.js delete mode 100644 node_modules/@fastify/busboy/lib/utils/decodeText.js delete mode 100644 node_modules/@fastify/busboy/lib/utils/getLimit.js delete mode 100644 node_modules/@fastify/busboy/lib/utils/parseParams.js delete mode 100644 node_modules/@fastify/busboy/package.json diff --git a/node_modules/.bin/node-which b/node_modules/.bin/node-which new file mode 100644 index 0000000..b49b03f --- /dev/null +++ b/node_modules/.bin/node-which @@ -0,0 +1,16 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*|*MINGW*|*MSYS*) + if command -v cygpath > /dev/null 2>&1; then + basedir=`cygpath -w "$basedir"` + fi + ;; +esac + +if [ -x "$basedir/node" ]; then + exec "$basedir/node" "$basedir/../which/bin/node-which" "$@" +else + exec node "$basedir/../which/bin/node-which" "$@" +fi diff --git a/node_modules/.bin/node-which.cmd b/node_modules/.bin/node-which.cmd new file mode 100644 index 0000000..8738aed --- /dev/null +++ b/node_modules/.bin/node-which.cmd @@ -0,0 +1,17 @@ +@ECHO off +GOTO start +:find_dp0 +SET dp0=%~dp0 +EXIT /b +:start +SETLOCAL +CALL :find_dp0 + +IF EXIST "%dp0%\node.exe" ( + SET "_prog=%dp0%\node.exe" +) ELSE ( + SET "_prog=node" + SET PATHEXT=%PATHEXT:;.JS;=;% +) + +endLocal & 
goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\which\bin\node-which" %* diff --git a/node_modules/.bin/node-which.ps1 b/node_modules/.bin/node-which.ps1 new file mode 100644 index 0000000..cfb09e8 --- /dev/null +++ b/node_modules/.bin/node-which.ps1 @@ -0,0 +1,28 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 +if (Test-Path "$basedir/node$exe") { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../which/bin/node-which" $args + } else { + & "$basedir/node$exe" "$basedir/../which/bin/node-which" $args + } + $ret=$LASTEXITCODE +} else { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../which/bin/node-which" $args + } else { + & "node$exe" "$basedir/../which/bin/node-which" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json index db58e60..3ff6e12 100644 --- a/node_modules/.package-lock.json +++ b/node_modules/.package-lock.json @@ -62,15 +62,6 @@ "node": ">=6.9.0" } }, - "node_modules/@fastify/busboy": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", - "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==", - "license": "MIT", - "engines": { - "node": ">=14" - } - }, "node_modules/@isaacs/cliui": { "version": "8.0.2", "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", @@ -517,33 +508,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/call-of-duty-api": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/call-of-duty-api/-/call-of-duty-api-4.1.0.tgz", - "integrity": "sha512-f5DJ6gQru6f406QVBZkkXOv0gUzFu0hykdkyKRa2Am6iWwGRVzcBK7rq+xHpNI6oTq3EFfw0T70kb38rZaezNA==", - "deprecated": "Package no longer supported. 
Contact Support at https://www.npmjs.com/support for more info.", - "license": "MIT", - "dependencies": { - "tslib": "^2.4.0", - "undici": "^5.12.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/Lierrmm" - } - }, - "node_modules/call-of-duty-api/node_modules/undici": { - "version": "5.29.0", - "resolved": "https://registry.npmjs.org/undici/-/undici-5.29.0.tgz", - "integrity": "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==", - "license": "MIT", - "dependencies": { - "@fastify/busboy": "^2.0.0" - }, - "engines": { - "node": ">=14.0" - } - }, "node_modules/camel-case": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/camel-case/-/camel-case-3.0.0.tgz", diff --git a/node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js b/node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js deleted file mode 100644 index 3c8c081..0000000 --- a/node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js +++ /dev/null @@ -1,213 +0,0 @@ -'use strict' - -const WritableStream = require('node:stream').Writable -const inherits = require('node:util').inherits - -const StreamSearch = require('../../streamsearch/sbmh') - -const PartStream = require('./PartStream') -const HeaderParser = require('./HeaderParser') - -const DASH = 45 -const B_ONEDASH = Buffer.from('-') -const B_CRLF = Buffer.from('\r\n') -const EMPTY_FN = function () {} - -function Dicer (cfg) { - if (!(this instanceof Dicer)) { return new Dicer(cfg) } - WritableStream.call(this, cfg) - - if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') } - - if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined } - - this._headerFirst = cfg.headerFirst - - this._dashes = 0 - this._parts = 0 - this._finished = false - this._realFinish = false - this._isPreamble = true - this._justMatched = false - this._firstWrite = true - this._inHeader = true - this._part = undefined - this._cb = undefined - this._ignoreData = false - this._partOpts = { highWaterMark: cfg.partHwm } - this._pause = false - - const self = this - this._hparser = new HeaderParser(cfg) - this._hparser.on('header', function (header) { - self._inHeader = false - self._part.emit('header', header) - }) -} -inherits(Dicer, WritableStream) - -Dicer.prototype.emit = function (ev) { - if (ev === 'finish' && !this._realFinish) { - if (!this._finished) { - const self = this - process.nextTick(function () { - self.emit('error', new Error('Unexpected end of multipart data')) - if (self._part && !self._ignoreData) { - const type = (self._isPreamble ? 'Preamble' : 'Part') - self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data')) - self._part.push(null) - process.nextTick(function () { - self._realFinish = true - self.emit('finish') - self._realFinish = false - }) - return - } - self._realFinish = true - self.emit('finish') - self._realFinish = false - }) - } - } else { WritableStream.prototype.emit.apply(this, arguments) } -} - -Dicer.prototype._write = function (data, encoding, cb) { - // ignore unexpected data (e.g. 
extra trailer data after finished) - if (!this._hparser && !this._bparser) { return cb() } - - if (this._headerFirst && this._isPreamble) { - if (!this._part) { - this._part = new PartStream(this._partOpts) - if (this.listenerCount('preamble') !== 0) { this.emit('preamble', this._part) } else { this._ignore() } - } - const r = this._hparser.push(data) - if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() } - } - - // allows for "easier" testing - if (this._firstWrite) { - this._bparser.push(B_CRLF) - this._firstWrite = false - } - - this._bparser.push(data) - - if (this._pause) { this._cb = cb } else { cb() } -} - -Dicer.prototype.reset = function () { - this._part = undefined - this._bparser = undefined - this._hparser = undefined -} - -Dicer.prototype.setBoundary = function (boundary) { - const self = this - this._bparser = new StreamSearch('\r\n--' + boundary) - this._bparser.on('info', function (isMatch, data, start, end) { - self._oninfo(isMatch, data, start, end) - }) -} - -Dicer.prototype._ignore = function () { - if (this._part && !this._ignoreData) { - this._ignoreData = true - this._part.on('error', EMPTY_FN) - // we must perform some kind of read on the stream even though we are - // ignoring the data, otherwise node's Readable stream will not emit 'end' - // after pushing null to the stream - this._part.resume() - } -} - -Dicer.prototype._oninfo = function (isMatch, data, start, end) { - let buf; const self = this; let i = 0; let r; let shouldWriteMore = true - - if (!this._part && this._justMatched && data) { - while (this._dashes < 2 && (start + i) < end) { - if (data[start + i] === DASH) { - ++i - ++this._dashes - } else { - if (this._dashes) { buf = B_ONEDASH } - this._dashes = 0 - break - } - } - if (this._dashes === 2) { - if ((start + i) < end && this.listenerCount('trailer') !== 0) { this.emit('trailer', data.slice(start + i, end)) } - this.reset() - this._finished = true - // no more parts will be added - if (self._parts === 0) { - self._realFinish = true - self.emit('finish') - self._realFinish = false - } - } - if (this._dashes) { return } - } - if (this._justMatched) { this._justMatched = false } - if (!this._part) { - this._part = new PartStream(this._partOpts) - this._part._read = function (n) { - self._unpause() - } - if (this._isPreamble && this.listenerCount('preamble') !== 0) { - this.emit('preamble', this._part) - } else if (this._isPreamble !== true && this.listenerCount('part') !== 0) { - this.emit('part', this._part) - } else { - this._ignore() - } - if (!this._isPreamble) { this._inHeader = true } - } - if (data && start < end && !this._ignoreData) { - if (this._isPreamble || !this._inHeader) { - if (buf) { shouldWriteMore = this._part.push(buf) } - shouldWriteMore = this._part.push(data.slice(start, end)) - if (!shouldWriteMore) { this._pause = true } - } else if (!this._isPreamble && this._inHeader) { - if (buf) { this._hparser.push(buf) } - r = this._hparser.push(data.slice(start, end)) - if (!this._inHeader && r !== undefined && r < end) { this._oninfo(false, data, start + r, end) } - } - } - if (isMatch) { - this._hparser.reset() - if (this._isPreamble) { this._isPreamble = false } else { - if (start !== end) { - ++this._parts - this._part.on('end', function () { - if (--self._parts === 0) { - if (self._finished) { - self._realFinish = true - self.emit('finish') - self._realFinish = false - } else { - self._unpause() - } - } - }) - } - } - this._part.push(null) - this._part = undefined - 
this._ignoreData = false - this._justMatched = true - this._dashes = 0 - } -} - -Dicer.prototype._unpause = function () { - if (!this._pause) { return } - - this._pause = false - if (this._cb) { - const cb = this._cb - this._cb = undefined - cb() - } -} - -module.exports = Dicer diff --git a/node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js b/node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js deleted file mode 100644 index 65f667b..0000000 --- a/node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js +++ /dev/null @@ -1,100 +0,0 @@ -'use strict' - -const EventEmitter = require('node:events').EventEmitter -const inherits = require('node:util').inherits -const getLimit = require('../../../lib/utils/getLimit') - -const StreamSearch = require('../../streamsearch/sbmh') - -const B_DCRLF = Buffer.from('\r\n\r\n') -const RE_CRLF = /\r\n/g -const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/ // eslint-disable-line no-control-regex - -function HeaderParser (cfg) { - EventEmitter.call(this) - - cfg = cfg || {} - const self = this - this.nread = 0 - this.maxed = false - this.npairs = 0 - this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000) - this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024) - this.buffer = '' - this.header = {} - this.finished = false - this.ss = new StreamSearch(B_DCRLF) - this.ss.on('info', function (isMatch, data, start, end) { - if (data && !self.maxed) { - if (self.nread + end - start >= self.maxHeaderSize) { - end = self.maxHeaderSize - self.nread + start - self.nread = self.maxHeaderSize - self.maxed = true - } else { self.nread += (end - start) } - - self.buffer += data.toString('binary', start, end) - } - if (isMatch) { self._finish() } - }) -} -inherits(HeaderParser, EventEmitter) - -HeaderParser.prototype.push = function (data) { - const r = this.ss.push(data) - if (this.finished) { return r } -} - -HeaderParser.prototype.reset = function () { - this.finished = false - this.buffer = '' - this.header = {} - this.ss.reset() -} - -HeaderParser.prototype._finish = function () { - if (this.buffer) { this._parseHeader() } - this.ss.matches = this.ss.maxMatches - const header = this.header - this.header = {} - this.buffer = '' - this.finished = true - this.nread = this.npairs = 0 - this.maxed = false - this.emit('header', header) -} - -HeaderParser.prototype._parseHeader = function () { - if (this.npairs === this.maxHeaderPairs) { return } - - const lines = this.buffer.split(RE_CRLF) - const len = lines.length - let m, h - - for (var i = 0; i < len; ++i) { // eslint-disable-line no-var - if (lines[i].length === 0) { continue } - if (lines[i][0] === '\t' || lines[i][0] === ' ') { - // folded header content - // RFC2822 says to just remove the CRLF and not the whitespace following - // it, so we follow the RFC and include the leading whitespace ... 
- if (h) { - this.header[h][this.header[h].length - 1] += lines[i] - continue - } - } - - const posColon = lines[i].indexOf(':') - if ( - posColon === -1 || - posColon === 0 - ) { - return - } - m = RE_HDR.exec(lines[i]) - h = m[1].toLowerCase() - this.header[h] = this.header[h] || [] - this.header[h].push((m[2] || '')) - if (++this.npairs === this.maxHeaderPairs) { break } - } -} - -module.exports = HeaderParser diff --git a/node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js b/node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js deleted file mode 100644 index c91da1c..0000000 --- a/node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js +++ /dev/null @@ -1,13 +0,0 @@ -'use strict' - -const inherits = require('node:util').inherits -const ReadableStream = require('node:stream').Readable - -function PartStream (opts) { - ReadableStream.call(this, opts) -} -inherits(PartStream, ReadableStream) - -PartStream.prototype._read = function (n) {} - -module.exports = PartStream diff --git a/node_modules/@fastify/busboy/deps/dicer/lib/dicer.d.ts b/node_modules/@fastify/busboy/deps/dicer/lib/dicer.d.ts deleted file mode 100644 index 3c5b896..0000000 --- a/node_modules/@fastify/busboy/deps/dicer/lib/dicer.d.ts +++ /dev/null @@ -1,164 +0,0 @@ -// Type definitions for dicer 0.2 -// Project: https://github.com/mscdex/dicer -// Definitions by: BendingBender -// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped -// TypeScript Version: 2.2 -/// - -import stream = require("stream"); - -// tslint:disable:unified-signatures - -/** - * A very fast streaming multipart parser for node.js. - * Dicer is a WritableStream - * - * Dicer (special) events: - * - on('finish', ()) - Emitted when all parts have been parsed and the Dicer instance has been ended. - * - on('part', (stream: PartStream)) - Emitted when a new part has been found. - * - on('preamble', (stream: PartStream)) - Emitted for preamble if you should happen to need it (can usually be ignored). - * - on('trailer', (data: Buffer)) - Emitted when trailing data was found after the terminating boundary (as with the preamble, this can usually be ignored too). - */ -export class Dicer extends stream.Writable { - /** - * Creates and returns a new Dicer instance with the following valid config settings: - * - * @param config The configuration to use - */ - constructor(config: Dicer.Config); - /** - * Sets the boundary to use for parsing and performs some initialization needed for parsing. - * You should only need to use this if you set headerFirst to true in the constructor and are parsing the boundary from the preamble header. 
- * - * @param boundary The boundary to use - */ - setBoundary(boundary: string): void; - addListener(event: "finish", listener: () => void): this; - addListener(event: "part", listener: (stream: Dicer.PartStream) => void): this; - addListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; - addListener(event: "trailer", listener: (data: Buffer) => void): this; - addListener(event: "close", listener: () => void): this; - addListener(event: "drain", listener: () => void): this; - addListener(event: "error", listener: (err: Error) => void): this; - addListener(event: "pipe", listener: (src: stream.Readable) => void): this; - addListener(event: "unpipe", listener: (src: stream.Readable) => void): this; - addListener(event: string, listener: (...args: any[]) => void): this; - on(event: "finish", listener: () => void): this; - on(event: "part", listener: (stream: Dicer.PartStream) => void): this; - on(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; - on(event: "trailer", listener: (data: Buffer) => void): this; - on(event: "close", listener: () => void): this; - on(event: "drain", listener: () => void): this; - on(event: "error", listener: (err: Error) => void): this; - on(event: "pipe", listener: (src: stream.Readable) => void): this; - on(event: "unpipe", listener: (src: stream.Readable) => void): this; - on(event: string, listener: (...args: any[]) => void): this; - once(event: "finish", listener: () => void): this; - once(event: "part", listener: (stream: Dicer.PartStream) => void): this; - once(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; - once(event: "trailer", listener: (data: Buffer) => void): this; - once(event: "close", listener: () => void): this; - once(event: "drain", listener: () => void): this; - once(event: "error", listener: (err: Error) => void): this; - once(event: "pipe", listener: (src: stream.Readable) => void): this; - once(event: "unpipe", listener: (src: stream.Readable) => void): this; - once(event: string, listener: (...args: any[]) => void): this; - prependListener(event: "finish", listener: () => void): this; - prependListener(event: "part", listener: (stream: Dicer.PartStream) => void): this; - prependListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; - prependListener(event: "trailer", listener: (data: Buffer) => void): this; - prependListener(event: "close", listener: () => void): this; - prependListener(event: "drain", listener: () => void): this; - prependListener(event: "error", listener: (err: Error) => void): this; - prependListener(event: "pipe", listener: (src: stream.Readable) => void): this; - prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this; - prependListener(event: string, listener: (...args: any[]) => void): this; - prependOnceListener(event: "finish", listener: () => void): this; - prependOnceListener(event: "part", listener: (stream: Dicer.PartStream) => void): this; - prependOnceListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; - prependOnceListener(event: "trailer", listener: (data: Buffer) => void): this; - prependOnceListener(event: "close", listener: () => void): this; - prependOnceListener(event: "drain", listener: () => void): this; - prependOnceListener(event: "error", listener: (err: Error) => void): this; - prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this; - prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): 
this; - prependOnceListener(event: string, listener: (...args: any[]) => void): this; - removeListener(event: "finish", listener: () => void): this; - removeListener(event: "part", listener: (stream: Dicer.PartStream) => void): this; - removeListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; - removeListener(event: "trailer", listener: (data: Buffer) => void): this; - removeListener(event: "close", listener: () => void): this; - removeListener(event: "drain", listener: () => void): this; - removeListener(event: "error", listener: (err: Error) => void): this; - removeListener(event: "pipe", listener: (src: stream.Readable) => void): this; - removeListener(event: "unpipe", listener: (src: stream.Readable) => void): this; - removeListener(event: string, listener: (...args: any[]) => void): this; -} - -declare namespace Dicer { - interface Config { - /** - * This is the boundary used to detect the beginning of a new part. - */ - boundary?: string | undefined; - /** - * If true, preamble header parsing will be performed first. - */ - headerFirst?: boolean | undefined; - /** - * The maximum number of header key=>value pairs to parse Default: 2000 (same as node's http). - */ - maxHeaderPairs?: number | undefined; - } - - /** - * PartStream is a _ReadableStream_ - * - * PartStream (special) events: - * - on('header', (header: object)) - An object containing the header for this particular part. Each property value is an array of one or more string values. - */ - interface PartStream extends stream.Readable { - addListener(event: "header", listener: (header: object) => void): this; - addListener(event: "close", listener: () => void): this; - addListener(event: "data", listener: (chunk: Buffer | string) => void): this; - addListener(event: "end", listener: () => void): this; - addListener(event: "readable", listener: () => void): this; - addListener(event: "error", listener: (err: Error) => void): this; - addListener(event: string, listener: (...args: any[]) => void): this; - on(event: "header", listener: (header: object) => void): this; - on(event: "close", listener: () => void): this; - on(event: "data", listener: (chunk: Buffer | string) => void): this; - on(event: "end", listener: () => void): this; - on(event: "readable", listener: () => void): this; - on(event: "error", listener: (err: Error) => void): this; - on(event: string, listener: (...args: any[]) => void): this; - once(event: "header", listener: (header: object) => void): this; - once(event: "close", listener: () => void): this; - once(event: "data", listener: (chunk: Buffer | string) => void): this; - once(event: "end", listener: () => void): this; - once(event: "readable", listener: () => void): this; - once(event: "error", listener: (err: Error) => void): this; - once(event: string, listener: (...args: any[]) => void): this; - prependListener(event: "header", listener: (header: object) => void): this; - prependListener(event: "close", listener: () => void): this; - prependListener(event: "data", listener: (chunk: Buffer | string) => void): this; - prependListener(event: "end", listener: () => void): this; - prependListener(event: "readable", listener: () => void): this; - prependListener(event: "error", listener: (err: Error) => void): this; - prependListener(event: string, listener: (...args: any[]) => void): this; - prependOnceListener(event: "header", listener: (header: object) => void): this; - prependOnceListener(event: "close", listener: () => void): this; - prependOnceListener(event: "data", 
listener: (chunk: Buffer | string) => void): this; - prependOnceListener(event: "end", listener: () => void): this; - prependOnceListener(event: "readable", listener: () => void): this; - prependOnceListener(event: "error", listener: (err: Error) => void): this; - prependOnceListener(event: string, listener: (...args: any[]) => void): this; - removeListener(event: "header", listener: (header: object) => void): this; - removeListener(event: "close", listener: () => void): this; - removeListener(event: "data", listener: (chunk: Buffer | string) => void): this; - removeListener(event: "end", listener: () => void): this; - removeListener(event: "readable", listener: () => void): this; - removeListener(event: "error", listener: (err: Error) => void): this; - removeListener(event: string, listener: (...args: any[]) => void): this; - } -} \ No newline at end of file diff --git a/node_modules/@fastify/busboy/deps/streamsearch/sbmh.js b/node_modules/@fastify/busboy/deps/streamsearch/sbmh.js deleted file mode 100644 index b90c0e8..0000000 --- a/node_modules/@fastify/busboy/deps/streamsearch/sbmh.js +++ /dev/null @@ -1,228 +0,0 @@ -'use strict' - -/** - * Copyright Brian White. All rights reserved. - * - * @see https://github.com/mscdex/streamsearch - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to - * deal in the Software without restriction, including without limitation the - * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - * sell copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - * IN THE SOFTWARE. - * - * Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation - * by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool - */ -const EventEmitter = require('node:events').EventEmitter -const inherits = require('node:util').inherits - -function SBMH (needle) { - if (typeof needle === 'string') { - needle = Buffer.from(needle) - } - - if (!Buffer.isBuffer(needle)) { - throw new TypeError('The needle has to be a String or a Buffer.') - } - - const needleLength = needle.length - - if (needleLength === 0) { - throw new Error('The needle cannot be an empty String/Buffer.') - } - - if (needleLength > 256) { - throw new Error('The needle cannot have a length bigger than 256.') - } - - this.maxMatches = Infinity - this.matches = 0 - - this._occ = new Array(256) - .fill(needleLength) // Initialize occurrence table. - this._lookbehind_size = 0 - this._needle = needle - this._bufpos = 0 - - this._lookbehind = Buffer.alloc(needleLength) - - // Populate occurrence table with analysis of the needle, - // ignoring last letter. 
- for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var - this._occ[needle[i]] = needleLength - 1 - i - } -} -inherits(SBMH, EventEmitter) - -SBMH.prototype.reset = function () { - this._lookbehind_size = 0 - this.matches = 0 - this._bufpos = 0 -} - -SBMH.prototype.push = function (chunk, pos) { - if (!Buffer.isBuffer(chunk)) { - chunk = Buffer.from(chunk, 'binary') - } - const chlen = chunk.length - this._bufpos = pos || 0 - let r - while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) } - return r -} - -SBMH.prototype._sbmh_feed = function (data) { - const len = data.length - const needle = this._needle - const needleLength = needle.length - const lastNeedleChar = needle[needleLength - 1] - - // Positive: points to a position in `data` - // pos == 3 points to data[3] - // Negative: points to a position in the lookbehind buffer - // pos == -2 points to lookbehind[lookbehind_size - 2] - let pos = -this._lookbehind_size - let ch - - if (pos < 0) { - // Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool - // search with character lookup code that considers both the - // lookbehind buffer and the current round's haystack data. - // - // Loop until - // there is a match. - // or until - // we've moved past the position that requires the - // lookbehind buffer. In this case we switch to the - // optimized loop. - // or until - // the character to look at lies outside the haystack. - while (pos < 0 && pos <= len - needleLength) { - ch = this._sbmh_lookup_char(data, pos + needleLength - 1) - - if ( - ch === lastNeedleChar && - this._sbmh_memcmp(data, pos, needleLength - 1) - ) { - this._lookbehind_size = 0 - ++this.matches - this.emit('info', true) - - return (this._bufpos = pos + needleLength) - } - pos += this._occ[ch] - } - - // No match. - - if (pos < 0) { - // There's too few data for Boyer-Moore-Horspool to run, - // so let's use a different algorithm to skip as much as - // we can. - // Forward pos until - // the trailing part of lookbehind + data - // looks like the beginning of the needle - // or until - // pos == 0 - while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos } - } - - if (pos >= 0) { - // Discard lookbehind buffer. - this.emit('info', false, this._lookbehind, 0, this._lookbehind_size) - this._lookbehind_size = 0 - } else { - // Cut off part of the lookbehind buffer that has - // been processed and append the entire haystack - // into it. - const bytesToCutOff = this._lookbehind_size + pos - if (bytesToCutOff > 0) { - // The cut off data is guaranteed not to contain the needle. - this.emit('info', false, this._lookbehind, 0, bytesToCutOff) - } - - this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff, - this._lookbehind_size - bytesToCutOff) - this._lookbehind_size -= bytesToCutOff - - data.copy(this._lookbehind, this._lookbehind_size) - this._lookbehind_size += len - - this._bufpos = len - return len - } - } - - pos += (pos >= 0) * this._bufpos - - // Lookbehind buffer is now empty. We only need to check if the - // needle is in the haystack. - if (data.indexOf(needle, pos) !== -1) { - pos = data.indexOf(needle, pos) - ++this.matches - if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) } - - return (this._bufpos = pos + needleLength) - } else { - pos = len - needleLength - } - - // There was no match. 
If there's trailing haystack data that we cannot - // match yet using the Boyer-Moore-Horspool algorithm (because the trailing - // data is less than the needle size) then match using a modified - // algorithm that starts matching from the beginning instead of the end. - // Whatever trailing data is left after running this algorithm is added to - // the lookbehind buffer. - while ( - pos < len && - ( - data[pos] !== needle[0] || - ( - (Buffer.compare( - data.subarray(pos, pos + len - pos), - needle.subarray(0, len - pos) - ) !== 0) - ) - ) - ) { - ++pos - } - if (pos < len) { - data.copy(this._lookbehind, 0, pos, pos + (len - pos)) - this._lookbehind_size = len - pos - } - - // Everything until pos is guaranteed not to contain needle data. - if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) } - - this._bufpos = len - return len -} - -SBMH.prototype._sbmh_lookup_char = function (data, pos) { - return (pos < 0) - ? this._lookbehind[this._lookbehind_size + pos] - : data[pos] -} - -SBMH.prototype._sbmh_memcmp = function (data, pos, len) { - for (var i = 0; i < len; ++i) { // eslint-disable-line no-var - if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false } - } - return true -} - -module.exports = SBMH diff --git a/node_modules/@fastify/busboy/lib/main.d.ts b/node_modules/@fastify/busboy/lib/main.d.ts deleted file mode 100644 index 91b6448..0000000 --- a/node_modules/@fastify/busboy/lib/main.d.ts +++ /dev/null @@ -1,196 +0,0 @@ -// Definitions by: Jacob Baskin -// BendingBender -// Igor Savin - -/// - -import * as http from 'http'; -import { Readable, Writable } from 'stream'; -export { Dicer } from "../deps/dicer/lib/dicer"; - -export const Busboy: BusboyConstructor; -export default Busboy; - -export interface BusboyConfig { - /** - * These are the HTTP headers of the incoming request, which are used by individual parsers. - */ - headers: BusboyHeaders; - /** - * `highWaterMark` to use for this Busboy instance. - * @default WritableStream default. - */ - highWaterMark?: number | undefined; - /** - * highWaterMark to use for file streams. - * @default ReadableStream default. - */ - fileHwm?: number | undefined; - /** - * Default character set to use when one isn't defined. - * @default 'utf8' - */ - defCharset?: string | undefined; - /** - * Detect if a Part is a file. - * - * By default a file is detected if contentType - * is application/octet-stream or fileName is not - * undefined. - * - * Modify this to handle e.g. Blobs. - */ - isPartAFile?: (fieldName: string | undefined, contentType: string | undefined, fileName: string | undefined) => boolean; - /** - * If paths in the multipart 'filename' field shall be preserved. - * @default false - */ - preservePath?: boolean | undefined; - /** - * Various limits on incoming data. 
- */ - limits?: - | { - /** - * Max field name size (in bytes) - * @default 100 bytes - */ - fieldNameSize?: number | undefined; - /** - * Max field value size (in bytes) - * @default 1MB - */ - fieldSize?: number | undefined; - /** - * Max number of non-file fields - * @default Infinity - */ - fields?: number | undefined; - /** - * For multipart forms, the max file size (in bytes) - * @default Infinity - */ - fileSize?: number | undefined; - /** - * For multipart forms, the max number of file fields - * @default Infinity - */ - files?: number | undefined; - /** - * For multipart forms, the max number of parts (fields + files) - * @default Infinity - */ - parts?: number | undefined; - /** - * For multipart forms, the max number of header key=>value pairs to parse - * @default 2000 - */ - headerPairs?: number | undefined; - - /** - * For multipart forms, the max size of a header part - * @default 81920 - */ - headerSize?: number | undefined; - } - | undefined; -} - -export type BusboyHeaders = { 'content-type': string } & http.IncomingHttpHeaders; - -export interface BusboyFileStream extends - Readable { - - truncated: boolean; - - /** - * The number of bytes that have been read so far. - */ - bytesRead: number; -} - -export interface Busboy extends Writable { - addListener(event: Event, listener: BusboyEvents[Event]): this; - - addListener(event: string | symbol, listener: (...args: any[]) => void): this; - - on(event: Event, listener: BusboyEvents[Event]): this; - - on(event: string | symbol, listener: (...args: any[]) => void): this; - - once(event: Event, listener: BusboyEvents[Event]): this; - - once(event: string | symbol, listener: (...args: any[]) => void): this; - - removeListener(event: Event, listener: BusboyEvents[Event]): this; - - removeListener(event: string | symbol, listener: (...args: any[]) => void): this; - - off(event: Event, listener: BusboyEvents[Event]): this; - - off(event: string | symbol, listener: (...args: any[]) => void): this; - - prependListener(event: Event, listener: BusboyEvents[Event]): this; - - prependListener(event: string | symbol, listener: (...args: any[]) => void): this; - - prependOnceListener(event: Event, listener: BusboyEvents[Event]): this; - - prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; -} - -export interface BusboyEvents { - /** - * Emitted for each new file form field found. - * - * * Note: if you listen for this event, you should always handle the `stream` no matter if you care about the - * file contents or not (e.g. you can simply just do `stream.resume();` if you want to discard the contents), - * otherwise the 'finish' event will never fire on the Busboy instance. However, if you don't care about **any** - * incoming files, you can simply not listen for the 'file' event at all and any/all files will be automatically - * and safely discarded (these discarded files do still count towards `files` and `parts` limits). - * * If a configured file size limit was reached, `stream` will both have a boolean property `truncated` - * (best checked at the end of the stream) and emit a 'limit' event to notify you when this happens. - * - * @param listener.transferEncoding Contains the 'Content-Transfer-Encoding' value for the file stream. - * @param listener.mimeType Contains the 'Content-Type' value for the file stream. 
- */ - file: ( - fieldname: string, - stream: BusboyFileStream, - filename: string, - transferEncoding: string, - mimeType: string, - ) => void; - /** - * Emitted for each new non-file field found. - */ - field: ( - fieldname: string, - value: string, - fieldnameTruncated: boolean, - valueTruncated: boolean, - transferEncoding: string, - mimeType: string, - ) => void; - finish: () => void; - /** - * Emitted when specified `parts` limit has been reached. No more 'file' or 'field' events will be emitted. - */ - partsLimit: () => void; - /** - * Emitted when specified `files` limit has been reached. No more 'file' events will be emitted. - */ - filesLimit: () => void; - /** - * Emitted when specified `fields` limit has been reached. No more 'field' events will be emitted. - */ - fieldsLimit: () => void; - error: (error: unknown) => void; -} - -export interface BusboyConstructor { - (options: BusboyConfig): Busboy; - - new(options: BusboyConfig): Busboy; -} - diff --git a/node_modules/@fastify/busboy/lib/main.js b/node_modules/@fastify/busboy/lib/main.js deleted file mode 100644 index 8794beb..0000000 --- a/node_modules/@fastify/busboy/lib/main.js +++ /dev/null @@ -1,85 +0,0 @@ -'use strict' - -const WritableStream = require('node:stream').Writable -const { inherits } = require('node:util') -const Dicer = require('../deps/dicer/lib/Dicer') - -const MultipartParser = require('./types/multipart') -const UrlencodedParser = require('./types/urlencoded') -const parseParams = require('./utils/parseParams') - -function Busboy (opts) { - if (!(this instanceof Busboy)) { return new Busboy(opts) } - - if (typeof opts !== 'object') { - throw new TypeError('Busboy expected an options-Object.') - } - if (typeof opts.headers !== 'object') { - throw new TypeError('Busboy expected an options-Object with headers-attribute.') - } - if (typeof opts.headers['content-type'] !== 'string') { - throw new TypeError('Missing Content-Type-header.') - } - - const { - headers, - ...streamOptions - } = opts - - this.opts = { - autoDestroy: false, - ...streamOptions - } - WritableStream.call(this, this.opts) - - this._done = false - this._parser = this.getParserByHeaders(headers) - this._finished = false -} -inherits(Busboy, WritableStream) - -Busboy.prototype.emit = function (ev) { - if (ev === 'finish') { - if (!this._done) { - this._parser?.end() - return - } else if (this._finished) { - return - } - this._finished = true - } - WritableStream.prototype.emit.apply(this, arguments) -} - -Busboy.prototype.getParserByHeaders = function (headers) { - const parsed = parseParams(headers['content-type']) - - const cfg = { - defCharset: this.opts.defCharset, - fileHwm: this.opts.fileHwm, - headers, - highWaterMark: this.opts.highWaterMark, - isPartAFile: this.opts.isPartAFile, - limits: this.opts.limits, - parsedConType: parsed, - preservePath: this.opts.preservePath - } - - if (MultipartParser.detect.test(parsed[0])) { - return new MultipartParser(this, cfg) - } - if (UrlencodedParser.detect.test(parsed[0])) { - return new UrlencodedParser(this, cfg) - } - throw new Error('Unsupported Content-Type.') -} - -Busboy.prototype._write = function (chunk, encoding, cb) { - this._parser.write(chunk, cb) -} - -module.exports = Busboy -module.exports.default = Busboy -module.exports.Busboy = Busboy - -module.exports.Dicer = Dicer diff --git a/node_modules/@fastify/busboy/lib/types/multipart.js b/node_modules/@fastify/busboy/lib/types/multipart.js deleted file mode 100644 index d691eca..0000000 --- 
a/node_modules/@fastify/busboy/lib/types/multipart.js +++ /dev/null @@ -1,306 +0,0 @@ -'use strict' - -// TODO: -// * support 1 nested multipart level -// (see second multipart example here: -// http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data) -// * support limits.fieldNameSize -// -- this will require modifications to utils.parseParams - -const { Readable } = require('node:stream') -const { inherits } = require('node:util') - -const Dicer = require('../../deps/dicer/lib/Dicer') - -const parseParams = require('../utils/parseParams') -const decodeText = require('../utils/decodeText') -const basename = require('../utils/basename') -const getLimit = require('../utils/getLimit') - -const RE_BOUNDARY = /^boundary$/i -const RE_FIELD = /^form-data$/i -const RE_CHARSET = /^charset$/i -const RE_FILENAME = /^filename$/i -const RE_NAME = /^name$/i - -Multipart.detect = /^multipart\/form-data/i -function Multipart (boy, cfg) { - let i - let len - const self = this - let boundary - const limits = cfg.limits - const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)) - const parsedConType = cfg.parsedConType || [] - const defCharset = cfg.defCharset || 'utf8' - const preservePath = cfg.preservePath - const fileOpts = { highWaterMark: cfg.fileHwm } - - for (i = 0, len = parsedConType.length; i < len; ++i) { - if (Array.isArray(parsedConType[i]) && - RE_BOUNDARY.test(parsedConType[i][0])) { - boundary = parsedConType[i][1] - break - } - } - - function checkFinished () { - if (nends === 0 && finished && !boy._done) { - finished = false - self.end() - } - } - - if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') } - - const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) - const fileSizeLimit = getLimit(limits, 'fileSize', Infinity) - const filesLimit = getLimit(limits, 'files', Infinity) - const fieldsLimit = getLimit(limits, 'fields', Infinity) - const partsLimit = getLimit(limits, 'parts', Infinity) - const headerPairsLimit = getLimit(limits, 'headerPairs', 2000) - const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024) - - let nfiles = 0 - let nfields = 0 - let nends = 0 - let curFile - let curField - let finished = false - - this._needDrain = false - this._pause = false - this._cb = undefined - this._nparts = 0 - this._boy = boy - - const parserCfg = { - boundary, - maxHeaderPairs: headerPairsLimit, - maxHeaderSize: headerSizeLimit, - partHwm: fileOpts.highWaterMark, - highWaterMark: cfg.highWaterMark - } - - this.parser = new Dicer(parserCfg) - this.parser.on('drain', function () { - self._needDrain = false - if (self._cb && !self._pause) { - const cb = self._cb - self._cb = undefined - cb() - } - }).on('part', function onPart (part) { - if (++self._nparts > partsLimit) { - self.parser.removeListener('part', onPart) - self.parser.on('part', skipPart) - boy.hitPartsLimit = true - boy.emit('partsLimit') - return skipPart(part) - } - - // hack because streams2 _always_ doesn't emit 'end' until nextTick, so let - // us emit 'end' early since we know the part has ended if we are already - // seeing the next part - if (curField) { - const field = curField - field.emit('end') - field.removeAllListeners('end') - } - - part.on('header', function (header) { - let contype - let fieldname - let parsed - let charset - let encoding - let filename - let nsize = 0 - - if (header['content-type']) { - parsed = 
parseParams(header['content-type'][0]) - if (parsed[0]) { - contype = parsed[0].toLowerCase() - for (i = 0, len = parsed.length; i < len; ++i) { - if (RE_CHARSET.test(parsed[i][0])) { - charset = parsed[i][1].toLowerCase() - break - } - } - } - } - - if (contype === undefined) { contype = 'text/plain' } - if (charset === undefined) { charset = defCharset } - - if (header['content-disposition']) { - parsed = parseParams(header['content-disposition'][0]) - if (!RE_FIELD.test(parsed[0])) { return skipPart(part) } - for (i = 0, len = parsed.length; i < len; ++i) { - if (RE_NAME.test(parsed[i][0])) { - fieldname = parsed[i][1] - } else if (RE_FILENAME.test(parsed[i][0])) { - filename = parsed[i][1] - if (!preservePath) { filename = basename(filename) } - } - } - } else { return skipPart(part) } - - if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' } - - let onData, - onEnd - - if (isPartAFile(fieldname, contype, filename)) { - // file/binary field - if (nfiles === filesLimit) { - if (!boy.hitFilesLimit) { - boy.hitFilesLimit = true - boy.emit('filesLimit') - } - return skipPart(part) - } - - ++nfiles - - if (boy.listenerCount('file') === 0) { - self.parser._ignore() - return - } - - ++nends - const file = new FileStream(fileOpts) - curFile = file - file.on('end', function () { - --nends - self._pause = false - checkFinished() - if (self._cb && !self._needDrain) { - const cb = self._cb - self._cb = undefined - cb() - } - }) - file._read = function (n) { - if (!self._pause) { return } - self._pause = false - if (self._cb && !self._needDrain) { - const cb = self._cb - self._cb = undefined - cb() - } - } - boy.emit('file', fieldname, file, filename, encoding, contype) - - onData = function (data) { - if ((nsize += data.length) > fileSizeLimit) { - const extralen = fileSizeLimit - nsize + data.length - if (extralen > 0) { file.push(data.slice(0, extralen)) } - file.truncated = true - file.bytesRead = fileSizeLimit - part.removeAllListeners('data') - file.emit('limit') - return - } else if (!file.push(data)) { self._pause = true } - - file.bytesRead = nsize - } - - onEnd = function () { - curFile = undefined - file.push(null) - } - } else { - // non-file field - if (nfields === fieldsLimit) { - if (!boy.hitFieldsLimit) { - boy.hitFieldsLimit = true - boy.emit('fieldsLimit') - } - return skipPart(part) - } - - ++nfields - ++nends - let buffer = '' - let truncated = false - curField = part - - onData = function (data) { - if ((nsize += data.length) > fieldSizeLimit) { - const extralen = (fieldSizeLimit - (nsize - data.length)) - buffer += data.toString('binary', 0, extralen) - truncated = true - part.removeAllListeners('data') - } else { buffer += data.toString('binary') } - } - - onEnd = function () { - curField = undefined - if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) } - boy.emit('field', fieldname, buffer, false, truncated, encoding, contype) - --nends - checkFinished() - } - } - - /* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become - broken. Streams2/streams3 is a huge black box of confusion, but - somehow overriding the sync state seems to fix things again (and still - seems to work for previous node versions). 
- */ - part._readableState.sync = false - - part.on('data', onData) - part.on('end', onEnd) - }).on('error', function (err) { - if (curFile) { curFile.emit('error', err) } - }) - }).on('error', function (err) { - boy.emit('error', err) - }).on('finish', function () { - finished = true - checkFinished() - }) -} - -Multipart.prototype.write = function (chunk, cb) { - const r = this.parser.write(chunk) - if (r && !this._pause) { - cb() - } else { - this._needDrain = !r - this._cb = cb - } -} - -Multipart.prototype.end = function () { - const self = this - - if (self.parser.writable) { - self.parser.end() - } else if (!self._boy._done) { - process.nextTick(function () { - self._boy._done = true - self._boy.emit('finish') - }) - } -} - -function skipPart (part) { - part.resume() -} - -function FileStream (opts) { - Readable.call(this, opts) - - this.bytesRead = 0 - - this.truncated = false -} - -inherits(FileStream, Readable) - -FileStream.prototype._read = function (n) {} - -module.exports = Multipart diff --git a/node_modules/@fastify/busboy/lib/types/urlencoded.js b/node_modules/@fastify/busboy/lib/types/urlencoded.js deleted file mode 100644 index 6f5f784..0000000 --- a/node_modules/@fastify/busboy/lib/types/urlencoded.js +++ /dev/null @@ -1,190 +0,0 @@ -'use strict' - -const Decoder = require('../utils/Decoder') -const decodeText = require('../utils/decodeText') -const getLimit = require('../utils/getLimit') - -const RE_CHARSET = /^charset$/i - -UrlEncoded.detect = /^application\/x-www-form-urlencoded/i -function UrlEncoded (boy, cfg) { - const limits = cfg.limits - const parsedConType = cfg.parsedConType - this.boy = boy - - this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) - this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100) - this.fieldsLimit = getLimit(limits, 'fields', Infinity) - - let charset - for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var - if (Array.isArray(parsedConType[i]) && - RE_CHARSET.test(parsedConType[i][0])) { - charset = parsedConType[i][1].toLowerCase() - break - } - } - - if (charset === undefined) { charset = cfg.defCharset || 'utf8' } - - this.decoder = new Decoder() - this.charset = charset - this._fields = 0 - this._state = 'key' - this._checkingBytes = true - this._bytesKey = 0 - this._bytesVal = 0 - this._key = '' - this._val = '' - this._keyTrunc = false - this._valTrunc = false - this._hitLimit = false -} - -UrlEncoded.prototype.write = function (data, cb) { - if (this._fields === this.fieldsLimit) { - if (!this.boy.hitFieldsLimit) { - this.boy.hitFieldsLimit = true - this.boy.emit('fieldsLimit') - } - return cb() - } - - let idxeq; let idxamp; let i; let p = 0; const len = data.length - - while (p < len) { - if (this._state === 'key') { - idxeq = idxamp = undefined - for (i = p; i < len; ++i) { - if (!this._checkingBytes) { ++p } - if (data[i] === 0x3D/* = */) { - idxeq = i - break - } else if (data[i] === 0x26/* & */) { - idxamp = i - break - } - if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) { - this._hitLimit = true - break - } else if (this._checkingBytes) { ++this._bytesKey } - } - - if (idxeq !== undefined) { - // key with assignment - if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) } - this._state = 'val' - - this._hitLimit = false - this._checkingBytes = true - this._val = '' - this._bytesVal = 0 - this._valTrunc = false - this.decoder.reset() - - p = idxeq + 1 - } else if (idxamp !== undefined) { - // key with no 
assignment - ++this._fields - let key; const keyTrunc = this._keyTrunc - if (idxamp > p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } else { key = this._key } - - this._hitLimit = false - this._checkingBytes = true - this._key = '' - this._bytesKey = 0 - this._keyTrunc = false - this.decoder.reset() - - if (key.length) { - this.boy.emit('field', decodeText(key, 'binary', this.charset), - '', - keyTrunc, - false) - } - - p = idxamp + 1 - if (this._fields === this.fieldsLimit) { return cb() } - } else if (this._hitLimit) { - // we may not have hit the actual limit if there are encoded bytes... - if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) } - p = i - if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) { - // yep, we actually did hit the limit - this._checkingBytes = false - this._keyTrunc = true - } - } else { - if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) } - p = len - } - } else { - idxamp = undefined - for (i = p; i < len; ++i) { - if (!this._checkingBytes) { ++p } - if (data[i] === 0x26/* & */) { - idxamp = i - break - } - if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) { - this._hitLimit = true - break - } else if (this._checkingBytes) { ++this._bytesVal } - } - - if (idxamp !== undefined) { - ++this._fields - if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) } - this.boy.emit('field', decodeText(this._key, 'binary', this.charset), - decodeText(this._val, 'binary', this.charset), - this._keyTrunc, - this._valTrunc) - this._state = 'key' - - this._hitLimit = false - this._checkingBytes = true - this._key = '' - this._bytesKey = 0 - this._keyTrunc = false - this.decoder.reset() - - p = idxamp + 1 - if (this._fields === this.fieldsLimit) { return cb() } - } else if (this._hitLimit) { - // we may not have hit the actual limit if there are encoded bytes... 
- if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) } - p = i - if ((this._val === '' && this.fieldSizeLimit === 0) || - (this._bytesVal = this._val.length) === this.fieldSizeLimit) { - // yep, we actually did hit the limit - this._checkingBytes = false - this._valTrunc = true - } - } else { - if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) } - p = len - } - } - } - cb() -} - -UrlEncoded.prototype.end = function () { - if (this.boy._done) { return } - - if (this._state === 'key' && this._key.length > 0) { - this.boy.emit('field', decodeText(this._key, 'binary', this.charset), - '', - this._keyTrunc, - false) - } else if (this._state === 'val') { - this.boy.emit('field', decodeText(this._key, 'binary', this.charset), - decodeText(this._val, 'binary', this.charset), - this._keyTrunc, - this._valTrunc) - } - this.boy._done = true - this.boy.emit('finish') -} - -module.exports = UrlEncoded diff --git a/node_modules/@fastify/busboy/lib/utils/Decoder.js b/node_modules/@fastify/busboy/lib/utils/Decoder.js deleted file mode 100644 index 7917678..0000000 --- a/node_modules/@fastify/busboy/lib/utils/Decoder.js +++ /dev/null @@ -1,54 +0,0 @@ -'use strict' - -const RE_PLUS = /\+/g - -const HEX = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, - 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 -] - -function Decoder () { - this.buffer = undefined -} -Decoder.prototype.write = function (str) { - // Replace '+' with ' ' before decoding - str = str.replace(RE_PLUS, ' ') - let res = '' - let i = 0; let p = 0; const len = str.length - for (; i < len; ++i) { - if (this.buffer !== undefined) { - if (!HEX[str.charCodeAt(i)]) { - res += '%' + this.buffer - this.buffer = undefined - --i // retry character - } else { - this.buffer += str[i] - ++p - if (this.buffer.length === 2) { - res += String.fromCharCode(parseInt(this.buffer, 16)) - this.buffer = undefined - } - } - } else if (str[i] === '%') { - if (i > p) { - res += str.substring(p, i) - p = i - } - this.buffer = '' - ++p - } - } - if (p < len && this.buffer === undefined) { res += str.substring(p) } - return res -} -Decoder.prototype.reset = function () { - this.buffer = undefined -} - -module.exports = Decoder diff --git a/node_modules/@fastify/busboy/lib/utils/basename.js b/node_modules/@fastify/busboy/lib/utils/basename.js deleted file mode 100644 index db58819..0000000 --- a/node_modules/@fastify/busboy/lib/utils/basename.js +++ /dev/null @@ -1,14 +0,0 @@ -'use strict' - -module.exports = function basename (path) { - if (typeof path !== 'string') { return '' } - for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var - switch (path.charCodeAt(i)) { - case 0x2F: // '/' - case 0x5C: // '\' - path = path.slice(i + 1) - return (path === '..' || path === '.' ? '' : path) - } - } - return (path === '..' || path === '.' ? 
'' : path) -} diff --git a/node_modules/@fastify/busboy/lib/utils/decodeText.js b/node_modules/@fastify/busboy/lib/utils/decodeText.js deleted file mode 100644 index eac7d35..0000000 --- a/node_modules/@fastify/busboy/lib/utils/decodeText.js +++ /dev/null @@ -1,114 +0,0 @@ -'use strict' - -// Node has always utf-8 -const utf8Decoder = new TextDecoder('utf-8') -const textDecoders = new Map([ - ['utf-8', utf8Decoder], - ['utf8', utf8Decoder] -]) - -function getDecoder (charset) { - let lc - while (true) { - switch (charset) { - case 'utf-8': - case 'utf8': - return decoders.utf8 - case 'latin1': - case 'ascii': // TODO: Make these a separate, strict decoder? - case 'us-ascii': - case 'iso-8859-1': - case 'iso8859-1': - case 'iso88591': - case 'iso_8859-1': - case 'windows-1252': - case 'iso_8859-1:1987': - case 'cp1252': - case 'x-cp1252': - return decoders.latin1 - case 'utf16le': - case 'utf-16le': - case 'ucs2': - case 'ucs-2': - return decoders.utf16le - case 'base64': - return decoders.base64 - default: - if (lc === undefined) { - lc = true - charset = charset.toLowerCase() - continue - } - return decoders.other.bind(charset) - } - } -} - -const decoders = { - utf8: (data, sourceEncoding) => { - if (data.length === 0) { - return '' - } - if (typeof data === 'string') { - data = Buffer.from(data, sourceEncoding) - } - return data.utf8Slice(0, data.length) - }, - - latin1: (data, sourceEncoding) => { - if (data.length === 0) { - return '' - } - if (typeof data === 'string') { - return data - } - return data.latin1Slice(0, data.length) - }, - - utf16le: (data, sourceEncoding) => { - if (data.length === 0) { - return '' - } - if (typeof data === 'string') { - data = Buffer.from(data, sourceEncoding) - } - return data.ucs2Slice(0, data.length) - }, - - base64: (data, sourceEncoding) => { - if (data.length === 0) { - return '' - } - if (typeof data === 'string') { - data = Buffer.from(data, sourceEncoding) - } - return data.base64Slice(0, data.length) - }, - - other: (data, sourceEncoding) => { - if (data.length === 0) { - return '' - } - if (typeof data === 'string') { - data = Buffer.from(data, sourceEncoding) - } - - if (textDecoders.has(this.toString())) { - try { - return textDecoders.get(this).decode(data) - } catch {} - } - return typeof data === 'string' - ? 
data - : data.toString() - } -} - -function decodeText (text, sourceEncoding, destEncoding) { - if (text) { - return getDecoder(destEncoding)(text, sourceEncoding) - } - return text -} - -module.exports = decodeText diff --git a/node_modules/@fastify/busboy/lib/utils/getLimit.js b/node_modules/@fastify/busboy/lib/utils/getLimit.js deleted file mode 100644 index cb64fd6..0000000 --- a/node_modules/@fastify/busboy/lib/utils/getLimit.js +++ /dev/null @@ -1,16 +0,0 @@ -'use strict' - -module.exports = function getLimit (limits, name, defaultLimit) { - if ( - !limits || - limits[name] === undefined || - limits[name] === null - ) { return defaultLimit } - - if ( - typeof limits[name] !== 'number' || - isNaN(limits[name]) - ) { throw new TypeError('Limit ' + name + ' is not a valid number') } - - return limits[name] -} diff --git a/node_modules/@fastify/busboy/lib/utils/parseParams.js b/node_modules/@fastify/busboy/lib/utils/parseParams.js deleted file mode 100644 index 1698e62..0000000 --- a/node_modules/@fastify/busboy/lib/utils/parseParams.js +++ /dev/null @@ -1,196 +0,0 @@ -/* eslint-disable object-property-newline */ -'use strict' - -const decodeText = require('./decodeText') - -const RE_ENCODED = /%[a-fA-F0-9][a-fA-F0-9]/g - -const EncodedLookup = { - '%00': '\x00', '%01': '\x01', '%02': '\x02', '%03': '\x03', '%04': '\x04', - '%05': '\x05', '%06': '\x06', '%07': '\x07', '%08': '\x08', '%09': '\x09', - '%0a': '\x0a', '%0A': '\x0a', '%0b': '\x0b', '%0B': '\x0b', '%0c': '\x0c', - '%0C': '\x0c', '%0d': '\x0d', '%0D': '\x0d', '%0e': '\x0e', '%0E': '\x0e', - '%0f': '\x0f', '%0F': '\x0f', '%10': '\x10', '%11': '\x11', '%12': '\x12', - '%13': '\x13', '%14': '\x14', '%15': '\x15', '%16': '\x16', '%17': '\x17', - '%18': '\x18', '%19': '\x19', '%1a': '\x1a', '%1A': '\x1a', '%1b': '\x1b', - '%1B': '\x1b', '%1c': '\x1c', '%1C': '\x1c', '%1d': '\x1d', '%1D': '\x1d', - '%1e': '\x1e', '%1E': '\x1e', '%1f': '\x1f', '%1F': '\x1f', '%20': '\x20', - '%21': '\x21', '%22': '\x22', '%23': '\x23', '%24': '\x24', '%25': '\x25', - '%26': '\x26', '%27': '\x27', '%28': '\x28', '%29': '\x29', '%2a': '\x2a', - '%2A': '\x2a', '%2b': '\x2b', '%2B': '\x2b', '%2c': '\x2c', '%2C': '\x2c', - '%2d': '\x2d', '%2D': '\x2d', '%2e': '\x2e', '%2E': '\x2e', '%2f': '\x2f', - '%2F': '\x2f', '%30': '\x30', '%31': '\x31', '%32': '\x32', '%33': '\x33', - '%34': '\x34', '%35': '\x35', '%36': '\x36', '%37': '\x37', '%38': '\x38', - '%39': '\x39', '%3a': '\x3a', '%3A': '\x3a', '%3b': '\x3b', '%3B': '\x3b', - '%3c': '\x3c', '%3C': '\x3c', '%3d': '\x3d', '%3D': '\x3d', '%3e': '\x3e', - '%3E': '\x3e', '%3f': '\x3f', '%3F': '\x3f', '%40': '\x40', '%41': '\x41', - '%42': '\x42', '%43': '\x43', '%44': '\x44', '%45': '\x45', '%46': '\x46', - '%47': '\x47', '%48': '\x48', '%49': '\x49', '%4a': '\x4a', '%4A': '\x4a', - '%4b': '\x4b', '%4B': '\x4b', '%4c': '\x4c', '%4C': '\x4c', '%4d': '\x4d', - '%4D': '\x4d', '%4e': '\x4e', '%4E': '\x4e', '%4f': '\x4f', '%4F': '\x4f', - '%50': '\x50', '%51': '\x51', '%52': '\x52', '%53': '\x53', '%54': '\x54', - '%55': '\x55', '%56': '\x56', '%57': '\x57', '%58': '\x58', '%59': '\x59', - '%5a': '\x5a', '%5A': '\x5a', '%5b': '\x5b', '%5B': '\x5b', '%5c': '\x5c', - '%5C': '\x5c', '%5d': '\x5d', '%5D': '\x5d', '%5e': '\x5e', '%5E': '\x5e', - '%5f': '\x5f', '%5F': '\x5f', '%60': '\x60', '%61': '\x61', '%62': '\x62', - '%63': '\x63', '%64': '\x64', '%65': '\x65', '%66': '\x66', '%67': '\x67', - '%68': '\x68', '%69': '\x69', '%6a': '\x6a', '%6A': '\x6a', '%6b': '\x6b', - '%6B': '\x6b', '%6c': '\x6c', '%6C': '\x6c', 
'%6d': '\x6d', '%6D': '\x6d', - '%6e': '\x6e', '%6E': '\x6e', '%6f': '\x6f', '%6F': '\x6f', '%70': '\x70', - '%71': '\x71', '%72': '\x72', '%73': '\x73', '%74': '\x74', '%75': '\x75', - '%76': '\x76', '%77': '\x77', '%78': '\x78', '%79': '\x79', '%7a': '\x7a', - '%7A': '\x7a', '%7b': '\x7b', '%7B': '\x7b', '%7c': '\x7c', '%7C': '\x7c', - '%7d': '\x7d', '%7D': '\x7d', '%7e': '\x7e', '%7E': '\x7e', '%7f': '\x7f', - '%7F': '\x7f', '%80': '\x80', '%81': '\x81', '%82': '\x82', '%83': '\x83', - '%84': '\x84', '%85': '\x85', '%86': '\x86', '%87': '\x87', '%88': '\x88', - '%89': '\x89', '%8a': '\x8a', '%8A': '\x8a', '%8b': '\x8b', '%8B': '\x8b', - '%8c': '\x8c', '%8C': '\x8c', '%8d': '\x8d', '%8D': '\x8d', '%8e': '\x8e', - '%8E': '\x8e', '%8f': '\x8f', '%8F': '\x8f', '%90': '\x90', '%91': '\x91', - '%92': '\x92', '%93': '\x93', '%94': '\x94', '%95': '\x95', '%96': '\x96', - '%97': '\x97', '%98': '\x98', '%99': '\x99', '%9a': '\x9a', '%9A': '\x9a', - '%9b': '\x9b', '%9B': '\x9b', '%9c': '\x9c', '%9C': '\x9c', '%9d': '\x9d', - '%9D': '\x9d', '%9e': '\x9e', '%9E': '\x9e', '%9f': '\x9f', '%9F': '\x9f', - '%a0': '\xa0', '%A0': '\xa0', '%a1': '\xa1', '%A1': '\xa1', '%a2': '\xa2', - '%A2': '\xa2', '%a3': '\xa3', '%A3': '\xa3', '%a4': '\xa4', '%A4': '\xa4', - '%a5': '\xa5', '%A5': '\xa5', '%a6': '\xa6', '%A6': '\xa6', '%a7': '\xa7', - '%A7': '\xa7', '%a8': '\xa8', '%A8': '\xa8', '%a9': '\xa9', '%A9': '\xa9', - '%aa': '\xaa', '%Aa': '\xaa', '%aA': '\xaa', '%AA': '\xaa', '%ab': '\xab', - '%Ab': '\xab', '%aB': '\xab', '%AB': '\xab', '%ac': '\xac', '%Ac': '\xac', - '%aC': '\xac', '%AC': '\xac', '%ad': '\xad', '%Ad': '\xad', '%aD': '\xad', - '%AD': '\xad', '%ae': '\xae', '%Ae': '\xae', '%aE': '\xae', '%AE': '\xae', - '%af': '\xaf', '%Af': '\xaf', '%aF': '\xaf', '%AF': '\xaf', '%b0': '\xb0', - '%B0': '\xb0', '%b1': '\xb1', '%B1': '\xb1', '%b2': '\xb2', '%B2': '\xb2', - '%b3': '\xb3', '%B3': '\xb3', '%b4': '\xb4', '%B4': '\xb4', '%b5': '\xb5', - '%B5': '\xb5', '%b6': '\xb6', '%B6': '\xb6', '%b7': '\xb7', '%B7': '\xb7', - '%b8': '\xb8', '%B8': '\xb8', '%b9': '\xb9', '%B9': '\xb9', '%ba': '\xba', - '%Ba': '\xba', '%bA': '\xba', '%BA': '\xba', '%bb': '\xbb', '%Bb': '\xbb', - '%bB': '\xbb', '%BB': '\xbb', '%bc': '\xbc', '%Bc': '\xbc', '%bC': '\xbc', - '%BC': '\xbc', '%bd': '\xbd', '%Bd': '\xbd', '%bD': '\xbd', '%BD': '\xbd', - '%be': '\xbe', '%Be': '\xbe', '%bE': '\xbe', '%BE': '\xbe', '%bf': '\xbf', - '%Bf': '\xbf', '%bF': '\xbf', '%BF': '\xbf', '%c0': '\xc0', '%C0': '\xc0', - '%c1': '\xc1', '%C1': '\xc1', '%c2': '\xc2', '%C2': '\xc2', '%c3': '\xc3', - '%C3': '\xc3', '%c4': '\xc4', '%C4': '\xc4', '%c5': '\xc5', '%C5': '\xc5', - '%c6': '\xc6', '%C6': '\xc6', '%c7': '\xc7', '%C7': '\xc7', '%c8': '\xc8', - '%C8': '\xc8', '%c9': '\xc9', '%C9': '\xc9', '%ca': '\xca', '%Ca': '\xca', - '%cA': '\xca', '%CA': '\xca', '%cb': '\xcb', '%Cb': '\xcb', '%cB': '\xcb', - '%CB': '\xcb', '%cc': '\xcc', '%Cc': '\xcc', '%cC': '\xcc', '%CC': '\xcc', - '%cd': '\xcd', '%Cd': '\xcd', '%cD': '\xcd', '%CD': '\xcd', '%ce': '\xce', - '%Ce': '\xce', '%cE': '\xce', '%CE': '\xce', '%cf': '\xcf', '%Cf': '\xcf', - '%cF': '\xcf', '%CF': '\xcf', '%d0': '\xd0', '%D0': '\xd0', '%d1': '\xd1', - '%D1': '\xd1', '%d2': '\xd2', '%D2': '\xd2', '%d3': '\xd3', '%D3': '\xd3', - '%d4': '\xd4', '%D4': '\xd4', '%d5': '\xd5', '%D5': '\xd5', '%d6': '\xd6', - '%D6': '\xd6', '%d7': '\xd7', '%D7': '\xd7', '%d8': '\xd8', '%D8': '\xd8', - '%d9': '\xd9', '%D9': '\xd9', '%da': '\xda', '%Da': '\xda', '%dA': '\xda', - '%DA': '\xda', '%db': '\xdb', '%Db': '\xdb', '%dB': 
'\xdb', '%DB': '\xdb', - '%dc': '\xdc', '%Dc': '\xdc', '%dC': '\xdc', '%DC': '\xdc', '%dd': '\xdd', - '%Dd': '\xdd', '%dD': '\xdd', '%DD': '\xdd', '%de': '\xde', '%De': '\xde', - '%dE': '\xde', '%DE': '\xde', '%df': '\xdf', '%Df': '\xdf', '%dF': '\xdf', - '%DF': '\xdf', '%e0': '\xe0', '%E0': '\xe0', '%e1': '\xe1', '%E1': '\xe1', - '%e2': '\xe2', '%E2': '\xe2', '%e3': '\xe3', '%E3': '\xe3', '%e4': '\xe4', - '%E4': '\xe4', '%e5': '\xe5', '%E5': '\xe5', '%e6': '\xe6', '%E6': '\xe6', - '%e7': '\xe7', '%E7': '\xe7', '%e8': '\xe8', '%E8': '\xe8', '%e9': '\xe9', - '%E9': '\xe9', '%ea': '\xea', '%Ea': '\xea', '%eA': '\xea', '%EA': '\xea', - '%eb': '\xeb', '%Eb': '\xeb', '%eB': '\xeb', '%EB': '\xeb', '%ec': '\xec', - '%Ec': '\xec', '%eC': '\xec', '%EC': '\xec', '%ed': '\xed', '%Ed': '\xed', - '%eD': '\xed', '%ED': '\xed', '%ee': '\xee', '%Ee': '\xee', '%eE': '\xee', - '%EE': '\xee', '%ef': '\xef', '%Ef': '\xef', '%eF': '\xef', '%EF': '\xef', - '%f0': '\xf0', '%F0': '\xf0', '%f1': '\xf1', '%F1': '\xf1', '%f2': '\xf2', - '%F2': '\xf2', '%f3': '\xf3', '%F3': '\xf3', '%f4': '\xf4', '%F4': '\xf4', - '%f5': '\xf5', '%F5': '\xf5', '%f6': '\xf6', '%F6': '\xf6', '%f7': '\xf7', - '%F7': '\xf7', '%f8': '\xf8', '%F8': '\xf8', '%f9': '\xf9', '%F9': '\xf9', - '%fa': '\xfa', '%Fa': '\xfa', '%fA': '\xfa', '%FA': '\xfa', '%fb': '\xfb', - '%Fb': '\xfb', '%fB': '\xfb', '%FB': '\xfb', '%fc': '\xfc', '%Fc': '\xfc', - '%fC': '\xfc', '%FC': '\xfc', '%fd': '\xfd', '%Fd': '\xfd', '%fD': '\xfd', - '%FD': '\xfd', '%fe': '\xfe', '%Fe': '\xfe', '%fE': '\xfe', '%FE': '\xfe', - '%ff': '\xff', '%Ff': '\xff', '%fF': '\xff', '%FF': '\xff' -} - -function encodedReplacer (match) { - return EncodedLookup[match] -} - -const STATE_KEY = 0 -const STATE_VALUE = 1 -const STATE_CHARSET = 2 -const STATE_LANG = 3 - -function parseParams (str) { - const res = [] - let state = STATE_KEY - let charset = '' - let inquote = false - let escaping = false - let p = 0 - let tmp = '' - const len = str.length - - for (var i = 0; i < len; ++i) { // eslint-disable-line no-var - const char = str[i] - if (char === '\\' && inquote) { - if (escaping) { escaping = false } else { - escaping = true - continue - } - } else if (char === '"') { - if (!escaping) { - if (inquote) { - inquote = false - state = STATE_KEY - } else { inquote = true } - continue - } else { escaping = false } - } else { - if (escaping && inquote) { tmp += '\\' } - escaping = false - if ((state === STATE_CHARSET || state === STATE_LANG) && char === "'") { - if (state === STATE_CHARSET) { - state = STATE_LANG - charset = tmp.substring(1) - } else { state = STATE_VALUE } - tmp = '' - continue - } else if (state === STATE_KEY && - (char === '*' || char === '=') && - res.length) { - state = char === '*' - ? 
STATE_CHARSET - : STATE_VALUE - res[p] = [tmp, undefined] - tmp = '' - continue - } else if (!inquote && char === ';') { - state = STATE_KEY - if (charset) { - if (tmp.length) { - tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), - 'binary', - charset) - } - charset = '' - } else if (tmp.length) { - tmp = decodeText(tmp, 'binary', 'utf8') - } - if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp } - tmp = '' - ++p - continue - } else if (!inquote && (char === ' ' || char === '\t')) { continue } - } - tmp += char - } - if (charset && tmp.length) { - tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), - 'binary', - charset) - } else if (tmp) { - tmp = decodeText(tmp, 'binary', 'utf8') - } - - if (res[p] === undefined) { - if (tmp) { res[p] = tmp } - } else { res[p][1] = tmp } - - return res -} - -module.exports = parseParams diff --git a/node_modules/@fastify/busboy/package.json b/node_modules/@fastify/busboy/package.json deleted file mode 100644 index 83693ac..0000000 --- a/node_modules/@fastify/busboy/package.json +++ /dev/null @@ -1,86 +0,0 @@ -{ - "name": "@fastify/busboy", - "version": "2.1.1", - "private": false, - "author": "Brian White ", - "contributors": [ - { - "name": "Igor Savin", - "email": "kibertoad@gmail.com", - "url": "https://github.com/kibertoad" - }, - { - "name": "Aras Abbasi", - "email": "aras.abbasi@gmail.com", - "url": "https://github.com/uzlopak" - } - ], - "description": "A streaming parser for HTML form data for node.js", - "main": "lib/main", - "type": "commonjs", - "types": "lib/main.d.ts", - "scripts": { - "bench:busboy": "cd benchmarks && npm install && npm run benchmark-fastify", - "bench:dicer": "node bench/dicer/dicer-bench-multipart-parser.js", - "coveralls": "nyc report --reporter=lcov", - "lint": "npm run lint:standard", - "lint:everything": "npm run lint && npm run test:types", - "lint:fix": "standard --fix", - "lint:standard": "standard --verbose | snazzy", - "test:mocha": "tap", - "test:types": "tsd", - "test:coverage": "nyc npm run test", - "test": "npm run test:mocha" - }, - "engines": { - "node": ">=14" - }, - "devDependencies": { - "@types/node": "^20.1.0", - "busboy": "^1.0.0", - "photofinish": "^1.8.0", - "snazzy": "^9.0.0", - "standard": "^17.0.0", - "tap": "^16.3.8", - "tinybench": "^2.5.1", - "tsd": "^0.30.0", - "typescript": "^5.0.2" - }, - "keywords": [ - "uploads", - "forms", - "multipart", - "form-data" - ], - "license": "MIT", - "repository": { - "type": "git", - "url": "git+https://github.com/fastify/busboy.git" - }, - "tsd": { - "directory": "test/types", - "compilerOptions": { - "esModuleInterop": false, - "module": "commonjs", - "target": "ES2017" - } - }, - "standard": { - "globals": [ - "describe", - "it" - ], - "ignore": [ - "bench" - ] - }, - "files": [ - "README.md", - "LICENSE", - "lib/*", - "deps/encoding/*", - "deps/dicer/lib", - "deps/streamsearch/", - "deps/dicer/LICENSE" - ] -} diff --git a/node_modules/which/bin/node-which b/node_modules/which/bin/node-which index 6ce0612..7cee372 100644 --- a/node_modules/which/bin/node-which +++ b/node_modules/which/bin/node-which @@ -1,50 +1,52 @@ #!/usr/bin/env node -var which = require('../'); -if (process.argv.length < 3) usage(); +var which = require("../") +if (process.argv.length < 3) + usage() -function usage() { - console.error('usage: which [-as] program ...'); - process.exit(1); +function usage () { + console.error('usage: which [-as] program ...') + process.exit(1) } -var all = false; -var silent = false; -var dashdash = false; +var all = false 
+var silent = false +var dashdash = false var args = process.argv.slice(2).filter(function (arg) { - if (dashdash || !/^-/.test(arg)) return true; + if (dashdash || !/^-/.test(arg)) + return true if (arg === '--') { - dashdash = true; - return false; + dashdash = true + return false } - var flags = arg.substr(1).split(''); + var flags = arg.substr(1).split('') for (var f = 0; f < flags.length; f++) { - var flag = flags[f]; + var flag = flags[f] switch (flag) { case 's': - silent = true; - break; + silent = true + break case 'a': - all = true; - break; + all = true + break default: - console.error('which: illegal option -- ' + flag); - usage(); + console.error('which: illegal option -- ' + flag) + usage() } } - return false; -}); + return false +}) -process.exit( - args.reduce(function (pv, current) { - try { - var f = which.sync(current, { all: all }); - if (all) f = f.join('\n'); - if (!silent) console.log(f); - return pv; - } catch (e) { - return 1; - } - }, 0) -); +process.exit(args.reduce(function (pv, current) { + try { + var f = which.sync(current, { all: all }) + if (all) + f = f.join('\n') + if (!silent) + console.log(f) + return pv; + } catch (e) { + return 1; + } +}, 0)) diff --git a/node_modules/which/which.js b/node_modules/which/which.js index b9dfc5d..82afffd 100644 --- a/node_modules/which/which.js +++ b/node_modules/which/which.js @@ -1,124 +1,125 @@ -const isWindows = - process.platform === 'win32' || - process.env.OSTYPE === 'cygwin' || - process.env.OSTYPE === 'msys'; +const isWindows = process.platform === 'win32' || + process.env.OSTYPE === 'cygwin' || + process.env.OSTYPE === 'msys' -const path = require('path'); -const COLON = isWindows ? ';' : ':'; -const isexe = require('isexe'); +const path = require('path') +const COLON = isWindows ? ';' : ':' +const isexe = require('isexe') const getNotFoundError = (cmd) => - Object.assign(new Error(`not found: ${cmd}`), { code: 'ENOENT' }); + Object.assign(new Error(`not found: ${cmd}`), { code: 'ENOENT' }) const getPathInfo = (cmd, opt) => { - const colon = opt.colon || COLON; + const colon = opt.colon || COLON // If it has a slash, then we don't bother searching the pathenv. // just check the file itself, and that's it. - const pathEnv = - cmd.match(/\//) || (isWindows && cmd.match(/\\/)) ? - [''] - : [ + const pathEnv = cmd.match(/\//) || isWindows && cmd.match(/\\/) ? [''] + : ( + [ // windows always checks the cwd first ...(isWindows ? [process.cwd()] : []), - ...( - opt.path || - process.env.PATH || - /* istanbul ignore next: very unusual */ '' - ).split(colon), - ]; - const pathExtExe = - isWindows ? - opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM' - : ''; - const pathExt = isWindows ? pathExtExe.split(colon) : ['']; + ...(opt.path || process.env.PATH || + /* istanbul ignore next: very unusual */ '').split(colon), + ] + ) + const pathExtExe = isWindows + ? opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM' + : '' + const pathExt = isWindows ? 
pathExtExe.split(colon) : [''] if (isWindows) { - if (cmd.indexOf('.') !== -1 && pathExt[0] !== '') pathExt.unshift(''); + if (cmd.indexOf('.') !== -1 && pathExt[0] !== '') + pathExt.unshift('') } return { pathEnv, pathExt, pathExtExe, - }; -}; + } +} const which = (cmd, opt, cb) => { if (typeof opt === 'function') { - cb = opt; - opt = {}; + cb = opt + opt = {} } - if (!opt) opt = {}; + if (!opt) + opt = {} - const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt); - const found = []; + const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt) + const found = [] - const step = (i) => - new Promise((resolve, reject) => { - if (i === pathEnv.length) - return opt.all && found.length ? - resolve(found) - : reject(getNotFoundError(cmd)); + const step = i => new Promise((resolve, reject) => { + if (i === pathEnv.length) + return opt.all && found.length ? resolve(found) + : reject(getNotFoundError(cmd)) - const ppRaw = pathEnv[i]; - const pathPart = /^".*"$/.test(ppRaw) ? ppRaw.slice(1, -1) : ppRaw; + const ppRaw = pathEnv[i] + const pathPart = /^".*"$/.test(ppRaw) ? ppRaw.slice(1, -1) : ppRaw - const pCmd = path.join(pathPart, cmd); - const p = - !pathPart && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + pCmd : pCmd; + const pCmd = path.join(pathPart, cmd) + const p = !pathPart && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + pCmd + : pCmd - resolve(subStep(p, i, 0)); - }); + resolve(subStep(p, i, 0)) + }) - const subStep = (p, i, ii) => - new Promise((resolve, reject) => { - if (ii === pathExt.length) return resolve(step(i + 1)); - const ext = pathExt[ii]; - isexe(p + ext, { pathExt: pathExtExe }, (er, is) => { - if (!er && is) { - if (opt.all) found.push(p + ext); - else return resolve(p + ext); - } - return resolve(subStep(p, i, ii + 1)); - }); - }); + const subStep = (p, i, ii) => new Promise((resolve, reject) => { + if (ii === pathExt.length) + return resolve(step(i + 1)) + const ext = pathExt[ii] + isexe(p + ext, { pathExt: pathExtExe }, (er, is) => { + if (!er && is) { + if (opt.all) + found.push(p + ext) + else + return resolve(p + ext) + } + return resolve(subStep(p, i, ii + 1)) + }) + }) - return cb ? step(0).then((res) => cb(null, res), cb) : step(0); -}; + return cb ? step(0).then(res => cb(null, res), cb) : step(0) +} const whichSync = (cmd, opt) => { - opt = opt || {}; + opt = opt || {} - const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt); - const found = []; + const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt) + const found = [] - for (let i = 0; i < pathEnv.length; i++) { - const ppRaw = pathEnv[i]; - const pathPart = /^".*"$/.test(ppRaw) ? ppRaw.slice(1, -1) : ppRaw; + for (let i = 0; i < pathEnv.length; i ++) { + const ppRaw = pathEnv[i] + const pathPart = /^".*"$/.test(ppRaw) ? ppRaw.slice(1, -1) : ppRaw - const pCmd = path.join(pathPart, cmd); - const p = - !pathPart && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + pCmd : pCmd; + const pCmd = path.join(pathPart, cmd) + const p = !pathPart && /^\.[\\\/]/.test(cmd) ? 
cmd.slice(0, 2) + pCmd + : pCmd - for (let j = 0; j < pathExt.length; j++) { - const cur = p + pathExt[j]; + for (let j = 0; j < pathExt.length; j ++) { + const cur = p + pathExt[j] try { - const is = isexe.sync(cur, { pathExt: pathExtExe }); + const is = isexe.sync(cur, { pathExt: pathExtExe }) if (is) { - if (opt.all) found.push(cur); - else return cur; + if (opt.all) + found.push(cur) + else + return cur } } catch (ex) {} } } - if (opt.all && found.length) return found; + if (opt.all && found.length) + return found - if (opt.nothrow) return null; + if (opt.nothrow) + return null - throw getNotFoundError(cmd); -}; + throw getNotFoundError(cmd) +} -module.exports = which; -which.sync = whichSync; +module.exports = which +which.sync = whichSync diff --git a/package-lock.json b/package-lock.json index e3f6cda..8e6f869 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,7 +9,6 @@ "version": "1.0.0", "dependencies": { "body-parser": "^2.2.0", - "call-of-duty-api": "^4.1.0", "csso": "^5.0.5", "express": "^4.21.2", "express-rate-limit": "^7.5.0", @@ -85,15 +84,6 @@ "node": ">=6.9.0" } }, - "node_modules/@fastify/busboy": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", - "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==", - "license": "MIT", - "engines": { - "node": ">=14" - } - }, "node_modules/@isaacs/cliui": { "version": "8.0.2", "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", @@ -540,33 +530,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/call-of-duty-api": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/call-of-duty-api/-/call-of-duty-api-4.1.0.tgz", - "integrity": "sha512-f5DJ6gQru6f406QVBZkkXOv0gUzFu0hykdkyKRa2Am6iWwGRVzcBK7rq+xHpNI6oTq3EFfw0T70kb38rZaezNA==", - "deprecated": "Package no longer supported. Contact Support at https://www.npmjs.com/support for more info.", - "license": "MIT", - "dependencies": { - "tslib": "^2.4.0", - "undici": "^5.12.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/Lierrmm" - } - }, - "node_modules/call-of-duty-api/node_modules/undici": { - "version": "5.29.0", - "resolved": "https://registry.npmjs.org/undici/-/undici-5.29.0.tgz", - "integrity": "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==", - "license": "MIT", - "dependencies": { - "@fastify/busboy": "^2.0.0" - }, - "engines": { - "node": ">=14.0" - } - }, "node_modules/camel-case": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/camel-case/-/camel-case-3.0.0.tgz", diff --git a/package.json b/package.json index 7491707..3e85363 100644 --- a/package.json +++ b/package.json @@ -20,7 +20,6 @@ }, "dependencies": { "body-parser": "^2.2.0", - "call-of-duty-api": "^4.1.0", "csso": "^5.0.5", "express": "^4.21.2", "express-rate-limit": "^7.5.0",
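-- 
The text below the "-- " delimiter is outside the applied patch. The only runtime API this update touches is `which`, whose rewritten source appears in the node_modules/which/which.js hunk above. What follows is a minimal usage sketch, not part of the change itself, assuming the v2-style interface shown in that hunk: `which(cmd)` returns a promise (or accepts a callback), `which.sync(cmd, opt)` supports `all` and `nothrow`, and a failed lookup produces an error with `code: 'ENOENT'`. The 'node' lookup target is illustrative only.

'use strict'
const which = require('which')

// Asynchronous lookup: resolves with the first match found on PATH,
// or rejects with an ENOENT-coded error when nothing matches.
which('node')
  .then((resolved) => console.log('node resolves to', resolved))
  .catch((err) => console.error(err.message))

// Synchronous lookup: `all: true` collects every match on PATH,
// `nothrow: true` returns null instead of throwing when nothing is found.
const everyNode = which.sync('node', { all: true, nothrow: true })
console.log(everyNode || 'node not found on PATH')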