chore: update deps

Rim 2025-03-29 16:05:52 -04:00
parent a177b9bb4c
commit 505a26c84d
1489 changed files with 27814 additions and 146817 deletions

16 node_modules/.bin/acorn generated vendored

@@ -1,16 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../acorn/bin/acorn" "$@"
else
exec node "$basedir/../acorn/bin/acorn" "$@"
fi
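
These .bin entries are launcher shims that npm generates from each dependency's package.json "bin" field; each one prefers a node binary sitting next to the shim and otherwise falls back to whatever node is on PATH. A rough JavaScript equivalent of what the acorn shim above does (paths are illustrative, not part of the removed files):

const { spawnSync } = require('node:child_process')
const path = require('node:path')

// Run the package's actual bin script with node, forwarding our arguments,
// just like the shell shim's `exec node "$basedir/../acorn/bin/acorn" "$@"`.
const acornBin = path.join(__dirname, 'node_modules', 'acorn', 'bin', 'acorn')
spawnSync(process.execPath, [acornBin, ...process.argv.slice(2)], { stdio: 'inherit' })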

16 node_modules/.bin/glob generated vendored

@@ -1,16 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../glob/dist/esm/bin.mjs" "$@"
else
exec node "$basedir/../glob/dist/esm/bin.mjs" "$@"
fi

16 node_modules/.bin/he generated vendored

@@ -1,16 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../he/bin/he" "$@"
else
exec node "$basedir/../he/bin/he" "$@"
fi

16 node_modules/.bin/html-minifier generated vendored

@@ -1,16 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../html-minifier/cli.js" "$@"
else
exec node "$basedir/../html-minifier/cli.js" "$@"
fi

16 node_modules/.bin/mime generated vendored

@@ -1,16 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../mime/cli.js" "$@"
else
exec node "$basedir/../mime/cli.js" "$@"
fi

16 node_modules/.bin/node-which generated vendored

@@ -1,16 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../which/bin/node-which" "$@"
else
exec node "$basedir/../which/bin/node-which" "$@"
fi

16 node_modules/.bin/nodemon generated vendored

@@ -1,16 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../nodemon/bin/nodemon.js" "$@"
else
exec node "$basedir/../nodemon/bin/nodemon.js" "$@"
fi

16 node_modules/.bin/nodetouch generated vendored

@@ -1,16 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../touch/bin/nodetouch.js" "$@"
else
exec node "$basedir/../touch/bin/nodetouch.js" "$@"
fi

16 node_modules/.bin/replace-in-file generated vendored

@@ -1,16 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../replace-in-file/bin/cli.js" "$@"
else
exec node "$basedir/../replace-in-file/bin/cli.js" "$@"
fi

16 node_modules/.bin/semver generated vendored

@@ -1,16 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../semver/bin/semver.js" "$@"
else
exec node "$basedir/../semver/bin/semver.js" "$@"
fi

16 node_modules/.bin/terser generated vendored

@@ -1,16 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../terser/bin/terser" "$@"
else
exec node "$basedir/../terser/bin/terser" "$@"
fi

16 node_modules/.bin/uglifyjs generated vendored

@@ -1,16 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../uglify-js/bin/uglifyjs" "$@"
else
exec node "$basedir/../uglify-js/bin/uglifyjs" "$@"
fi

1153 node_modules/.package-lock.json generated vendored

File diff suppressed because it is too large


@@ -1,213 +0,0 @@
'use strict'
const WritableStream = require('node:stream').Writable
const inherits = require('node:util').inherits
const StreamSearch = require('../../streamsearch/sbmh')
const PartStream = require('./PartStream')
const HeaderParser = require('./HeaderParser')
const DASH = 45
const B_ONEDASH = Buffer.from('-')
const B_CRLF = Buffer.from('\r\n')
const EMPTY_FN = function () {}
function Dicer (cfg) {
if (!(this instanceof Dicer)) { return new Dicer(cfg) }
WritableStream.call(this, cfg)
if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') }
if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined }
this._headerFirst = cfg.headerFirst
this._dashes = 0
this._parts = 0
this._finished = false
this._realFinish = false
this._isPreamble = true
this._justMatched = false
this._firstWrite = true
this._inHeader = true
this._part = undefined
this._cb = undefined
this._ignoreData = false
this._partOpts = { highWaterMark: cfg.partHwm }
this._pause = false
const self = this
this._hparser = new HeaderParser(cfg)
this._hparser.on('header', function (header) {
self._inHeader = false
self._part.emit('header', header)
})
}
inherits(Dicer, WritableStream)
Dicer.prototype.emit = function (ev) {
if (ev === 'finish' && !this._realFinish) {
if (!this._finished) {
const self = this
process.nextTick(function () {
self.emit('error', new Error('Unexpected end of multipart data'))
if (self._part && !self._ignoreData) {
const type = (self._isPreamble ? 'Preamble' : 'Part')
self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data'))
self._part.push(null)
process.nextTick(function () {
self._realFinish = true
self.emit('finish')
self._realFinish = false
})
return
}
self._realFinish = true
self.emit('finish')
self._realFinish = false
})
}
} else { WritableStream.prototype.emit.apply(this, arguments) }
}
Dicer.prototype._write = function (data, encoding, cb) {
// ignore unexpected data (e.g. extra trailer data after finished)
if (!this._hparser && !this._bparser) { return cb() }
if (this._headerFirst && this._isPreamble) {
if (!this._part) {
this._part = new PartStream(this._partOpts)
if (this.listenerCount('preamble') !== 0) { this.emit('preamble', this._part) } else { this._ignore() }
}
const r = this._hparser.push(data)
if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() }
}
// allows for "easier" testing
if (this._firstWrite) {
this._bparser.push(B_CRLF)
this._firstWrite = false
}
this._bparser.push(data)
if (this._pause) { this._cb = cb } else { cb() }
}
Dicer.prototype.reset = function () {
this._part = undefined
this._bparser = undefined
this._hparser = undefined
}
Dicer.prototype.setBoundary = function (boundary) {
const self = this
this._bparser = new StreamSearch('\r\n--' + boundary)
this._bparser.on('info', function (isMatch, data, start, end) {
self._oninfo(isMatch, data, start, end)
})
}
Dicer.prototype._ignore = function () {
if (this._part && !this._ignoreData) {
this._ignoreData = true
this._part.on('error', EMPTY_FN)
// we must perform some kind of read on the stream even though we are
// ignoring the data, otherwise node's Readable stream will not emit 'end'
// after pushing null to the stream
this._part.resume()
}
}
Dicer.prototype._oninfo = function (isMatch, data, start, end) {
let buf; const self = this; let i = 0; let r; let shouldWriteMore = true
if (!this._part && this._justMatched && data) {
while (this._dashes < 2 && (start + i) < end) {
if (data[start + i] === DASH) {
++i
++this._dashes
} else {
if (this._dashes) { buf = B_ONEDASH }
this._dashes = 0
break
}
}
if (this._dashes === 2) {
if ((start + i) < end && this.listenerCount('trailer') !== 0) { this.emit('trailer', data.slice(start + i, end)) }
this.reset()
this._finished = true
// no more parts will be added
if (self._parts === 0) {
self._realFinish = true
self.emit('finish')
self._realFinish = false
}
}
if (this._dashes) { return }
}
if (this._justMatched) { this._justMatched = false }
if (!this._part) {
this._part = new PartStream(this._partOpts)
this._part._read = function (n) {
self._unpause()
}
if (this._isPreamble && this.listenerCount('preamble') !== 0) {
this.emit('preamble', this._part)
} else if (this._isPreamble !== true && this.listenerCount('part') !== 0) {
this.emit('part', this._part)
} else {
this._ignore()
}
if (!this._isPreamble) { this._inHeader = true }
}
if (data && start < end && !this._ignoreData) {
if (this._isPreamble || !this._inHeader) {
if (buf) { shouldWriteMore = this._part.push(buf) }
shouldWriteMore = this._part.push(data.slice(start, end))
if (!shouldWriteMore) { this._pause = true }
} else if (!this._isPreamble && this._inHeader) {
if (buf) { this._hparser.push(buf) }
r = this._hparser.push(data.slice(start, end))
if (!this._inHeader && r !== undefined && r < end) { this._oninfo(false, data, start + r, end) }
}
}
if (isMatch) {
this._hparser.reset()
if (this._isPreamble) { this._isPreamble = false } else {
if (start !== end) {
++this._parts
this._part.on('end', function () {
if (--self._parts === 0) {
if (self._finished) {
self._realFinish = true
self.emit('finish')
self._realFinish = false
} else {
self._unpause()
}
}
})
}
}
this._part.push(null)
this._part = undefined
this._ignoreData = false
this._justMatched = true
this._dashes = 0
}
}
Dicer.prototype._unpause = function () {
if (!this._pause) { return }
this._pause = false
if (this._cb) {
const cb = this._cb
this._cb = undefined
cb()
}
}
module.exports = Dicer


@@ -1,100 +0,0 @@
'use strict'
const EventEmitter = require('node:events').EventEmitter
const inherits = require('node:util').inherits
const getLimit = require('../../../lib/utils/getLimit')
const StreamSearch = require('../../streamsearch/sbmh')
const B_DCRLF = Buffer.from('\r\n\r\n')
const RE_CRLF = /\r\n/g
const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/ // eslint-disable-line no-control-regex
function HeaderParser (cfg) {
EventEmitter.call(this)
cfg = cfg || {}
const self = this
this.nread = 0
this.maxed = false
this.npairs = 0
this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000)
this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024)
this.buffer = ''
this.header = {}
this.finished = false
this.ss = new StreamSearch(B_DCRLF)
this.ss.on('info', function (isMatch, data, start, end) {
if (data && !self.maxed) {
if (self.nread + end - start >= self.maxHeaderSize) {
end = self.maxHeaderSize - self.nread + start
self.nread = self.maxHeaderSize
self.maxed = true
} else { self.nread += (end - start) }
self.buffer += data.toString('binary', start, end)
}
if (isMatch) { self._finish() }
})
}
inherits(HeaderParser, EventEmitter)
HeaderParser.prototype.push = function (data) {
const r = this.ss.push(data)
if (this.finished) { return r }
}
HeaderParser.prototype.reset = function () {
this.finished = false
this.buffer = ''
this.header = {}
this.ss.reset()
}
HeaderParser.prototype._finish = function () {
if (this.buffer) { this._parseHeader() }
this.ss.matches = this.ss.maxMatches
const header = this.header
this.header = {}
this.buffer = ''
this.finished = true
this.nread = this.npairs = 0
this.maxed = false
this.emit('header', header)
}
HeaderParser.prototype._parseHeader = function () {
if (this.npairs === this.maxHeaderPairs) { return }
const lines = this.buffer.split(RE_CRLF)
const len = lines.length
let m, h
for (var i = 0; i < len; ++i) { // eslint-disable-line no-var
if (lines[i].length === 0) { continue }
if (lines[i][0] === '\t' || lines[i][0] === ' ') {
// folded header content
// RFC2822 says to just remove the CRLF and not the whitespace following
// it, so we follow the RFC and include the leading whitespace ...
if (h) {
this.header[h][this.header[h].length - 1] += lines[i]
continue
}
}
const posColon = lines[i].indexOf(':')
if (
posColon === -1 ||
posColon === 0
) {
return
}
m = RE_HDR.exec(lines[i])
h = m[1].toLowerCase()
this.header[h] = this.header[h] || []
this.header[h].push((m[2] || ''))
if (++this.npairs === this.maxHeaderPairs) { break }
}
}
module.exports = HeaderParser
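
For reference, a minimal sketch of how the HeaderParser above is driven: it buffers bytes until a blank line (\r\n\r\n) and then emits the parsed header map. The require path is illustrative, since this diff does not show the vendored file name.

const HeaderParser = require('./HeaderParser') // hypothetical path

const parser = new HeaderParser({})
parser.on('header', (header) => {
  console.log(header)
  // { 'content-disposition': [ 'form-data; name="note"' ], 'content-type': [ 'text/plain' ] }
})
parser.push(Buffer.from(
  'Content-Disposition: form-data; name="note"\r\n' +
  'Content-Type: text/plain\r\n' +
  '\r\n'
))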


@@ -1,13 +0,0 @@
'use strict'
const inherits = require('node:util').inherits
const ReadableStream = require('node:stream').Readable
function PartStream (opts) {
ReadableStream.call(this, opts)
}
inherits(PartStream, ReadableStream)
PartStream.prototype._read = function (n) {}
module.exports = PartStream


@@ -1,164 +0,0 @@
// Type definitions for dicer 0.2
// Project: https://github.com/mscdex/dicer
// Definitions by: BendingBender <https://github.com/BendingBender>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 2.2
/// <reference types="node" />
import stream = require("stream");
// tslint:disable:unified-signatures
/**
* A very fast streaming multipart parser for node.js.
* Dicer is a WritableStream
*
* Dicer (special) events:
* - on('finish', ()) - Emitted when all parts have been parsed and the Dicer instance has been ended.
* - on('part', (stream: PartStream)) - Emitted when a new part has been found.
* - on('preamble', (stream: PartStream)) - Emitted for preamble if you should happen to need it (can usually be ignored).
* - on('trailer', (data: Buffer)) - Emitted when trailing data was found after the terminating boundary (as with the preamble, this can usually be ignored too).
*/
export class Dicer extends stream.Writable {
/**
* Creates and returns a new Dicer instance with the following valid config settings:
*
* @param config The configuration to use
*/
constructor(config: Dicer.Config);
/**
* Sets the boundary to use for parsing and performs some initialization needed for parsing.
* You should only need to use this if you set headerFirst to true in the constructor and are parsing the boundary from the preamble header.
*
* @param boundary The boundary to use
*/
setBoundary(boundary: string): void;
addListener(event: "finish", listener: () => void): this;
addListener(event: "part", listener: (stream: Dicer.PartStream) => void): this;
addListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
addListener(event: "trailer", listener: (data: Buffer) => void): this;
addListener(event: "close", listener: () => void): this;
addListener(event: "drain", listener: () => void): this;
addListener(event: "error", listener: (err: Error) => void): this;
addListener(event: "pipe", listener: (src: stream.Readable) => void): this;
addListener(event: "unpipe", listener: (src: stream.Readable) => void): this;
addListener(event: string, listener: (...args: any[]) => void): this;
on(event: "finish", listener: () => void): this;
on(event: "part", listener: (stream: Dicer.PartStream) => void): this;
on(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
on(event: "trailer", listener: (data: Buffer) => void): this;
on(event: "close", listener: () => void): this;
on(event: "drain", listener: () => void): this;
on(event: "error", listener: (err: Error) => void): this;
on(event: "pipe", listener: (src: stream.Readable) => void): this;
on(event: "unpipe", listener: (src: stream.Readable) => void): this;
on(event: string, listener: (...args: any[]) => void): this;
once(event: "finish", listener: () => void): this;
once(event: "part", listener: (stream: Dicer.PartStream) => void): this;
once(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
once(event: "trailer", listener: (data: Buffer) => void): this;
once(event: "close", listener: () => void): this;
once(event: "drain", listener: () => void): this;
once(event: "error", listener: (err: Error) => void): this;
once(event: "pipe", listener: (src: stream.Readable) => void): this;
once(event: "unpipe", listener: (src: stream.Readable) => void): this;
once(event: string, listener: (...args: any[]) => void): this;
prependListener(event: "finish", listener: () => void): this;
prependListener(event: "part", listener: (stream: Dicer.PartStream) => void): this;
prependListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
prependListener(event: "trailer", listener: (data: Buffer) => void): this;
prependListener(event: "close", listener: () => void): this;
prependListener(event: "drain", listener: () => void): this;
prependListener(event: "error", listener: (err: Error) => void): this;
prependListener(event: "pipe", listener: (src: stream.Readable) => void): this;
prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this;
prependListener(event: string, listener: (...args: any[]) => void): this;
prependOnceListener(event: "finish", listener: () => void): this;
prependOnceListener(event: "part", listener: (stream: Dicer.PartStream) => void): this;
prependOnceListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
prependOnceListener(event: "trailer", listener: (data: Buffer) => void): this;
prependOnceListener(event: "close", listener: () => void): this;
prependOnceListener(event: "drain", listener: () => void): this;
prependOnceListener(event: "error", listener: (err: Error) => void): this;
prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this;
prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): this;
prependOnceListener(event: string, listener: (...args: any[]) => void): this;
removeListener(event: "finish", listener: () => void): this;
removeListener(event: "part", listener: (stream: Dicer.PartStream) => void): this;
removeListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
removeListener(event: "trailer", listener: (data: Buffer) => void): this;
removeListener(event: "close", listener: () => void): this;
removeListener(event: "drain", listener: () => void): this;
removeListener(event: "error", listener: (err: Error) => void): this;
removeListener(event: "pipe", listener: (src: stream.Readable) => void): this;
removeListener(event: "unpipe", listener: (src: stream.Readable) => void): this;
removeListener(event: string, listener: (...args: any[]) => void): this;
}
declare namespace Dicer {
interface Config {
/**
* This is the boundary used to detect the beginning of a new part.
*/
boundary?: string | undefined;
/**
* If true, preamble header parsing will be performed first.
*/
headerFirst?: boolean | undefined;
/**
* The maximum number of header key=>value pairs to parse Default: 2000 (same as node's http).
*/
maxHeaderPairs?: number | undefined;
}
/**
* PartStream is a _ReadableStream_
*
* PartStream (special) events:
* - on('header', (header: object)) - An object containing the header for this particular part. Each property value is an array of one or more string values.
*/
interface PartStream extends stream.Readable {
addListener(event: "header", listener: (header: object) => void): this;
addListener(event: "close", listener: () => void): this;
addListener(event: "data", listener: (chunk: Buffer | string) => void): this;
addListener(event: "end", listener: () => void): this;
addListener(event: "readable", listener: () => void): this;
addListener(event: "error", listener: (err: Error) => void): this;
addListener(event: string, listener: (...args: any[]) => void): this;
on(event: "header", listener: (header: object) => void): this;
on(event: "close", listener: () => void): this;
on(event: "data", listener: (chunk: Buffer | string) => void): this;
on(event: "end", listener: () => void): this;
on(event: "readable", listener: () => void): this;
on(event: "error", listener: (err: Error) => void): this;
on(event: string, listener: (...args: any[]) => void): this;
once(event: "header", listener: (header: object) => void): this;
once(event: "close", listener: () => void): this;
once(event: "data", listener: (chunk: Buffer | string) => void): this;
once(event: "end", listener: () => void): this;
once(event: "readable", listener: () => void): this;
once(event: "error", listener: (err: Error) => void): this;
once(event: string, listener: (...args: any[]) => void): this;
prependListener(event: "header", listener: (header: object) => void): this;
prependListener(event: "close", listener: () => void): this;
prependListener(event: "data", listener: (chunk: Buffer | string) => void): this;
prependListener(event: "end", listener: () => void): this;
prependListener(event: "readable", listener: () => void): this;
prependListener(event: "error", listener: (err: Error) => void): this;
prependListener(event: string, listener: (...args: any[]) => void): this;
prependOnceListener(event: "header", listener: (header: object) => void): this;
prependOnceListener(event: "close", listener: () => void): this;
prependOnceListener(event: "data", listener: (chunk: Buffer | string) => void): this;
prependOnceListener(event: "end", listener: () => void): this;
prependOnceListener(event: "readable", listener: () => void): this;
prependOnceListener(event: "error", listener: (err: Error) => void): this;
prependOnceListener(event: string, listener: (...args: any[]) => void): this;
removeListener(event: "header", listener: (header: object) => void): this;
removeListener(event: "close", listener: () => void): this;
removeListener(event: "data", listener: (chunk: Buffer | string) => void): this;
removeListener(event: "end", listener: () => void): this;
removeListener(event: "readable", listener: () => void): this;
removeListener(event: "error", listener: (err: Error) => void): this;
removeListener(event: string, listener: (...args: any[]) => void): this;
}
}
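
A minimal usage sketch for the Dicer events documented above, assuming the module is required directly from this vendored package (the require path is hypothetical):

const Dicer = require('./Dicer') // hypothetical path

const d = new Dicer({ boundary: 'AaB03x' })
d.on('part', (part) => {
  part.on('header', (header) => console.log('part header:', header))
  part.on('data', (chunk) => console.log('part data :', chunk.toString()))
  part.on('end', () => console.log('part end'))
})
d.on('finish', () => console.log('done'))

d.write(Buffer.from(
  '--AaB03x\r\n' +
  'Content-Disposition: form-data; name="note"\r\n' +
  '\r\n' +
  'hello world\r\n' +
  '--AaB03x--\r\n'
))
d.end()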


@@ -1,228 +0,0 @@
'use strict'
/**
* Copyright Brian White. All rights reserved.
*
* @see https://github.com/mscdex/streamsearch
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to
* deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
* sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*
* Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation
* by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool
*/
const EventEmitter = require('node:events').EventEmitter
const inherits = require('node:util').inherits
function SBMH (needle) {
if (typeof needle === 'string') {
needle = Buffer.from(needle)
}
if (!Buffer.isBuffer(needle)) {
throw new TypeError('The needle has to be a String or a Buffer.')
}
const needleLength = needle.length
if (needleLength === 0) {
throw new Error('The needle cannot be an empty String/Buffer.')
}
if (needleLength > 256) {
throw new Error('The needle cannot have a length bigger than 256.')
}
this.maxMatches = Infinity
this.matches = 0
this._occ = new Array(256)
.fill(needleLength) // Initialize occurrence table.
this._lookbehind_size = 0
this._needle = needle
this._bufpos = 0
this._lookbehind = Buffer.alloc(needleLength)
// Populate occurrence table with analysis of the needle,
// ignoring last letter.
for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var
this._occ[needle[i]] = needleLength - 1 - i
}
}
inherits(SBMH, EventEmitter)
SBMH.prototype.reset = function () {
this._lookbehind_size = 0
this.matches = 0
this._bufpos = 0
}
SBMH.prototype.push = function (chunk, pos) {
if (!Buffer.isBuffer(chunk)) {
chunk = Buffer.from(chunk, 'binary')
}
const chlen = chunk.length
this._bufpos = pos || 0
let r
while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) }
return r
}
SBMH.prototype._sbmh_feed = function (data) {
const len = data.length
const needle = this._needle
const needleLength = needle.length
const lastNeedleChar = needle[needleLength - 1]
// Positive: points to a position in `data`
// pos == 3 points to data[3]
// Negative: points to a position in the lookbehind buffer
// pos == -2 points to lookbehind[lookbehind_size - 2]
let pos = -this._lookbehind_size
let ch
if (pos < 0) {
// Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool
// search with character lookup code that considers both the
// lookbehind buffer and the current round's haystack data.
//
// Loop until
// there is a match.
// or until
// we've moved past the position that requires the
// lookbehind buffer. In this case we switch to the
// optimized loop.
// or until
// the character to look at lies outside the haystack.
while (pos < 0 && pos <= len - needleLength) {
ch = this._sbmh_lookup_char(data, pos + needleLength - 1)
if (
ch === lastNeedleChar &&
this._sbmh_memcmp(data, pos, needleLength - 1)
) {
this._lookbehind_size = 0
++this.matches
this.emit('info', true)
return (this._bufpos = pos + needleLength)
}
pos += this._occ[ch]
}
// No match.
if (pos < 0) {
// There's too few data for Boyer-Moore-Horspool to run,
// so let's use a different algorithm to skip as much as
// we can.
// Forward pos until
// the trailing part of lookbehind + data
// looks like the beginning of the needle
// or until
// pos == 0
while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos }
}
if (pos >= 0) {
// Discard lookbehind buffer.
this.emit('info', false, this._lookbehind, 0, this._lookbehind_size)
this._lookbehind_size = 0
} else {
// Cut off part of the lookbehind buffer that has
// been processed and append the entire haystack
// into it.
const bytesToCutOff = this._lookbehind_size + pos
if (bytesToCutOff > 0) {
// The cut off data is guaranteed not to contain the needle.
this.emit('info', false, this._lookbehind, 0, bytesToCutOff)
}
this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff,
this._lookbehind_size - bytesToCutOff)
this._lookbehind_size -= bytesToCutOff
data.copy(this._lookbehind, this._lookbehind_size)
this._lookbehind_size += len
this._bufpos = len
return len
}
}
pos += (pos >= 0) * this._bufpos
// Lookbehind buffer is now empty. We only need to check if the
// needle is in the haystack.
if (data.indexOf(needle, pos) !== -1) {
pos = data.indexOf(needle, pos)
++this.matches
if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) }
return (this._bufpos = pos + needleLength)
} else {
pos = len - needleLength
}
// There was no match. If there's trailing haystack data that we cannot
// match yet using the Boyer-Moore-Horspool algorithm (because the trailing
// data is less than the needle size) then match using a modified
// algorithm that starts matching from the beginning instead of the end.
// Whatever trailing data is left after running this algorithm is added to
// the lookbehind buffer.
while (
pos < len &&
(
data[pos] !== needle[0] ||
(
(Buffer.compare(
data.subarray(pos, pos + len - pos),
needle.subarray(0, len - pos)
) !== 0)
)
)
) {
++pos
}
if (pos < len) {
data.copy(this._lookbehind, 0, pos, pos + (len - pos))
this._lookbehind_size = len - pos
}
// Everything until pos is guaranteed not to contain needle data.
if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) }
this._bufpos = len
return len
}
SBMH.prototype._sbmh_lookup_char = function (data, pos) {
return (pos < 0)
? this._lookbehind[this._lookbehind_size + pos]
: data[pos]
}
SBMH.prototype._sbmh_memcmp = function (data, pos, len) {
for (var i = 0; i < len; ++i) { // eslint-disable-line no-var
if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false }
}
return true
}
module.exports = SBMH
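
A small sketch of the streaming search above: 'info' is emitted with any non-matching bytes, and again whenever the needle is found, so chunks can be pushed as they arrive (the require path is illustrative):

const StreamSearch = require('./sbmh') // hypothetical path

const search = new StreamSearch('\r\n')
search.on('info', (isMatch, data, start, end) => {
  if (data) console.log('data :', data.toString('latin1', start, end))
  if (isMatch) console.log('match: needle found')
})
search.push(Buffer.from('foo\r\nbar'))
// data : foo
// match: needle found
// data : bar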


@@ -1,196 +0,0 @@
// Definitions by: Jacob Baskin <https://github.com/jacobbaskin>
// BendingBender <https://github.com/BendingBender>
// Igor Savin <https://github.com/kibertoad>
/// <reference types="node" />
import * as http from 'http';
import { Readable, Writable } from 'stream';
export { Dicer } from "../deps/dicer/lib/dicer";
export const Busboy: BusboyConstructor;
export default Busboy;
export interface BusboyConfig {
/**
* These are the HTTP headers of the incoming request, which are used by individual parsers.
*/
headers: BusboyHeaders;
/**
* `highWaterMark` to use for this Busboy instance.
* @default WritableStream default.
*/
highWaterMark?: number | undefined;
/**
* highWaterMark to use for file streams.
* @default ReadableStream default.
*/
fileHwm?: number | undefined;
/**
* Default character set to use when one isn't defined.
* @default 'utf8'
*/
defCharset?: string | undefined;
/**
* Detect if a Part is a file.
*
* By default a file is detected if contentType
* is application/octet-stream or fileName is not
* undefined.
*
* Modify this to handle e.g. Blobs.
*/
isPartAFile?: (fieldName: string | undefined, contentType: string | undefined, fileName: string | undefined) => boolean;
/**
* If paths in the multipart 'filename' field shall be preserved.
* @default false
*/
preservePath?: boolean | undefined;
/**
* Various limits on incoming data.
*/
limits?:
| {
/**
* Max field name size (in bytes)
* @default 100 bytes
*/
fieldNameSize?: number | undefined;
/**
* Max field value size (in bytes)
* @default 1MB
*/
fieldSize?: number | undefined;
/**
* Max number of non-file fields
* @default Infinity
*/
fields?: number | undefined;
/**
* For multipart forms, the max file size (in bytes)
* @default Infinity
*/
fileSize?: number | undefined;
/**
* For multipart forms, the max number of file fields
* @default Infinity
*/
files?: number | undefined;
/**
* For multipart forms, the max number of parts (fields + files)
* @default Infinity
*/
parts?: number | undefined;
/**
* For multipart forms, the max number of header key=>value pairs to parse
* @default 2000
*/
headerPairs?: number | undefined;
/**
* For multipart forms, the max size of a header part
* @default 81920
*/
headerSize?: number | undefined;
}
| undefined;
}
export type BusboyHeaders = { 'content-type': string } & http.IncomingHttpHeaders;
export interface BusboyFileStream extends
Readable {
truncated: boolean;
/**
* The number of bytes that have been read so far.
*/
bytesRead: number;
}
export interface Busboy extends Writable {
addListener<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
addListener(event: string | symbol, listener: (...args: any[]) => void): this;
on<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
on(event: string | symbol, listener: (...args: any[]) => void): this;
once<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
once(event: string | symbol, listener: (...args: any[]) => void): this;
removeListener<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
removeListener(event: string | symbol, listener: (...args: any[]) => void): this;
off<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
off(event: string | symbol, listener: (...args: any[]) => void): this;
prependListener<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
prependListener(event: string | symbol, listener: (...args: any[]) => void): this;
prependOnceListener<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this;
}
export interface BusboyEvents {
/**
* Emitted for each new file form field found.
*
* * Note: if you listen for this event, you should always handle the `stream` no matter if you care about the
* file contents or not (e.g. you can simply just do `stream.resume();` if you want to discard the contents),
* otherwise the 'finish' event will never fire on the Busboy instance. However, if you don't care about **any**
* incoming files, you can simply not listen for the 'file' event at all and any/all files will be automatically
* and safely discarded (these discarded files do still count towards `files` and `parts` limits).
* * If a configured file size limit was reached, `stream` will both have a boolean property `truncated`
* (best checked at the end of the stream) and emit a 'limit' event to notify you when this happens.
*
* @param listener.transferEncoding Contains the 'Content-Transfer-Encoding' value for the file stream.
* @param listener.mimeType Contains the 'Content-Type' value for the file stream.
*/
file: (
fieldname: string,
stream: BusboyFileStream,
filename: string,
transferEncoding: string,
mimeType: string,
) => void;
/**
* Emitted for each new non-file field found.
*/
field: (
fieldname: string,
value: string,
fieldnameTruncated: boolean,
valueTruncated: boolean,
transferEncoding: string,
mimeType: string,
) => void;
finish: () => void;
/**
* Emitted when specified `parts` limit has been reached. No more 'file' or 'field' events will be emitted.
*/
partsLimit: () => void;
/**
* Emitted when specified `files` limit has been reached. No more 'file' events will be emitted.
*/
filesLimit: () => void;
/**
* Emitted when specified `fields` limit has been reached. No more 'field' events will be emitted.
*/
fieldsLimit: () => void;
error: (error: unknown) => void;
}
export interface BusboyConstructor {
(options: BusboyConfig): Busboy;
new(options: BusboyConfig): Busboy;
}
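
A minimal sketch of the Busboy API described above; as the note on the 'file' event explains, file streams must always be drained. The require path and the sample body are illustrative:

const Busboy = require('./main') // hypothetical path to this package's entry point

const busboy = new Busboy({
  headers: { 'content-type': 'multipart/form-data; boundary=AaB03x' }
})
busboy.on('file', (fieldname, file, filename, encoding, mimeType) => {
  console.log('file:', fieldname, filename, mimeType)
  file.resume() // always consume the stream, even if the contents are ignored
})
busboy.on('field', (fieldname, value) => console.log('field:', fieldname, '=', value))
busboy.on('finish', () => console.log('parsing finished'))

// In a real server this would be `req.pipe(busboy)`.
busboy.end(Buffer.from(
  '--AaB03x\r\n' +
  'Content-Disposition: form-data; name="note"\r\n' +
  '\r\n' +
  'hello world\r\n' +
  '--AaB03x--\r\n'
))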


@@ -1,85 +0,0 @@
'use strict'
const WritableStream = require('node:stream').Writable
const { inherits } = require('node:util')
const Dicer = require('../deps/dicer/lib/Dicer')
const MultipartParser = require('./types/multipart')
const UrlencodedParser = require('./types/urlencoded')
const parseParams = require('./utils/parseParams')
function Busboy (opts) {
if (!(this instanceof Busboy)) { return new Busboy(opts) }
if (typeof opts !== 'object') {
throw new TypeError('Busboy expected an options-Object.')
}
if (typeof opts.headers !== 'object') {
throw new TypeError('Busboy expected an options-Object with headers-attribute.')
}
if (typeof opts.headers['content-type'] !== 'string') {
throw new TypeError('Missing Content-Type-header.')
}
const {
headers,
...streamOptions
} = opts
this.opts = {
autoDestroy: false,
...streamOptions
}
WritableStream.call(this, this.opts)
this._done = false
this._parser = this.getParserByHeaders(headers)
this._finished = false
}
inherits(Busboy, WritableStream)
Busboy.prototype.emit = function (ev) {
if (ev === 'finish') {
if (!this._done) {
this._parser?.end()
return
} else if (this._finished) {
return
}
this._finished = true
}
WritableStream.prototype.emit.apply(this, arguments)
}
Busboy.prototype.getParserByHeaders = function (headers) {
const parsed = parseParams(headers['content-type'])
const cfg = {
defCharset: this.opts.defCharset,
fileHwm: this.opts.fileHwm,
headers,
highWaterMark: this.opts.highWaterMark,
isPartAFile: this.opts.isPartAFile,
limits: this.opts.limits,
parsedConType: parsed,
preservePath: this.opts.preservePath
}
if (MultipartParser.detect.test(parsed[0])) {
return new MultipartParser(this, cfg)
}
if (UrlencodedParser.detect.test(parsed[0])) {
return new UrlencodedParser(this, cfg)
}
throw new Error('Unsupported Content-Type.')
}
Busboy.prototype._write = function (chunk, encoding, cb) {
this._parser.write(chunk, cb)
}
module.exports = Busboy
module.exports.default = Busboy
module.exports.Busboy = Busboy
module.exports.Dicer = Dicer


@@ -1,306 +0,0 @@
'use strict'
// TODO:
// * support 1 nested multipart level
// (see second multipart example here:
// http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data)
// * support limits.fieldNameSize
// -- this will require modifications to utils.parseParams
const { Readable } = require('node:stream')
const { inherits } = require('node:util')
const Dicer = require('../../deps/dicer/lib/Dicer')
const parseParams = require('../utils/parseParams')
const decodeText = require('../utils/decodeText')
const basename = require('../utils/basename')
const getLimit = require('../utils/getLimit')
const RE_BOUNDARY = /^boundary$/i
const RE_FIELD = /^form-data$/i
const RE_CHARSET = /^charset$/i
const RE_FILENAME = /^filename$/i
const RE_NAME = /^name$/i
Multipart.detect = /^multipart\/form-data/i
function Multipart (boy, cfg) {
let i
let len
const self = this
let boundary
const limits = cfg.limits
const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined))
const parsedConType = cfg.parsedConType || []
const defCharset = cfg.defCharset || 'utf8'
const preservePath = cfg.preservePath
const fileOpts = { highWaterMark: cfg.fileHwm }
for (i = 0, len = parsedConType.length; i < len; ++i) {
if (Array.isArray(parsedConType[i]) &&
RE_BOUNDARY.test(parsedConType[i][0])) {
boundary = parsedConType[i][1]
break
}
}
function checkFinished () {
if (nends === 0 && finished && !boy._done) {
finished = false
self.end()
}
}
if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') }
const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024)
const fileSizeLimit = getLimit(limits, 'fileSize', Infinity)
const filesLimit = getLimit(limits, 'files', Infinity)
const fieldsLimit = getLimit(limits, 'fields', Infinity)
const partsLimit = getLimit(limits, 'parts', Infinity)
const headerPairsLimit = getLimit(limits, 'headerPairs', 2000)
const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024)
let nfiles = 0
let nfields = 0
let nends = 0
let curFile
let curField
let finished = false
this._needDrain = false
this._pause = false
this._cb = undefined
this._nparts = 0
this._boy = boy
const parserCfg = {
boundary,
maxHeaderPairs: headerPairsLimit,
maxHeaderSize: headerSizeLimit,
partHwm: fileOpts.highWaterMark,
highWaterMark: cfg.highWaterMark
}
this.parser = new Dicer(parserCfg)
this.parser.on('drain', function () {
self._needDrain = false
if (self._cb && !self._pause) {
const cb = self._cb
self._cb = undefined
cb()
}
}).on('part', function onPart (part) {
if (++self._nparts > partsLimit) {
self.parser.removeListener('part', onPart)
self.parser.on('part', skipPart)
boy.hitPartsLimit = true
boy.emit('partsLimit')
return skipPart(part)
}
// hack because streams2 _always_ doesn't emit 'end' until nextTick, so let
// us emit 'end' early since we know the part has ended if we are already
// seeing the next part
if (curField) {
const field = curField
field.emit('end')
field.removeAllListeners('end')
}
part.on('header', function (header) {
let contype
let fieldname
let parsed
let charset
let encoding
let filename
let nsize = 0
if (header['content-type']) {
parsed = parseParams(header['content-type'][0])
if (parsed[0]) {
contype = parsed[0].toLowerCase()
for (i = 0, len = parsed.length; i < len; ++i) {
if (RE_CHARSET.test(parsed[i][0])) {
charset = parsed[i][1].toLowerCase()
break
}
}
}
}
if (contype === undefined) { contype = 'text/plain' }
if (charset === undefined) { charset = defCharset }
if (header['content-disposition']) {
parsed = parseParams(header['content-disposition'][0])
if (!RE_FIELD.test(parsed[0])) { return skipPart(part) }
for (i = 0, len = parsed.length; i < len; ++i) {
if (RE_NAME.test(parsed[i][0])) {
fieldname = parsed[i][1]
} else if (RE_FILENAME.test(parsed[i][0])) {
filename = parsed[i][1]
if (!preservePath) { filename = basename(filename) }
}
}
} else { return skipPart(part) }
if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' }
let onData,
onEnd
if (isPartAFile(fieldname, contype, filename)) {
// file/binary field
if (nfiles === filesLimit) {
if (!boy.hitFilesLimit) {
boy.hitFilesLimit = true
boy.emit('filesLimit')
}
return skipPart(part)
}
++nfiles
if (boy.listenerCount('file') === 0) {
self.parser._ignore()
return
}
++nends
const file = new FileStream(fileOpts)
curFile = file
file.on('end', function () {
--nends
self._pause = false
checkFinished()
if (self._cb && !self._needDrain) {
const cb = self._cb
self._cb = undefined
cb()
}
})
file._read = function (n) {
if (!self._pause) { return }
self._pause = false
if (self._cb && !self._needDrain) {
const cb = self._cb
self._cb = undefined
cb()
}
}
boy.emit('file', fieldname, file, filename, encoding, contype)
onData = function (data) {
if ((nsize += data.length) > fileSizeLimit) {
const extralen = fileSizeLimit - nsize + data.length
if (extralen > 0) { file.push(data.slice(0, extralen)) }
file.truncated = true
file.bytesRead = fileSizeLimit
part.removeAllListeners('data')
file.emit('limit')
return
} else if (!file.push(data)) { self._pause = true }
file.bytesRead = nsize
}
onEnd = function () {
curFile = undefined
file.push(null)
}
} else {
// non-file field
if (nfields === fieldsLimit) {
if (!boy.hitFieldsLimit) {
boy.hitFieldsLimit = true
boy.emit('fieldsLimit')
}
return skipPart(part)
}
++nfields
++nends
let buffer = ''
let truncated = false
curField = part
onData = function (data) {
if ((nsize += data.length) > fieldSizeLimit) {
const extralen = (fieldSizeLimit - (nsize - data.length))
buffer += data.toString('binary', 0, extralen)
truncated = true
part.removeAllListeners('data')
} else { buffer += data.toString('binary') }
}
onEnd = function () {
curField = undefined
if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) }
boy.emit('field', fieldname, buffer, false, truncated, encoding, contype)
--nends
checkFinished()
}
}
/* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become
broken. Streams2/streams3 is a huge black box of confusion, but
somehow overriding the sync state seems to fix things again (and still
seems to work for previous node versions).
*/
part._readableState.sync = false
part.on('data', onData)
part.on('end', onEnd)
}).on('error', function (err) {
if (curFile) { curFile.emit('error', err) }
})
}).on('error', function (err) {
boy.emit('error', err)
}).on('finish', function () {
finished = true
checkFinished()
})
}
Multipart.prototype.write = function (chunk, cb) {
const r = this.parser.write(chunk)
if (r && !this._pause) {
cb()
} else {
this._needDrain = !r
this._cb = cb
}
}
Multipart.prototype.end = function () {
const self = this
if (self.parser.writable) {
self.parser.end()
} else if (!self._boy._done) {
process.nextTick(function () {
self._boy._done = true
self._boy.emit('finish')
})
}
}
function skipPart (part) {
part.resume()
}
function FileStream (opts) {
Readable.call(this, opts)
this.bytesRead = 0
this.truncated = false
}
inherits(FileStream, Readable)
FileStream.prototype._read = function (n) {}
module.exports = Multipart


@@ -1,190 +0,0 @@
'use strict'
const Decoder = require('../utils/Decoder')
const decodeText = require('../utils/decodeText')
const getLimit = require('../utils/getLimit')
const RE_CHARSET = /^charset$/i
UrlEncoded.detect = /^application\/x-www-form-urlencoded/i
function UrlEncoded (boy, cfg) {
const limits = cfg.limits
const parsedConType = cfg.parsedConType
this.boy = boy
this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024)
this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100)
this.fieldsLimit = getLimit(limits, 'fields', Infinity)
let charset
for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var
if (Array.isArray(parsedConType[i]) &&
RE_CHARSET.test(parsedConType[i][0])) {
charset = parsedConType[i][1].toLowerCase()
break
}
}
if (charset === undefined) { charset = cfg.defCharset || 'utf8' }
this.decoder = new Decoder()
this.charset = charset
this._fields = 0
this._state = 'key'
this._checkingBytes = true
this._bytesKey = 0
this._bytesVal = 0
this._key = ''
this._val = ''
this._keyTrunc = false
this._valTrunc = false
this._hitLimit = false
}
UrlEncoded.prototype.write = function (data, cb) {
if (this._fields === this.fieldsLimit) {
if (!this.boy.hitFieldsLimit) {
this.boy.hitFieldsLimit = true
this.boy.emit('fieldsLimit')
}
return cb()
}
let idxeq; let idxamp; let i; let p = 0; const len = data.length
while (p < len) {
if (this._state === 'key') {
idxeq = idxamp = undefined
for (i = p; i < len; ++i) {
if (!this._checkingBytes) { ++p }
if (data[i] === 0x3D/* = */) {
idxeq = i
break
} else if (data[i] === 0x26/* & */) {
idxamp = i
break
}
if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) {
this._hitLimit = true
break
} else if (this._checkingBytes) { ++this._bytesKey }
}
if (idxeq !== undefined) {
// key with assignment
if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) }
this._state = 'val'
this._hitLimit = false
this._checkingBytes = true
this._val = ''
this._bytesVal = 0
this._valTrunc = false
this.decoder.reset()
p = idxeq + 1
} else if (idxamp !== undefined) {
// key with no assignment
++this._fields
let key; const keyTrunc = this._keyTrunc
if (idxamp > p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } else { key = this._key }
this._hitLimit = false
this._checkingBytes = true
this._key = ''
this._bytesKey = 0
this._keyTrunc = false
this.decoder.reset()
if (key.length) {
this.boy.emit('field', decodeText(key, 'binary', this.charset),
'',
keyTrunc,
false)
}
p = idxamp + 1
if (this._fields === this.fieldsLimit) { return cb() }
} else if (this._hitLimit) {
// we may not have hit the actual limit if there are encoded bytes...
if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) }
p = i
if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) {
// yep, we actually did hit the limit
this._checkingBytes = false
this._keyTrunc = true
}
} else {
if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) }
p = len
}
} else {
idxamp = undefined
for (i = p; i < len; ++i) {
if (!this._checkingBytes) { ++p }
if (data[i] === 0x26/* & */) {
idxamp = i
break
}
if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) {
this._hitLimit = true
break
} else if (this._checkingBytes) { ++this._bytesVal }
}
if (idxamp !== undefined) {
++this._fields
if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) }
this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
decodeText(this._val, 'binary', this.charset),
this._keyTrunc,
this._valTrunc)
this._state = 'key'
this._hitLimit = false
this._checkingBytes = true
this._key = ''
this._bytesKey = 0
this._keyTrunc = false
this.decoder.reset()
p = idxamp + 1
if (this._fields === this.fieldsLimit) { return cb() }
} else if (this._hitLimit) {
// we may not have hit the actual limit if there are encoded bytes...
if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) }
p = i
if ((this._val === '' && this.fieldSizeLimit === 0) ||
(this._bytesVal = this._val.length) === this.fieldSizeLimit) {
// yep, we actually did hit the limit
this._checkingBytes = false
this._valTrunc = true
}
} else {
if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) }
p = len
}
}
}
cb()
}
UrlEncoded.prototype.end = function () {
if (this.boy._done) { return }
if (this._state === 'key' && this._key.length > 0) {
this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
'',
this._keyTrunc,
false)
} else if (this._state === 'val') {
this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
decodeText(this._val, 'binary', this.charset),
this._keyTrunc,
this._valTrunc)
}
this.boy._done = true
this.boy.emit('finish')
}
module.exports = UrlEncoded
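
A sketch of the urlencoded parser above, driven directly with a stand-in emitter in place of the Busboy instance it normally reports to (the require path and wiring are illustrative; in practice this parser is created by Busboy itself):

const { EventEmitter } = require('node:events')
const UrlEncoded = require('./urlencoded') // hypothetical path

const boy = new EventEmitter()
boy.on('field', (name, value) => console.log(name, '=', value))

const parser = new UrlEncoded(boy, { limits: {}, parsedConType: [] })
parser.write(Buffer.from('a=1&b=two%20words'), () => {})
parser.end()
// a = 1
// b = two words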


@@ -1,54 +0,0 @@
'use strict'
const RE_PLUS = /\+/g
const HEX = [
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
function Decoder () {
this.buffer = undefined
}
Decoder.prototype.write = function (str) {
// Replace '+' with ' ' before decoding
str = str.replace(RE_PLUS, ' ')
let res = ''
let i = 0; let p = 0; const len = str.length
for (; i < len; ++i) {
if (this.buffer !== undefined) {
if (!HEX[str.charCodeAt(i)]) {
res += '%' + this.buffer
this.buffer = undefined
--i // retry character
} else {
this.buffer += str[i]
++p
if (this.buffer.length === 2) {
res += String.fromCharCode(parseInt(this.buffer, 16))
this.buffer = undefined
}
}
} else if (str[i] === '%') {
if (i > p) {
res += str.substring(p, i)
p = i
}
this.buffer = ''
++p
}
}
if (p < len && this.buffer === undefined) { res += str.substring(p) }
return res
}
Decoder.prototype.reset = function () {
this.buffer = undefined
}
module.exports = Decoder
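
The Decoder above percent-decodes urlencoded text and keeps an incomplete escape buffered between writes, so a %XX sequence split across chunks still decodes correctly. A small sketch (the require path is illustrative):

const Decoder = require('./Decoder') // hypothetical path

const dec = new Decoder()
console.log(dec.write('hello%2'))  // 'hello'    (the incomplete "%2" is buffered)
console.log(dec.write('0world+!')) // ' world !' (buffered escape completed, '+' becomes a space)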


@@ -1,14 +0,0 @@
'use strict'
module.exports = function basename (path) {
if (typeof path !== 'string') { return '' }
for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var
switch (path.charCodeAt(i)) {
case 0x2F: // '/'
case 0x5C: // '\'
path = path.slice(i + 1)
return (path === '..' || path === '.' ? '' : path)
}
}
return (path === '..' || path === '.' ? '' : path)
}
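
A few illustrative calls against the basename helper above (hypothetical require path); it handles both slash styles and rejects non-strings and bare dot segments:

const basename = require('./basename') // hypothetical path

basename('/tmp/uploads/photo.png')   // => 'photo.png'
basename('C:\\Users\\me\\photo.png') // => 'photo.png'
basename('..')                       // => ''
basename(42)                         // => ''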


@@ -1,114 +0,0 @@
'use strict'
// Node has always utf-8
const utf8Decoder = new TextDecoder('utf-8')
const textDecoders = new Map([
['utf-8', utf8Decoder],
['utf8', utf8Decoder]
])
function getDecoder (charset) {
let lc
while (true) {
switch (charset) {
case 'utf-8':
case 'utf8':
return decoders.utf8
case 'latin1':
case 'ascii': // TODO: Make these a separate, strict decoder?
case 'us-ascii':
case 'iso-8859-1':
case 'iso8859-1':
case 'iso88591':
case 'iso_8859-1':
case 'windows-1252':
case 'iso_8859-1:1987':
case 'cp1252':
case 'x-cp1252':
return decoders.latin1
case 'utf16le':
case 'utf-16le':
case 'ucs2':
case 'ucs-2':
return decoders.utf16le
case 'base64':
return decoders.base64
default:
if (lc === undefined) {
lc = true
charset = charset.toLowerCase()
continue
}
return decoders.other.bind(charset)
}
}
}
const decoders = {
utf8: (data, sourceEncoding) => {
if (data.length === 0) {
return ''
}
if (typeof data === 'string') {
data = Buffer.from(data, sourceEncoding)
}
return data.utf8Slice(0, data.length)
},
latin1: (data, sourceEncoding) => {
if (data.length === 0) {
return ''
}
if (typeof data === 'string') {
return data
}
return data.latin1Slice(0, data.length)
},
utf16le: (data, sourceEncoding) => {
if (data.length === 0) {
return ''
}
if (typeof data === 'string') {
data = Buffer.from(data, sourceEncoding)
}
return data.ucs2Slice(0, data.length)
},
base64: (data, sourceEncoding) => {
if (data.length === 0) {
return ''
}
if (typeof data === 'string') {
data = Buffer.from(data, sourceEncoding)
}
return data.base64Slice(0, data.length)
},
other: (data, sourceEncoding) => {
if (data.length === 0) {
return ''
}
if (typeof data === 'string') {
data = Buffer.from(data, sourceEncoding)
}
if (textDecoders.has(this.toString())) {
try {
return textDecoders.get(this).decode(data)
} catch {}
}
return typeof data === 'string'
? data
: data.toString()
}
}
function decodeText (text, sourceEncoding, destEncoding) {
if (text) {
return getDecoder(destEncoding)(text, sourceEncoding)
}
return text
}
module.exports = decodeText
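
A couple of illustrative calls against decodeText above (hypothetical require path); the source text arrives as 'binary'-decoded bytes and is re-interpreted in the destination charset:

const decodeText = require('./decodeText') // hypothetical path

decodeText('caf\xc3\xa9', 'binary', 'utf8') // => 'café'  (UTF-8 bytes re-decoded)
decodeText('na\xefve', 'binary', 'latin1')  // => 'naïve' (latin1 strings pass through)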


@@ -1,16 +0,0 @@
'use strict'
module.exports = function getLimit (limits, name, defaultLimit) {
if (
!limits ||
limits[name] === undefined ||
limits[name] === null
) { return defaultLimit }
if (
typeof limits[name] !== 'number' ||
isNaN(limits[name])
) { throw new TypeError('Limit ' + name + ' is not a valid number') }
return limits[name]
}
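
Illustrative calls against getLimit above (hypothetical require path):

const getLimit = require('./getLimit') // hypothetical path

getLimit({ fileSize: 1024 }, 'fileSize', Infinity) // => 1024
getLimit({}, 'files', Infinity)                    // => Infinity (default applies)
getLimit({ fields: '10' }, 'fields', Infinity)     // throws TypeError: Limit fields is not a valid number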


@@ -1,196 +0,0 @@
/* eslint-disable object-property-newline */
'use strict'
const decodeText = require('./decodeText')
const RE_ENCODED = /%[a-fA-F0-9][a-fA-F0-9]/g
const EncodedLookup = {
'%00': '\x00', '%01': '\x01', '%02': '\x02', '%03': '\x03', '%04': '\x04',
'%05': '\x05', '%06': '\x06', '%07': '\x07', '%08': '\x08', '%09': '\x09',
'%0a': '\x0a', '%0A': '\x0a', '%0b': '\x0b', '%0B': '\x0b', '%0c': '\x0c',
'%0C': '\x0c', '%0d': '\x0d', '%0D': '\x0d', '%0e': '\x0e', '%0E': '\x0e',
'%0f': '\x0f', '%0F': '\x0f', '%10': '\x10', '%11': '\x11', '%12': '\x12',
'%13': '\x13', '%14': '\x14', '%15': '\x15', '%16': '\x16', '%17': '\x17',
'%18': '\x18', '%19': '\x19', '%1a': '\x1a', '%1A': '\x1a', '%1b': '\x1b',
'%1B': '\x1b', '%1c': '\x1c', '%1C': '\x1c', '%1d': '\x1d', '%1D': '\x1d',
'%1e': '\x1e', '%1E': '\x1e', '%1f': '\x1f', '%1F': '\x1f', '%20': '\x20',
'%21': '\x21', '%22': '\x22', '%23': '\x23', '%24': '\x24', '%25': '\x25',
'%26': '\x26', '%27': '\x27', '%28': '\x28', '%29': '\x29', '%2a': '\x2a',
'%2A': '\x2a', '%2b': '\x2b', '%2B': '\x2b', '%2c': '\x2c', '%2C': '\x2c',
'%2d': '\x2d', '%2D': '\x2d', '%2e': '\x2e', '%2E': '\x2e', '%2f': '\x2f',
'%2F': '\x2f', '%30': '\x30', '%31': '\x31', '%32': '\x32', '%33': '\x33',
'%34': '\x34', '%35': '\x35', '%36': '\x36', '%37': '\x37', '%38': '\x38',
'%39': '\x39', '%3a': '\x3a', '%3A': '\x3a', '%3b': '\x3b', '%3B': '\x3b',
'%3c': '\x3c', '%3C': '\x3c', '%3d': '\x3d', '%3D': '\x3d', '%3e': '\x3e',
'%3E': '\x3e', '%3f': '\x3f', '%3F': '\x3f', '%40': '\x40', '%41': '\x41',
'%42': '\x42', '%43': '\x43', '%44': '\x44', '%45': '\x45', '%46': '\x46',
'%47': '\x47', '%48': '\x48', '%49': '\x49', '%4a': '\x4a', '%4A': '\x4a',
'%4b': '\x4b', '%4B': '\x4b', '%4c': '\x4c', '%4C': '\x4c', '%4d': '\x4d',
'%4D': '\x4d', '%4e': '\x4e', '%4E': '\x4e', '%4f': '\x4f', '%4F': '\x4f',
'%50': '\x50', '%51': '\x51', '%52': '\x52', '%53': '\x53', '%54': '\x54',
'%55': '\x55', '%56': '\x56', '%57': '\x57', '%58': '\x58', '%59': '\x59',
'%5a': '\x5a', '%5A': '\x5a', '%5b': '\x5b', '%5B': '\x5b', '%5c': '\x5c',
'%5C': '\x5c', '%5d': '\x5d', '%5D': '\x5d', '%5e': '\x5e', '%5E': '\x5e',
'%5f': '\x5f', '%5F': '\x5f', '%60': '\x60', '%61': '\x61', '%62': '\x62',
'%63': '\x63', '%64': '\x64', '%65': '\x65', '%66': '\x66', '%67': '\x67',
'%68': '\x68', '%69': '\x69', '%6a': '\x6a', '%6A': '\x6a', '%6b': '\x6b',
'%6B': '\x6b', '%6c': '\x6c', '%6C': '\x6c', '%6d': '\x6d', '%6D': '\x6d',
'%6e': '\x6e', '%6E': '\x6e', '%6f': '\x6f', '%6F': '\x6f', '%70': '\x70',
'%71': '\x71', '%72': '\x72', '%73': '\x73', '%74': '\x74', '%75': '\x75',
'%76': '\x76', '%77': '\x77', '%78': '\x78', '%79': '\x79', '%7a': '\x7a',
'%7A': '\x7a', '%7b': '\x7b', '%7B': '\x7b', '%7c': '\x7c', '%7C': '\x7c',
'%7d': '\x7d', '%7D': '\x7d', '%7e': '\x7e', '%7E': '\x7e', '%7f': '\x7f',
'%7F': '\x7f', '%80': '\x80', '%81': '\x81', '%82': '\x82', '%83': '\x83',
'%84': '\x84', '%85': '\x85', '%86': '\x86', '%87': '\x87', '%88': '\x88',
'%89': '\x89', '%8a': '\x8a', '%8A': '\x8a', '%8b': '\x8b', '%8B': '\x8b',
'%8c': '\x8c', '%8C': '\x8c', '%8d': '\x8d', '%8D': '\x8d', '%8e': '\x8e',
'%8E': '\x8e', '%8f': '\x8f', '%8F': '\x8f', '%90': '\x90', '%91': '\x91',
'%92': '\x92', '%93': '\x93', '%94': '\x94', '%95': '\x95', '%96': '\x96',
'%97': '\x97', '%98': '\x98', '%99': '\x99', '%9a': '\x9a', '%9A': '\x9a',
'%9b': '\x9b', '%9B': '\x9b', '%9c': '\x9c', '%9C': '\x9c', '%9d': '\x9d',
'%9D': '\x9d', '%9e': '\x9e', '%9E': '\x9e', '%9f': '\x9f', '%9F': '\x9f',
'%a0': '\xa0', '%A0': '\xa0', '%a1': '\xa1', '%A1': '\xa1', '%a2': '\xa2',
'%A2': '\xa2', '%a3': '\xa3', '%A3': '\xa3', '%a4': '\xa4', '%A4': '\xa4',
'%a5': '\xa5', '%A5': '\xa5', '%a6': '\xa6', '%A6': '\xa6', '%a7': '\xa7',
'%A7': '\xa7', '%a8': '\xa8', '%A8': '\xa8', '%a9': '\xa9', '%A9': '\xa9',
'%aa': '\xaa', '%Aa': '\xaa', '%aA': '\xaa', '%AA': '\xaa', '%ab': '\xab',
'%Ab': '\xab', '%aB': '\xab', '%AB': '\xab', '%ac': '\xac', '%Ac': '\xac',
'%aC': '\xac', '%AC': '\xac', '%ad': '\xad', '%Ad': '\xad', '%aD': '\xad',
'%AD': '\xad', '%ae': '\xae', '%Ae': '\xae', '%aE': '\xae', '%AE': '\xae',
'%af': '\xaf', '%Af': '\xaf', '%aF': '\xaf', '%AF': '\xaf', '%b0': '\xb0',
'%B0': '\xb0', '%b1': '\xb1', '%B1': '\xb1', '%b2': '\xb2', '%B2': '\xb2',
'%b3': '\xb3', '%B3': '\xb3', '%b4': '\xb4', '%B4': '\xb4', '%b5': '\xb5',
'%B5': '\xb5', '%b6': '\xb6', '%B6': '\xb6', '%b7': '\xb7', '%B7': '\xb7',
'%b8': '\xb8', '%B8': '\xb8', '%b9': '\xb9', '%B9': '\xb9', '%ba': '\xba',
'%Ba': '\xba', '%bA': '\xba', '%BA': '\xba', '%bb': '\xbb', '%Bb': '\xbb',
'%bB': '\xbb', '%BB': '\xbb', '%bc': '\xbc', '%Bc': '\xbc', '%bC': '\xbc',
'%BC': '\xbc', '%bd': '\xbd', '%Bd': '\xbd', '%bD': '\xbd', '%BD': '\xbd',
'%be': '\xbe', '%Be': '\xbe', '%bE': '\xbe', '%BE': '\xbe', '%bf': '\xbf',
'%Bf': '\xbf', '%bF': '\xbf', '%BF': '\xbf', '%c0': '\xc0', '%C0': '\xc0',
'%c1': '\xc1', '%C1': '\xc1', '%c2': '\xc2', '%C2': '\xc2', '%c3': '\xc3',
'%C3': '\xc3', '%c4': '\xc4', '%C4': '\xc4', '%c5': '\xc5', '%C5': '\xc5',
'%c6': '\xc6', '%C6': '\xc6', '%c7': '\xc7', '%C7': '\xc7', '%c8': '\xc8',
'%C8': '\xc8', '%c9': '\xc9', '%C9': '\xc9', '%ca': '\xca', '%Ca': '\xca',
'%cA': '\xca', '%CA': '\xca', '%cb': '\xcb', '%Cb': '\xcb', '%cB': '\xcb',
'%CB': '\xcb', '%cc': '\xcc', '%Cc': '\xcc', '%cC': '\xcc', '%CC': '\xcc',
'%cd': '\xcd', '%Cd': '\xcd', '%cD': '\xcd', '%CD': '\xcd', '%ce': '\xce',
'%Ce': '\xce', '%cE': '\xce', '%CE': '\xce', '%cf': '\xcf', '%Cf': '\xcf',
'%cF': '\xcf', '%CF': '\xcf', '%d0': '\xd0', '%D0': '\xd0', '%d1': '\xd1',
'%D1': '\xd1', '%d2': '\xd2', '%D2': '\xd2', '%d3': '\xd3', '%D3': '\xd3',
'%d4': '\xd4', '%D4': '\xd4', '%d5': '\xd5', '%D5': '\xd5', '%d6': '\xd6',
'%D6': '\xd6', '%d7': '\xd7', '%D7': '\xd7', '%d8': '\xd8', '%D8': '\xd8',
'%d9': '\xd9', '%D9': '\xd9', '%da': '\xda', '%Da': '\xda', '%dA': '\xda',
'%DA': '\xda', '%db': '\xdb', '%Db': '\xdb', '%dB': '\xdb', '%DB': '\xdb',
'%dc': '\xdc', '%Dc': '\xdc', '%dC': '\xdc', '%DC': '\xdc', '%dd': '\xdd',
'%Dd': '\xdd', '%dD': '\xdd', '%DD': '\xdd', '%de': '\xde', '%De': '\xde',
'%dE': '\xde', '%DE': '\xde', '%df': '\xdf', '%Df': '\xdf', '%dF': '\xdf',
'%DF': '\xdf', '%e0': '\xe0', '%E0': '\xe0', '%e1': '\xe1', '%E1': '\xe1',
'%e2': '\xe2', '%E2': '\xe2', '%e3': '\xe3', '%E3': '\xe3', '%e4': '\xe4',
'%E4': '\xe4', '%e5': '\xe5', '%E5': '\xe5', '%e6': '\xe6', '%E6': '\xe6',
'%e7': '\xe7', '%E7': '\xe7', '%e8': '\xe8', '%E8': '\xe8', '%e9': '\xe9',
'%E9': '\xe9', '%ea': '\xea', '%Ea': '\xea', '%eA': '\xea', '%EA': '\xea',
'%eb': '\xeb', '%Eb': '\xeb', '%eB': '\xeb', '%EB': '\xeb', '%ec': '\xec',
'%Ec': '\xec', '%eC': '\xec', '%EC': '\xec', '%ed': '\xed', '%Ed': '\xed',
'%eD': '\xed', '%ED': '\xed', '%ee': '\xee', '%Ee': '\xee', '%eE': '\xee',
'%EE': '\xee', '%ef': '\xef', '%Ef': '\xef', '%eF': '\xef', '%EF': '\xef',
'%f0': '\xf0', '%F0': '\xf0', '%f1': '\xf1', '%F1': '\xf1', '%f2': '\xf2',
'%F2': '\xf2', '%f3': '\xf3', '%F3': '\xf3', '%f4': '\xf4', '%F4': '\xf4',
'%f5': '\xf5', '%F5': '\xf5', '%f6': '\xf6', '%F6': '\xf6', '%f7': '\xf7',
'%F7': '\xf7', '%f8': '\xf8', '%F8': '\xf8', '%f9': '\xf9', '%F9': '\xf9',
'%fa': '\xfa', '%Fa': '\xfa', '%fA': '\xfa', '%FA': '\xfa', '%fb': '\xfb',
'%Fb': '\xfb', '%fB': '\xfb', '%FB': '\xfb', '%fc': '\xfc', '%Fc': '\xfc',
'%fC': '\xfc', '%FC': '\xfc', '%fd': '\xfd', '%Fd': '\xfd', '%fD': '\xfd',
'%FD': '\xfd', '%fe': '\xfe', '%Fe': '\xfe', '%fE': '\xfe', '%FE': '\xfe',
'%ff': '\xff', '%Ff': '\xff', '%fF': '\xff', '%FF': '\xff'
}
function encodedReplacer (match) {
return EncodedLookup[match]
}
const STATE_KEY = 0
const STATE_VALUE = 1
const STATE_CHARSET = 2
const STATE_LANG = 3
function parseParams (str) {
const res = []
let state = STATE_KEY
let charset = ''
let inquote = false
let escaping = false
let p = 0
let tmp = ''
const len = str.length
for (var i = 0; i < len; ++i) { // eslint-disable-line no-var
const char = str[i]
if (char === '\\' && inquote) {
if (escaping) { escaping = false } else {
escaping = true
continue
}
} else if (char === '"') {
if (!escaping) {
if (inquote) {
inquote = false
state = STATE_KEY
} else { inquote = true }
continue
} else { escaping = false }
} else {
if (escaping && inquote) { tmp += '\\' }
escaping = false
if ((state === STATE_CHARSET || state === STATE_LANG) && char === "'") {
if (state === STATE_CHARSET) {
state = STATE_LANG
charset = tmp.substring(1)
} else { state = STATE_VALUE }
tmp = ''
continue
} else if (state === STATE_KEY &&
(char === '*' || char === '=') &&
res.length) {
state = char === '*'
? STATE_CHARSET
: STATE_VALUE
res[p] = [tmp, undefined]
tmp = ''
continue
} else if (!inquote && char === ';') {
state = STATE_KEY
if (charset) {
if (tmp.length) {
tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer),
'binary',
charset)
}
charset = ''
} else if (tmp.length) {
tmp = decodeText(tmp, 'binary', 'utf8')
}
if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp }
tmp = ''
++p
continue
} else if (!inquote && (char === ' ' || char === '\t')) { continue }
}
tmp += char
}
if (charset && tmp.length) {
tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer),
'binary',
charset)
} else if (tmp) {
tmp = decodeText(tmp, 'binary', 'utf8')
}
if (res[p] === undefined) {
if (tmp) { res[p] = tmp }
} else { res[p][1] = tmp }
return res
}
module.exports = parseParams
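
A quick usage sketch of the parser above; the require path and the sample header value are illustrative, not taken from the package's own tests:
// Hypothetical usage of the vendored parseParams (path assumed).
const parseParams = require('./parseParams')
const parts = parseParams('form-data; name="upload"; filename="a.txt"')
// parts[0] -> 'form-data'
// parts[1] -> ['name', 'upload']
// parts[2] -> ['filename', 'a.txt']
// RFC 2231 parameters written as  key*=charset'lang'%XX...  are percent-decoded
// through RE_ENCODED/encodedReplacer and then converted with decodeText.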

View File

@ -1,86 +0,0 @@
{
"name": "@fastify/busboy",
"version": "2.1.1",
"private": false,
"author": "Brian White <mscdex@mscdex.net>",
"contributors": [
{
"name": "Igor Savin",
"email": "kibertoad@gmail.com",
"url": "https://github.com/kibertoad"
},
{
"name": "Aras Abbasi",
"email": "aras.abbasi@gmail.com",
"url": "https://github.com/uzlopak"
}
],
"description": "A streaming parser for HTML form data for node.js",
"main": "lib/main",
"type": "commonjs",
"types": "lib/main.d.ts",
"scripts": {
"bench:busboy": "cd benchmarks && npm install && npm run benchmark-fastify",
"bench:dicer": "node bench/dicer/dicer-bench-multipart-parser.js",
"coveralls": "nyc report --reporter=lcov",
"lint": "npm run lint:standard",
"lint:everything": "npm run lint && npm run test:types",
"lint:fix": "standard --fix",
"lint:standard": "standard --verbose | snazzy",
"test:mocha": "tap",
"test:types": "tsd",
"test:coverage": "nyc npm run test",
"test": "npm run test:mocha"
},
"engines": {
"node": ">=14"
},
"devDependencies": {
"@types/node": "^20.1.0",
"busboy": "^1.0.0",
"photofinish": "^1.8.0",
"snazzy": "^9.0.0",
"standard": "^17.0.0",
"tap": "^16.3.8",
"tinybench": "^2.5.1",
"tsd": "^0.30.0",
"typescript": "^5.0.2"
},
"keywords": [
"uploads",
"forms",
"multipart",
"form-data"
],
"license": "MIT",
"repository": {
"type": "git",
"url": "git+https://github.com/fastify/busboy.git"
},
"tsd": {
"directory": "test/types",
"compilerOptions": {
"esModuleInterop": false,
"module": "commonjs",
"target": "ES2017"
}
},
"standard": {
"globals": [
"describe",
"it"
],
"ignore": [
"bench"
]
},
"files": [
"README.md",
"LICENSE",
"lib/*",
"deps/encoding/*",
"deps/dicer/lib",
"deps/streamsearch/",
"deps/dicer/LICENSE"
]
}

20
node_modules/anymatch/index.d.ts generated vendored
View File

@ -1,20 +0,0 @@
type AnymatchFn = (testString: string) => boolean;
type AnymatchPattern = string|RegExp|AnymatchFn;
type AnymatchMatcher = AnymatchPattern|AnymatchPattern[]
type AnymatchTester = {
(testString: string|any[], returnIndex: true): number;
(testString: string|any[]): boolean;
}
type PicomatchOptions = {dot: boolean};
declare const anymatch: {
(matchers: AnymatchMatcher): AnymatchTester;
(matchers: AnymatchMatcher, testString: null, returnIndex: true | PicomatchOptions): AnymatchTester;
(matchers: AnymatchMatcher, testString: string|any[], returnIndex: true | PicomatchOptions): number;
(matchers: AnymatchMatcher, testString: string|any[]): boolean;
}
export {AnymatchMatcher as Matcher}
export {AnymatchTester as Tester}
export default anymatch

104
node_modules/anymatch/index.js generated vendored
View File

@ -1,104 +0,0 @@
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const picomatch = require('picomatch');
const normalizePath = require('normalize-path');
/**
* @typedef {(testString: string) => boolean} AnymatchFn
* @typedef {string|RegExp|AnymatchFn} AnymatchPattern
* @typedef {AnymatchPattern|AnymatchPattern[]} AnymatchMatcher
*/
const BANG = '!';
const DEFAULT_OPTIONS = {returnIndex: false};
const arrify = (item) => Array.isArray(item) ? item : [item];
/**
* @param {AnymatchPattern} matcher
* @param {object} options
* @returns {AnymatchFn}
*/
const createPattern = (matcher, options) => {
if (typeof matcher === 'function') {
return matcher;
}
if (typeof matcher === 'string') {
const glob = picomatch(matcher, options);
return (string) => matcher === string || glob(string);
}
if (matcher instanceof RegExp) {
return (string) => matcher.test(string);
}
return (string) => false;
};
/**
* @param {Array<Function>} patterns
* @param {Array<Function>} negPatterns
* @param {String|Array} args
* @param {Boolean} returnIndex
* @returns {boolean|number}
*/
const matchPatterns = (patterns, negPatterns, args, returnIndex) => {
const isList = Array.isArray(args);
const _path = isList ? args[0] : args;
if (!isList && typeof _path !== 'string') {
throw new TypeError('anymatch: second argument must be a string: got ' +
Object.prototype.toString.call(_path))
}
const path = normalizePath(_path, false);
for (let index = 0; index < negPatterns.length; index++) {
const nglob = negPatterns[index];
if (nglob(path)) {
return returnIndex ? -1 : false;
}
}
const applied = isList && [path].concat(args.slice(1));
for (let index = 0; index < patterns.length; index++) {
const pattern = patterns[index];
if (isList ? pattern(...applied) : pattern(path)) {
return returnIndex ? index : true;
}
}
return returnIndex ? -1 : false;
};
/**
* @param {AnymatchMatcher} matchers
* @param {Array|string} testString
* @param {object} options
* @returns {boolean|number|Function}
*/
const anymatch = (matchers, testString, options = DEFAULT_OPTIONS) => {
if (matchers == null) {
throw new TypeError('anymatch: specify first argument');
}
const opts = typeof options === 'boolean' ? {returnIndex: options} : options;
const returnIndex = opts.returnIndex || false;
// Early cache for matchers.
const mtchers = arrify(matchers);
const negatedGlobs = mtchers
.filter(item => typeof item === 'string' && item.charAt(0) === BANG)
.map(item => item.slice(1))
.map(item => picomatch(item, opts));
const patterns = mtchers
.filter(item => typeof item !== 'string' || (typeof item === 'string' && item.charAt(0) !== BANG))
.map(matcher => createPattern(matcher, opts));
if (testString == null) {
return (testString, ri = false) => {
const returnIndex = typeof ri === 'boolean' ? ri : false;
return matchPatterns(patterns, negatedGlobs, testString, returnIndex);
}
}
return matchPatterns(patterns, negatedGlobs, testString, returnIndex);
};
anymatch.default = anymatch;
module.exports = anymatch;
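
A minimal usage sketch of the matcher above; the matcher list and paths are illustrative, while the call shapes mirror the package's documented API:
const anymatch = require('anymatch')
const matchers = ['path/to/file.js', 'path/anyjs/**/*.js', /foo\.js$/, s => s.includes('bar')]
anymatch(matchers, 'path/to/file.js')          // true  (exact string match)
anymatch(matchers, 'path/anyjs/baz.js', true)  // 1     (index of the glob that matched)
const tester = anymatch(matchers)              // omit the test string to get a reusable tester
tester('path/foo.js')                          // true  (matches the RegExp)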

48
node_modules/anymatch/package.json generated vendored
View File

@ -1,48 +0,0 @@
{
"name": "anymatch",
"version": "3.1.3",
"description": "Matches strings against configurable strings, globs, regular expressions, and/or functions",
"files": [
"index.js",
"index.d.ts"
],
"dependencies": {
"normalize-path": "^3.0.0",
"picomatch": "^2.0.4"
},
"author": {
"name": "Elan Shanker",
"url": "https://github.com/es128"
},
"license": "ISC",
"homepage": "https://github.com/micromatch/anymatch",
"repository": {
"type": "git",
"url": "https://github.com/micromatch/anymatch"
},
"keywords": [
"match",
"any",
"string",
"file",
"fs",
"list",
"glob",
"regex",
"regexp",
"regular",
"expression",
"function"
],
"scripts": {
"test": "nyc mocha",
"mocha": "mocha"
},
"devDependencies": {
"mocha": "^6.1.3",
"nyc": "^14.0.0"
},
"engines": {
"node": ">= 8"
}
}

View File

@ -1,263 +0,0 @@
[
"3dm",
"3ds",
"3g2",
"3gp",
"7z",
"a",
"aac",
"adp",
"afdesign",
"afphoto",
"afpub",
"ai",
"aif",
"aiff",
"alz",
"ape",
"apk",
"appimage",
"ar",
"arj",
"asf",
"au",
"avi",
"bak",
"baml",
"bh",
"bin",
"bk",
"bmp",
"btif",
"bz2",
"bzip2",
"cab",
"caf",
"cgm",
"class",
"cmx",
"cpio",
"cr2",
"cur",
"dat",
"dcm",
"deb",
"dex",
"djvu",
"dll",
"dmg",
"dng",
"doc",
"docm",
"docx",
"dot",
"dotm",
"dra",
"DS_Store",
"dsk",
"dts",
"dtshd",
"dvb",
"dwg",
"dxf",
"ecelp4800",
"ecelp7470",
"ecelp9600",
"egg",
"eol",
"eot",
"epub",
"exe",
"f4v",
"fbs",
"fh",
"fla",
"flac",
"flatpak",
"fli",
"flv",
"fpx",
"fst",
"fvt",
"g3",
"gh",
"gif",
"graffle",
"gz",
"gzip",
"h261",
"h263",
"h264",
"icns",
"ico",
"ief",
"img",
"ipa",
"iso",
"jar",
"jpeg",
"jpg",
"jpgv",
"jpm",
"jxr",
"key",
"ktx",
"lha",
"lib",
"lvp",
"lz",
"lzh",
"lzma",
"lzo",
"m3u",
"m4a",
"m4v",
"mar",
"mdi",
"mht",
"mid",
"midi",
"mj2",
"mka",
"mkv",
"mmr",
"mng",
"mobi",
"mov",
"movie",
"mp3",
"mp4",
"mp4a",
"mpeg",
"mpg",
"mpga",
"mxu",
"nef",
"npx",
"numbers",
"nupkg",
"o",
"odp",
"ods",
"odt",
"oga",
"ogg",
"ogv",
"otf",
"ott",
"pages",
"pbm",
"pcx",
"pdb",
"pdf",
"pea",
"pgm",
"pic",
"png",
"pnm",
"pot",
"potm",
"potx",
"ppa",
"ppam",
"ppm",
"pps",
"ppsm",
"ppsx",
"ppt",
"pptm",
"pptx",
"psd",
"pya",
"pyc",
"pyo",
"pyv",
"qt",
"rar",
"ras",
"raw",
"resources",
"rgb",
"rip",
"rlc",
"rmf",
"rmvb",
"rpm",
"rtf",
"rz",
"s3m",
"s7z",
"scpt",
"sgi",
"shar",
"snap",
"sil",
"sketch",
"slk",
"smv",
"snk",
"so",
"stl",
"suo",
"sub",
"swf",
"tar",
"tbz",
"tbz2",
"tga",
"tgz",
"thmx",
"tif",
"tiff",
"tlz",
"ttc",
"ttf",
"txz",
"udf",
"uvh",
"uvi",
"uvm",
"uvp",
"uvs",
"uvu",
"viv",
"vob",
"war",
"wav",
"wax",
"wbmp",
"wdp",
"weba",
"webm",
"webp",
"whl",
"wim",
"wm",
"wma",
"wmv",
"wmx",
"woff",
"woff2",
"wrm",
"wvx",
"xbm",
"xif",
"xla",
"xlam",
"xls",
"xlsb",
"xlsm",
"xlsx",
"xlt",
"xltm",
"xltx",
"xm",
"xmind",
"xpi",
"xpm",
"xwd",
"xz",
"z",
"zip",
"zipx"
]

View File

@ -1,3 +0,0 @@
declare const binaryExtensionsJson: readonly string[];
export = binaryExtensionsJson;

View File

@ -1,14 +0,0 @@
/**
List of binary file extensions.
@example
```
import binaryExtensions = require('binary-extensions');
console.log(binaryExtensions);
//=> ['3ds', '3g2', …]
```
*/
declare const binaryExtensions: readonly string[];
export = binaryExtensions;

View File

@ -1 +0,0 @@
module.exports = require('./binary-extensions.json');
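
A small sketch of how this list is typically consumed; the helper below is illustrative (the ecosystem equivalent is the is-binary-path package):
const path = require('path')
const binaryExtensions = require('binary-extensions')
const isBinaryPath = file => binaryExtensions.includes(path.extname(file).slice(1).toLowerCase())
isBinaryPath('photo.PNG')  // true
isBinaryPath('notes.txt')  // false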

View File

@ -1,40 +0,0 @@
{
"name": "binary-extensions",
"version": "2.3.0",
"description": "List of binary file extensions",
"license": "MIT",
"repository": "sindresorhus/binary-extensions",
"funding": "https://github.com/sponsors/sindresorhus",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "https://sindresorhus.com"
},
"sideEffects": false,
"engines": {
"node": ">=8"
},
"scripts": {
"test": "xo && ava && tsd"
},
"files": [
"index.js",
"index.d.ts",
"binary-extensions.json",
"binary-extensions.json.d.ts"
],
"keywords": [
"binary",
"extensions",
"extension",
"file",
"json",
"list",
"array"
],
"devDependencies": {
"ava": "^1.4.1",
"tsd": "^0.7.2",
"xo": "^0.24.0"
}
}

90
node_modules/body-parser/index.js generated vendored
View File

@ -6,20 +6,6 @@
'use strict'
/**
* Module dependencies.
* @private
*/
var deprecate = require('depd')('body-parser')
/**
* Cache of loaded parsers.
* @private
*/
var parsers = Object.create(null)
/**
* @typedef Parsers
* @type {function}
@ -34,8 +20,7 @@ var parsers = Object.create(null)
* @type {Parsers}
*/
exports = module.exports = deprecate.function(bodyParser,
'bodyParser: use individual json/urlencoded middlewares')
exports = module.exports = bodyParser
/**
* JSON parser.
@ -45,7 +30,7 @@ exports = module.exports = deprecate.function(bodyParser,
Object.defineProperty(exports, 'json', {
configurable: true,
enumerable: true,
get: createParserGetter('json')
get: () => require('./lib/types/json')
})
/**
@ -56,7 +41,7 @@ Object.defineProperty(exports, 'json', {
Object.defineProperty(exports, 'raw', {
configurable: true,
enumerable: true,
get: createParserGetter('raw')
get: () => require('./lib/types/raw')
})
/**
@ -67,7 +52,7 @@ Object.defineProperty(exports, 'raw', {
Object.defineProperty(exports, 'text', {
configurable: true,
enumerable: true,
get: createParserGetter('text')
get: () => require('./lib/types/text')
})
/**
@ -78,7 +63,7 @@ Object.defineProperty(exports, 'text', {
Object.defineProperty(exports, 'urlencoded', {
configurable: true,
enumerable: true,
get: createParserGetter('urlencoded')
get: () => require('./lib/types/urlencoded')
})
/**
@ -90,67 +75,6 @@ Object.defineProperty(exports, 'urlencoded', {
* @public
*/
function bodyParser (options) {
// use default type for parsers
var opts = Object.create(options || null, {
type: {
configurable: true,
enumerable: true,
value: undefined,
writable: true
}
})
var _urlencoded = exports.urlencoded(opts)
var _json = exports.json(opts)
return function bodyParser (req, res, next) {
_json(req, res, function (err) {
if (err) return next(err)
_urlencoded(req, res, next)
})
}
}
/**
* Create a getter for loading a parser.
* @private
*/
function createParserGetter (name) {
return function get () {
return loadParser(name)
}
}
/**
* Load a parser module.
* @private
*/
function loadParser (parserName) {
var parser = parsers[parserName]
if (parser !== undefined) {
return parser
}
// this uses a switch for static require analysis
switch (parserName) {
case 'json':
parser = require('./lib/types/json')
break
case 'raw':
parser = require('./lib/types/raw')
break
case 'text':
parser = require('./lib/types/text')
break
case 'urlencoded':
parser = require('./lib/types/urlencoded')
break
}
// store to prevent invoking require()
return (parsers[parserName] = parser)
function bodyParser () {
throw new Error('The bodyParser() generic has been split into individual middleware to use instead.')
}
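
Sketch of the resulting public surface after this change; the option values are arbitrary:
const bodyParser = require('body-parser')
const json = bodyParser.json({ limit: '1mb' })          // getter lazily requires ./lib/types/json
const urlenc = bodyParser.urlencoded({ extended: false }) // getter lazily requires ./lib/types/urlencoded
// json and urlenc are (req, res, next) middleware functions
// bodyParser()  -> throws: the generic combined middleware was removed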

49
node_modules/body-parser/lib/read.js generated vendored
View File

@ -12,12 +12,10 @@
*/
var createError = require('http-errors')
var destroy = require('destroy')
var getBody = require('raw-body')
var iconv = require('iconv-lite')
var onFinished = require('on-finished')
var unpipe = require('unpipe')
var zlib = require('zlib')
var zlib = require('node:zlib')
/**
* Module exports.
@ -42,9 +40,6 @@ function read (req, res, next, parse, debug, options) {
var opts = options
var stream
// flag as parsed
req._body = true
// read options
var encoding = opts.encoding !== null
? opts.encoding
@ -93,8 +88,8 @@ function read (req, res, next, parse, debug, options) {
// unpipe from stream and destroy
if (stream !== req) {
unpipe(req)
destroy(stream, true)
req.unpipe()
stream.destroy()
}
// read off entire request
@ -125,7 +120,7 @@ function read (req, res, next, parse, debug, options) {
str = typeof body !== 'string' && encoding !== null
? iconv.decode(body, encoding)
: body
req.body = parse(str)
req.body = parse(str, encoding)
} catch (err) {
next(createError(400, err, {
body: str,
@ -151,7 +146,6 @@ function read (req, res, next, parse, debug, options) {
function contentstream (req, debug, inflate) {
var encoding = (req.headers['content-encoding'] || 'identity').toLowerCase()
var length = req.headers['content-length']
var stream
debug('content-encoding "%s"', encoding)
@ -162,29 +156,40 @@ function contentstream (req, debug, inflate) {
})
}
if (encoding === 'identity') {
req.length = length
return req
}
var stream = createDecompressionStream(encoding, debug)
req.pipe(stream)
return stream
}
/**
* Create a decompression stream for the given encoding.
* @param {string} encoding
* @param {function} debug
* @return {object}
* @api private
*/
function createDecompressionStream (encoding, debug) {
switch (encoding) {
case 'deflate':
stream = zlib.createInflate()
debug('inflate body')
req.pipe(stream)
break
return zlib.createInflate()
case 'gzip':
stream = zlib.createGunzip()
debug('gunzip body')
req.pipe(stream)
break
case 'identity':
stream = req
stream.length = length
break
return zlib.createGunzip()
case 'br':
debug('brotli decompress body')
return zlib.createBrotliDecompress()
default:
throw createError(415, 'unsupported content encoding "' + encoding + '"', {
encoding: encoding,
type: 'encoding.unsupported'
})
}
return stream
}
/**

View File

@ -12,12 +12,12 @@
* @private
*/
var bytes = require('bytes')
var contentType = require('content-type')
var createError = require('http-errors')
var debug = require('debug')('body-parser:json')
var isFinished = require('on-finished').isFinished
var read = require('../read')
var typeis = require('type-is')
var { getCharset, normalizeOptions } = require('../utils')
/**
* Module exports.
@ -51,25 +51,10 @@ var JSON_SYNTAX_REGEXP = /#+/g
*/
function json (options) {
var opts = options || {}
var { inflate, limit, verify, shouldParse } = normalizeOptions(options, 'application/json')
var limit = typeof opts.limit !== 'number'
? bytes.parse(opts.limit || '100kb')
: opts.limit
var inflate = opts.inflate !== false
var reviver = opts.reviver
var strict = opts.strict !== false
var type = opts.type || 'application/json'
var verify = opts.verify || false
if (verify !== false && typeof verify !== 'function') {
throw new TypeError('option verify must be function')
}
// create the appropriate type checking function
var shouldParse = typeof type !== 'function'
? typeChecker(type)
: type
var reviver = options?.reviver
var strict = options?.strict !== false
function parse (body) {
if (body.length === 0) {
@ -99,13 +84,15 @@ function json (options) {
}
return function jsonParser (req, res, next) {
if (req._body) {
if (isFinished(req)) {
debug('body already parsed')
next()
return
}
req.body = req.body || {}
if (!('body' in req)) {
req.body = undefined
}
// skip requests without bodies
if (!typeis.hasBody(req)) {
@ -137,9 +124,9 @@ function json (options) {
// read
read(req, res, next, parse, debug, {
encoding: charset,
inflate: inflate,
limit: limit,
verify: verify
inflate,
limit,
verify
})
}
}
@ -193,21 +180,6 @@ function firstchar (str) {
: undefined
}
/**
* Get the charset of a request.
*
* @param {object} req
* @api private
*/
function getCharset (req) {
try {
return (contentType.parse(req).parameters.charset || '').toLowerCase()
} catch (e) {
return undefined
}
}
/**
* Normalize a SyntaxError for JSON.parse.
*
@ -232,16 +204,3 @@ function normalizeJsonSyntaxError (error, obj) {
return error
}
/**
* Get the simple type checker.
*
* @param {string} type
* @return {function}
*/
function typeChecker (type) {
return function checkType (req) {
return Boolean(typeis(req, type))
}
}

View File

@ -10,10 +10,11 @@
* Module dependencies.
*/
var bytes = require('bytes')
var debug = require('debug')('body-parser:raw')
var isFinished = require('on-finished').isFinished
var read = require('../read')
var typeis = require('type-is')
var { normalizeOptions } = require('../utils')
/**
* Module exports.
@ -30,36 +31,22 @@ module.exports = raw
*/
function raw (options) {
var opts = options || {}
var inflate = opts.inflate !== false
var limit = typeof opts.limit !== 'number'
? bytes.parse(opts.limit || '100kb')
: opts.limit
var type = opts.type || 'application/octet-stream'
var verify = opts.verify || false
if (verify !== false && typeof verify !== 'function') {
throw new TypeError('option verify must be function')
}
// create the appropriate type checking function
var shouldParse = typeof type !== 'function'
? typeChecker(type)
: type
var { inflate, limit, verify, shouldParse } = normalizeOptions(options, 'application/octet-stream')
function parse (buf) {
return buf
}
return function rawParser (req, res, next) {
if (req._body) {
if (isFinished(req)) {
debug('body already parsed')
next()
return
}
req.body = req.body || {}
if (!('body' in req)) {
req.body = undefined
}
// skip requests without bodies
if (!typeis.hasBody(req)) {
@ -80,22 +67,9 @@ function raw (options) {
// read
read(req, res, next, parse, debug, {
encoding: null,
inflate: inflate,
limit: limit,
verify: verify
inflate,
limit,
verify
})
}
}
/**
* Get the simple type checker.
*
* @param {string} type
* @return {function}
*/
function typeChecker (type) {
return function checkType (req) {
return Boolean(typeis(req, type))
}
}

View File

@ -10,11 +10,11 @@
* Module dependencies.
*/
var bytes = require('bytes')
var contentType = require('content-type')
var debug = require('debug')('body-parser:text')
var isFinished = require('on-finished').isFinished
var read = require('../read')
var typeis = require('type-is')
var { getCharset, normalizeOptions } = require('../utils')
/**
* Module exports.
@ -31,37 +31,24 @@ module.exports = text
*/
function text (options) {
var opts = options || {}
var { inflate, limit, verify, shouldParse } = normalizeOptions(options, 'text/plain')
var defaultCharset = opts.defaultCharset || 'utf-8'
var inflate = opts.inflate !== false
var limit = typeof opts.limit !== 'number'
? bytes.parse(opts.limit || '100kb')
: opts.limit
var type = opts.type || 'text/plain'
var verify = opts.verify || false
if (verify !== false && typeof verify !== 'function') {
throw new TypeError('option verify must be function')
}
// create the appropriate type checking function
var shouldParse = typeof type !== 'function'
? typeChecker(type)
: type
var defaultCharset = options?.defaultCharset || 'utf-8'
function parse (buf) {
return buf
}
return function textParser (req, res, next) {
if (req._body) {
if (isFinished(req)) {
debug('body already parsed')
next()
return
}
req.body = req.body || {}
if (!('body' in req)) {
req.body = undefined
}
// skip requests without bodies
if (!typeis.hasBody(req)) {
@ -85,37 +72,9 @@ function text (options) {
// read
read(req, res, next, parse, debug, {
encoding: charset,
inflate: inflate,
limit: limit,
verify: verify
inflate,
limit,
verify
})
}
}
/**
* Get the charset of a request.
*
* @param {object} req
* @api private
*/
function getCharset (req) {
try {
return (contentType.parse(req).parameters.charset || '').toLowerCase()
} catch (e) {
return undefined
}
}
/**
* Get the simple type checker.
*
* @param {string} type
* @return {function}
*/
function typeChecker (type) {
return function checkType (req) {
return Boolean(typeis(req, type))
}
}

View File

@ -12,13 +12,13 @@
* @private
*/
var bytes = require('bytes')
var contentType = require('content-type')
var createError = require('http-errors')
var debug = require('debug')('body-parser:urlencoded')
var deprecate = require('depd')('body-parser')
var isFinished = require('on-finished').isFinished
var read = require('../read')
var typeis = require('type-is')
var qs = require('qs')
var { getCharset, normalizeOptions } = require('../utils')
/**
* Module exports.
@ -26,12 +26,6 @@ var typeis = require('type-is')
module.exports = urlencoded
/**
* Cache of parser modules.
*/
var parsers = Object.create(null)
/**
* Create a middleware to parse urlencoded bodies.
*
@ -41,52 +35,32 @@ var parsers = Object.create(null)
*/
function urlencoded (options) {
var opts = options || {}
var { inflate, limit, verify, shouldParse } = normalizeOptions(options, 'application/x-www-form-urlencoded')
// notice because option default will flip in next major
if (opts.extended === undefined) {
deprecate('undefined extended: provide extended option')
}
var extended = opts.extended !== false
var inflate = opts.inflate !== false
var limit = typeof opts.limit !== 'number'
? bytes.parse(opts.limit || '100kb')
: opts.limit
var type = opts.type || 'application/x-www-form-urlencoded'
var verify = opts.verify || false
var depth = typeof opts.depth !== 'number'
? Number(opts.depth || 32)
: opts.depth
if (verify !== false && typeof verify !== 'function') {
throw new TypeError('option verify must be function')
var defaultCharset = options?.defaultCharset || 'utf-8'
if (defaultCharset !== 'utf-8' && defaultCharset !== 'iso-8859-1') {
throw new TypeError('option defaultCharset must be either utf-8 or iso-8859-1')
}
// create the appropriate query parser
var queryparse = extended
? extendedparser(opts)
: simpleparser(opts)
var queryparse = createQueryParser(options)
// create the appropriate type checking function
var shouldParse = typeof type !== 'function'
? typeChecker(type)
: type
function parse (body) {
function parse (body, encoding) {
return body.length
? queryparse(body)
? queryparse(body, encoding)
: {}
}
return function urlencodedParser (req, res, next) {
if (req._body) {
if (isFinished(req)) {
debug('body already parsed')
next()
return
}
req.body = req.body || {}
if (!('body' in req)) {
req.body = undefined
}
// skip requests without bodies
if (!typeis.hasBody(req)) {
@ -105,8 +79,8 @@ function urlencoded (options) {
}
// assert charset
var charset = getCharset(req) || 'utf-8'
if (charset !== 'utf-8') {
var charset = getCharset(req) || defaultCharset
if (charset !== 'utf-8' && charset !== 'iso-8859-1') {
debug('invalid charset')
next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', {
charset: charset,
@ -117,12 +91,10 @@ function urlencoded (options) {
// read
read(req, res, next, parse, debug, {
debug: debug,
encoding: charset,
inflate: inflate,
limit: limit,
verify: verify,
depth: depth
inflate,
limit,
verify
})
}
}
@ -133,15 +105,14 @@ function urlencoded (options) {
* @param {object} options
*/
function extendedparser (options) {
var parameterLimit = options.parameterLimit !== undefined
? options.parameterLimit
function createQueryParser (options) {
var extended = Boolean(options?.extended)
var parameterLimit = options?.parameterLimit !== undefined
? options?.parameterLimit
: 1000
var depth = typeof options.depth !== 'number'
? Number(options.depth || 32)
: options.depth
var parse = parser('qs')
var charsetSentinel = options?.charsetSentinel
var interpretNumericEntities = options?.interpretNumericEntities
var depth = extended ? (options?.depth !== undefined ? options?.depth : 32) : 0
if (isNaN(parameterLimit) || parameterLimit < 1) {
throw new TypeError('option parameterLimit must be a positive number')
@ -155,7 +126,7 @@ function extendedparser (options) {
parameterLimit = parameterLimit | 0
}
return function queryparse (body) {
return function queryparse (body, encoding) {
var paramCount = parameterCount(body, parameterLimit)
if (paramCount === undefined) {
@ -165,16 +136,19 @@ function extendedparser (options) {
})
}
var arrayLimit = Math.max(100, paramCount)
var arrayLimit = extended ? Math.max(100, paramCount) : 0
debug('parse extended urlencoding')
debug('parse ' + (extended ? 'extended ' : '') + 'urlencoding')
try {
return parse(body, {
return qs.parse(body, {
allowPrototypes: true,
arrayLimit: arrayLimit,
depth: depth,
strictDepth: true,
parameterLimit: parameterLimit
charsetSentinel: charsetSentinel,
interpretNumericEntities: interpretNumericEntities,
charset: encoding,
parameterLimit: parameterLimit,
strictDepth: true
})
} catch (err) {
if (err instanceof RangeError) {
@ -188,21 +162,6 @@ function extendedparser (options) {
}
}
/**
* Get the charset of a request.
*
* @param {object} req
* @api private
*/
function getCharset (req) {
try {
return (contentType.parse(req).parameters.charset || '').toLowerCase()
} catch (e) {
return undefined
}
}
/**
* Count the number of parameters, stopping once limit reached
*
@ -212,96 +171,7 @@ function getCharset (req) {
*/
function parameterCount (body, limit) {
var count = 0
var index = 0
var len = body.split('&').length
while ((index = body.indexOf('&', index)) !== -1) {
count++
index++
if (count === limit) {
return undefined
}
}
return count
}
/**
* Get parser for module name dynamically.
*
* @param {string} name
* @return {function}
* @api private
*/
function parser (name) {
var mod = parsers[name]
if (mod !== undefined) {
return mod.parse
}
// this uses a switch for static require analysis
switch (name) {
case 'qs':
mod = require('qs')
break
case 'querystring':
mod = require('querystring')
break
}
// store to prevent invoking require()
parsers[name] = mod
return mod.parse
}
/**
* Get the simple query parser.
*
* @param {object} options
*/
function simpleparser (options) {
var parameterLimit = options.parameterLimit !== undefined
? options.parameterLimit
: 1000
var parse = parser('querystring')
if (isNaN(parameterLimit) || parameterLimit < 1) {
throw new TypeError('option parameterLimit must be a positive number')
}
if (isFinite(parameterLimit)) {
parameterLimit = parameterLimit | 0
}
return function queryparse (body) {
var paramCount = parameterCount(body, parameterLimit)
if (paramCount === undefined) {
debug('too many parameters')
throw createError(413, 'too many parameters', {
type: 'parameters.too.many'
})
}
debug('parse urlencoding')
return parse(body, undefined, undefined, { maxKeys: parameterLimit })
}
}
/**
* Get the simple type checker.
*
* @param {string} type
* @return {function}
*/
function typeChecker (type) {
return function checkType (req) {
return Boolean(typeis(req, type))
}
return len > limit ? undefined : len - 1
}

83
node_modules/body-parser/lib/utils.js generated vendored Normal file
View File

@ -0,0 +1,83 @@
'use strict'
/**
* Module dependencies.
*/
var bytes = require('bytes')
var contentType = require('content-type')
var typeis = require('type-is')
/**
* Module exports.
*/
module.exports = {
getCharset,
normalizeOptions
}
/**
* Get the charset of a request.
*
* @param {object} req
* @api private
*/
function getCharset (req) {
try {
return (contentType.parse(req).parameters.charset || '').toLowerCase()
} catch {
return undefined
}
}
/**
* Get the simple type checker.
*
* @param {string | string[]} type
* @return {function}
*/
function typeChecker (type) {
return function checkType (req) {
return Boolean(typeis(req, type))
}
}
/**
* Normalizes the common options for all parsers.
*
* @param {object} options options to normalize
* @param {string | string[] | function} defaultType default content type(s) or a function to determine it
* @returns {object}
*/
function normalizeOptions (options, defaultType) {
if (!defaultType) {
// Parsers must define a default content type
throw new TypeError('defaultType must be provided')
}
var inflate = options?.inflate !== false
var limit = typeof options?.limit !== 'number'
? bytes.parse(options?.limit || '100kb')
: options?.limit
var type = options?.type || defaultType
var verify = options?.verify || false
if (verify !== false && typeof verify !== 'function') {
throw new TypeError('option verify must be function')
}
// create the appropriate type checking function
var shouldParse = typeof type !== 'function'
? typeChecker(type)
: type
return {
inflate,
limit,
verify,
shouldParse
}
}
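
Rough sketch of what the individual parsers get back from this helper; the values assume the bytes and type-is behaviour referenced above:
const { normalizeOptions } = require('./utils')
const { inflate, limit, verify, shouldParse } = normalizeOptions({ limit: '200kb' }, 'application/json')
// inflate     -> true                 (default)
// limit       -> 204800               (bytes.parse('200kb'))
// verify      -> false
// shouldParse -> req => Boolean(typeis(req, 'application/json'))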

View File

@ -0,0 +1,143 @@
/*!
* media-typer
* Copyright(c) 2014-2017 Douglas Christopher Wilson
* MIT Licensed
*/
'use strict'
/**
* RegExp to match type in RFC 6838
*
* type-name = restricted-name
* subtype-name = restricted-name
* restricted-name = restricted-name-first *126restricted-name-chars
* restricted-name-first = ALPHA / DIGIT
* restricted-name-chars = ALPHA / DIGIT / "!" / "#" /
* "$" / "&" / "-" / "^" / "_"
* restricted-name-chars =/ "." ; Characters before first dot always
* ; specify a facet name
* restricted-name-chars =/ "+" ; Characters after last plus always
* ; specify a structured syntax suffix
* ALPHA = %x41-5A / %x61-7A ; A-Z / a-z
* DIGIT = %x30-39 ; 0-9
*/
var SUBTYPE_NAME_REGEXP = /^[A-Za-z0-9][A-Za-z0-9!#$&^_.-]{0,126}$/
var TYPE_NAME_REGEXP = /^[A-Za-z0-9][A-Za-z0-9!#$&^_-]{0,126}$/
var TYPE_REGEXP = /^ *([A-Za-z0-9][A-Za-z0-9!#$&^_-]{0,126})\/([A-Za-z0-9][A-Za-z0-9!#$&^_.+-]{0,126}) *$/
/**
* Module exports.
*/
exports.format = format
exports.parse = parse
exports.test = test
/**
* Format object to media type.
*
* @param {object} obj
* @return {string}
* @public
*/
function format (obj) {
if (!obj || typeof obj !== 'object') {
throw new TypeError('argument obj is required')
}
var subtype = obj.subtype
var suffix = obj.suffix
var type = obj.type
if (!type || !TYPE_NAME_REGEXP.test(type)) {
throw new TypeError('invalid type')
}
if (!subtype || !SUBTYPE_NAME_REGEXP.test(subtype)) {
throw new TypeError('invalid subtype')
}
// format as type/subtype
var string = type + '/' + subtype
// append +suffix
if (suffix) {
if (!TYPE_NAME_REGEXP.test(suffix)) {
throw new TypeError('invalid suffix')
}
string += '+' + suffix
}
return string
}
/**
* Test media type.
*
* @param {string} string
* @return {object}
* @public
*/
function test (string) {
if (!string) {
throw new TypeError('argument string is required')
}
if (typeof string !== 'string') {
throw new TypeError('argument string is required to be a string')
}
return TYPE_REGEXP.test(string.toLowerCase())
}
/**
* Parse media type to object.
*
* @param {string} string
* @return {object}
* @public
*/
function parse (string) {
if (!string) {
throw new TypeError('argument string is required')
}
if (typeof string !== 'string') {
throw new TypeError('argument string is required to be a string')
}
var match = TYPE_REGEXP.exec(string.toLowerCase())
if (!match) {
throw new TypeError('invalid media type')
}
var type = match[1]
var subtype = match[2]
var suffix
// suffix after last +
var index = subtype.lastIndexOf('+')
if (index !== -1) {
suffix = subtype.substr(index + 1)
subtype = subtype.substr(0, index)
}
return new MediaType(type, subtype, suffix)
}
/**
* Class for MediaType object.
* @public
*/
function MediaType (type, subtype, suffix) {
this.type = type
this.subtype = subtype
this.suffix = suffix
}
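
Usage sketch; the sample media type is arbitrary:
const typer = require('media-typer')
typer.parse('image/svg+xml')   // -> MediaType { type: 'image', subtype: 'svg', suffix: 'xml' }
typer.format({ type: 'image', subtype: 'svg', suffix: 'xml' })  // -> 'image/svg+xml'
typer.test('text/html')        // -> true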

View File

@ -0,0 +1,33 @@
{
"name": "media-typer",
"description": "Simple RFC 6838 media type parser and formatter",
"version": "1.1.0",
"author": "Douglas Christopher Wilson <doug@somethingdoug.com>",
"license": "MIT",
"repository": "jshttp/media-typer",
"devDependencies": {
"eslint": "5.16.0",
"eslint-config-standard": "12.0.0",
"eslint-plugin-import": "2.17.2",
"eslint-plugin-markdown": "1.0.0",
"eslint-plugin-node": "8.0.1",
"eslint-plugin-promise": "4.1.1",
"eslint-plugin-standard": "4.0.0",
"mocha": "6.1.4",
"nyc": "14.0.0"
},
"files": [
"LICENSE",
"HISTORY.md",
"index.js"
],
"engines": {
"node": ">= 0.8"
},
"scripts": {
"lint": "eslint --plugin markdown --ext js,md .",
"test": "mocha --reporter spec --check-leaks --bail test/",
"test-cov": "nyc --reporter=html --reporter=text npm test",
"test-travis": "nyc --reporter=text npm test"
}
}

9342
node_modules/body-parser/node_modules/mime-db/db.json generated vendored Normal file

File diff suppressed because it is too large


12
node_modules/body-parser/node_modules/mime-db/index.js generated vendored Normal file
View File

@ -0,0 +1,12 @@
/*!
* mime-db
* Copyright(c) 2014 Jonathan Ong
* Copyright(c) 2015-2022 Douglas Christopher Wilson
* MIT Licensed
*/
/**
* Module exports.
*/
module.exports = require('./db.json')
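
Each db.json entry, keyed by media type, looks roughly like this; the field values are from memory of the published database, so treat them as indicative:
const db = require('mime-db')
db['application/json']
// -> { source: 'iana', charset: 'UTF-8', compressible: true, extensions: ['json', 'map'] }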

View File

@ -0,0 +1,56 @@
{
"name": "mime-db",
"description": "Media Type Database",
"version": "1.54.0",
"contributors": [
"Douglas Christopher Wilson <doug@somethingdoug.com>",
"Jonathan Ong <me@jongleberry.com> (http://jongleberry.com)",
"Robert Kieffer <robert@broofa.com> (http://github.com/broofa)"
],
"license": "MIT",
"keywords": [
"mime",
"db",
"type",
"types",
"database",
"charset",
"charsets"
],
"repository": "jshttp/mime-db",
"devDependencies": {
"csv-parse": "4.16.3",
"eslint": "8.32.0",
"eslint-config-standard": "15.0.1",
"eslint-plugin-import": "2.27.5",
"eslint-plugin-markdown": "3.0.0",
"eslint-plugin-node": "11.1.0",
"eslint-plugin-promise": "6.1.1",
"eslint-plugin-standard": "4.1.0",
"media-typer": "1.1.0",
"mocha": "10.2.0",
"nyc": "15.1.0",
"stream-to-array": "2.3.0",
"undici": "7.1.0"
},
"files": [
"HISTORY.md",
"LICENSE",
"README.md",
"db.json",
"index.js"
],
"engines": {
"node": ">= 0.6"
},
"scripts": {
"build": "node scripts/build",
"fetch": "node scripts/fetch-apache && node scripts/fetch-iana && node scripts/fetch-nginx",
"lint": "eslint .",
"test": "mocha --reporter spec --check-leaks test/",
"test-ci": "nyc --reporter=lcovonly --reporter=text npm test",
"test-cov": "nyc --reporter=html --reporter=text npm test",
"update": "npm run fetch && npm run build",
"version": "node scripts/version-history.js && git add HISTORY.md"
}
}

View File

@ -0,0 +1,211 @@
/*!
* mime-types
* Copyright(c) 2014 Jonathan Ong
* Copyright(c) 2015 Douglas Christopher Wilson
* MIT Licensed
*/
'use strict'
/**
* Module dependencies.
* @private
*/
var db = require('mime-db')
var extname = require('path').extname
var mimeScore = require('./mimeScore')
/**
* Module variables.
* @private
*/
var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/
var TEXT_TYPE_REGEXP = /^text\//i
/**
* Module exports.
* @public
*/
exports.charset = charset
exports.charsets = { lookup: charset }
exports.contentType = contentType
exports.extension = extension
exports.extensions = Object.create(null)
exports.lookup = lookup
exports.types = Object.create(null)
exports._extensionConflicts = []
// Populate the extensions/types maps
populateMaps(exports.extensions, exports.types)
/**
* Get the default charset for a MIME type.
*
* @param {string} type
* @return {boolean|string}
*/
function charset (type) {
if (!type || typeof type !== 'string') {
return false
}
// TODO: use media-typer
var match = EXTRACT_TYPE_REGEXP.exec(type)
var mime = match && db[match[1].toLowerCase()]
if (mime && mime.charset) {
return mime.charset
}
// default text/* to utf-8
if (match && TEXT_TYPE_REGEXP.test(match[1])) {
return 'UTF-8'
}
return false
}
/**
* Create a full Content-Type header given a MIME type or extension.
*
* @param {string} str
* @return {boolean|string}
*/
function contentType (str) {
// TODO: should this even be in this module?
if (!str || typeof str !== 'string') {
return false
}
var mime = str.indexOf('/') === -1 ? exports.lookup(str) : str
if (!mime) {
return false
}
// TODO: use content-type or other module
if (mime.indexOf('charset') === -1) {
var charset = exports.charset(mime)
if (charset) mime += '; charset=' + charset.toLowerCase()
}
return mime
}
/**
* Get the default extension for a MIME type.
*
* @param {string} type
* @return {boolean|string}
*/
function extension (type) {
if (!type || typeof type !== 'string') {
return false
}
// TODO: use media-typer
var match = EXTRACT_TYPE_REGEXP.exec(type)
// get extensions
var exts = match && exports.extensions[match[1].toLowerCase()]
if (!exts || !exts.length) {
return false
}
return exts[0]
}
/**
* Lookup the MIME type for a file path/extension.
*
* @param {string} path
* @return {boolean|string}
*/
function lookup (path) {
if (!path || typeof path !== 'string') {
return false
}
// get the extension ("ext" or ".ext" or full path)
var extension = extname('x.' + path)
.toLowerCase()
.slice(1)
if (!extension) {
return false
}
return exports.types[extension] || false
}
/**
* Populate the extensions and types maps.
* @private
*/
function populateMaps (extensions, types) {
Object.keys(db).forEach(function forEachMimeType (type) {
var mime = db[type]
var exts = mime.extensions
if (!exts || !exts.length) {
return
}
// mime -> extensions
extensions[type] = exts
// extension -> mime
for (var i = 0; i < exts.length; i++) {
var extension = exts[i]
types[extension] = _preferredType(extension, types[extension], type)
// DELETE (eventually): Capture extension->type maps that change as a
// result of switching to mime-score. This is just to help make reviewing
// PR #119 easier, and can be removed once that PR is approved.
const legacyType = _preferredTypeLegacy(
extension,
types[extension],
type
)
if (legacyType !== types[extension]) {
exports._extensionConflicts.push([extension, legacyType, types[extension]])
}
}
})
}
// Resolve type conflict using mime-score
function _preferredType (ext, type0, type1) {
var score0 = type0 ? mimeScore(type0, db[type0].source) : 0
var score1 = type1 ? mimeScore(type1, db[type1].source) : 0
return score0 > score1 ? type0 : type1
}
// Resolve type conflict using pre-mime-score logic
function _preferredTypeLegacy (ext, type0, type1) {
var SOURCE_RANK = ['nginx', 'apache', undefined, 'iana']
var score0 = type0 ? SOURCE_RANK.indexOf(db[type0].source) : 0
var score1 = type1 ? SOURCE_RANK.indexOf(db[type1].source) : 0
if (
exports.types[extension] !== 'application/octet-stream' &&
(score0 > score1 ||
(score0 === score1 &&
exports.types[extension]?.slice(0, 12) === 'application/'))
) {
return type0
}
return score0 > score1 ? type0 : type1
}
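
Typical lookups against the maps built above; the results follow from the db entries, and exact strings are indicative:
const mime = require('mime-types')
mime.lookup('report.pdf')           // 'application/pdf'
mime.contentType('markdown')        // 'text/markdown; charset=utf-8'
mime.extension('application/json')  // 'json'
mime.charset('text/html')           // 'UTF-8'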

View File

@ -0,0 +1,52 @@
// 'mime-score' back-ported to CommonJS
// Score RFC facets (see https://tools.ietf.org/html/rfc6838#section-3)
var FACET_SCORES = {
'prs.': 100,
'x-': 200,
'x.': 300,
'vnd.': 400,
default: 900
}
// Score mime source (Logic originally from `jshttp/mime-types` module)
var SOURCE_SCORES = {
nginx: 10,
apache: 20,
iana: 40,
default: 30 // definitions added by `jshttp/mime-db` project?
}
var TYPE_SCORES = {
// prefer application/xml over text/xml
// prefer application/rtf over text/rtf
application: 1,
// prefer font/woff over application/font-woff
font: 2,
default: 0
}
/**
* Get each component of the score for a mime type. The sum of these is the
* total score. The higher the score, the more "official" the type.
*/
module.exports = function mimeScore (mimeType, source = 'default') {
if (mimeType === 'application/octet-stream') {
return 0
}
const [type, subtype] = mimeType.split('/')
const facet = subtype.replace(/(\.|x-).*/, '$1')
const facetScore = FACET_SCORES[facet] || FACET_SCORES.default
const sourceScore = SOURCE_SCORES[source] || SOURCE_SCORES.default
const typeScore = TYPE_SCORES[type] || TYPE_SCORES.default
// All else being equal prefer shorter types
const lengthScore = 1 - mimeType.length / 100
return facetScore + sourceScore + typeScore + lengthScore
}
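
Worked scores, matching the "prefer application/xml over text/xml" comment above (default facet 900 + source + type + length bonus):
const mimeScore = require('./mimeScore')     // path assumed, vendored beside index.js
mimeScore('application/xml', 'iana')         // 900 + 40 + 1 + 0.85 = 941.85  -> wins the conflict
mimeScore('text/xml', 'iana')                // 900 + 40 + 0 + 0.92 = 940.92
mimeScore('application/octet-stream')        // 0  -> always loses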

View File

@ -0,0 +1,45 @@
{
"name": "mime-types",
"description": "The ultimate javascript content-type utility.",
"version": "3.0.1",
"contributors": [
"Douglas Christopher Wilson <doug@somethingdoug.com>",
"Jeremiah Senkpiel <fishrock123@rocketmail.com> (https://searchbeam.jit.su)",
"Jonathan Ong <me@jongleberry.com> (http://jongleberry.com)"
],
"license": "MIT",
"keywords": [
"mime",
"types"
],
"repository": "jshttp/mime-types",
"dependencies": {
"mime-db": "^1.54.0"
},
"devDependencies": {
"eslint": "8.33.0",
"eslint-config-standard": "14.1.1",
"eslint-plugin-import": "2.27.5",
"eslint-plugin-markdown": "3.0.0",
"eslint-plugin-node": "11.1.0",
"eslint-plugin-promise": "6.1.1",
"eslint-plugin-standard": "4.1.0",
"mocha": "10.2.0",
"nyc": "15.1.0"
},
"files": [
"HISTORY.md",
"LICENSE",
"index.js",
"mimeScore.js"
],
"engines": {
"node": ">= 0.6"
},
"scripts": {
"lint": "eslint .",
"test": "mocha --reporter spec test/test.js",
"test-ci": "nyc --reporter=lcov --reporter=text npm test",
"test-cov": "nyc --reporter=html --reporter=text npm test"
}
}

141
node_modules/body-parser/node_modules/qs/dist/qs.js generated vendored Normal file

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,23 @@
'use strict';
var replace = String.prototype.replace;
var percentTwenties = /%20/g;
var Format = {
RFC1738: 'RFC1738',
RFC3986: 'RFC3986'
};
module.exports = {
'default': Format.RFC3986,
formatters: {
RFC1738: function (value) {
return replace.call(value, percentTwenties, '+');
},
RFC3986: function (value) {
return String(value);
}
},
RFC1738: Format.RFC1738,
RFC3986: Format.RFC3986
};
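
Tiny sketch of the two formatters; the input string is arbitrary:
const formats = require('./formats')              // path assumed, as required from lib/index.js
formats.formatters[formats.RFC1738]('a%20b')      // 'a+b'   (spaces become '+')
formats.formatters[formats.RFC3986]('a%20b')      // 'a%20b' (left percent-encoded)
formats['default']                                // 'RFC3986'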

11
node_modules/body-parser/node_modules/qs/lib/index.js generated vendored Normal file
View File

@ -0,0 +1,11 @@
'use strict';
var stringify = require('./stringify');
var parse = require('./parse');
var formats = require('./formats');
module.exports = {
formats: formats,
parse: parse,
stringify: stringify
};

328
node_modules/body-parser/node_modules/qs/lib/parse.js generated vendored Normal file
View File

@ -0,0 +1,328 @@
'use strict';
var utils = require('./utils');
var has = Object.prototype.hasOwnProperty;
var isArray = Array.isArray;
var defaults = {
allowDots: false,
allowEmptyArrays: false,
allowPrototypes: false,
allowSparse: false,
arrayLimit: 20,
charset: 'utf-8',
charsetSentinel: false,
comma: false,
decodeDotInKeys: false,
decoder: utils.decode,
delimiter: '&',
depth: 5,
duplicates: 'combine',
ignoreQueryPrefix: false,
interpretNumericEntities: false,
parameterLimit: 1000,
parseArrays: true,
plainObjects: false,
strictDepth: false,
strictNullHandling: false,
throwOnLimitExceeded: false
};
var interpretNumericEntities = function (str) {
return str.replace(/&#(\d+);/g, function ($0, numberStr) {
return String.fromCharCode(parseInt(numberStr, 10));
});
};
var parseArrayValue = function (val, options, currentArrayLength) {
if (val && typeof val === 'string' && options.comma && val.indexOf(',') > -1) {
return val.split(',');
}
if (options.throwOnLimitExceeded && currentArrayLength >= options.arrayLimit) {
throw new RangeError('Array limit exceeded. Only ' + options.arrayLimit + ' element' + (options.arrayLimit === 1 ? '' : 's') + ' allowed in an array.');
}
return val;
};
// This is what browsers will submit when the ✓ character occurs in an
// application/x-www-form-urlencoded body and the encoding of the page containing
// the form is iso-8859-1, or when the submitted form has an accept-charset
// attribute of iso-8859-1. Presumably also with other charsets that do not contain
// the ✓ character, such as us-ascii.
var isoSentinel = 'utf8=%26%2310003%3B'; // encodeURIComponent('&#10003;')
// These are the percent-encoded utf-8 octets representing a checkmark, indicating that the request actually is utf-8 encoded.
var charsetSentinel = 'utf8=%E2%9C%93'; // encodeURIComponent('✓')
var parseValues = function parseQueryStringValues(str, options) {
var obj = { __proto__: null };
var cleanStr = options.ignoreQueryPrefix ? str.replace(/^\?/, '') : str;
cleanStr = cleanStr.replace(/%5B/gi, '[').replace(/%5D/gi, ']');
var limit = options.parameterLimit === Infinity ? undefined : options.parameterLimit;
var parts = cleanStr.split(
options.delimiter,
options.throwOnLimitExceeded ? limit + 1 : limit
);
if (options.throwOnLimitExceeded && parts.length > limit) {
throw new RangeError('Parameter limit exceeded. Only ' + limit + ' parameter' + (limit === 1 ? '' : 's') + ' allowed.');
}
var skipIndex = -1; // Keep track of where the utf8 sentinel was found
var i;
var charset = options.charset;
if (options.charsetSentinel) {
for (i = 0; i < parts.length; ++i) {
if (parts[i].indexOf('utf8=') === 0) {
if (parts[i] === charsetSentinel) {
charset = 'utf-8';
} else if (parts[i] === isoSentinel) {
charset = 'iso-8859-1';
}
skipIndex = i;
i = parts.length; // The eslint settings do not allow break;
}
}
}
for (i = 0; i < parts.length; ++i) {
if (i === skipIndex) {
continue;
}
var part = parts[i];
var bracketEqualsPos = part.indexOf(']=');
var pos = bracketEqualsPos === -1 ? part.indexOf('=') : bracketEqualsPos + 1;
var key;
var val;
if (pos === -1) {
key = options.decoder(part, defaults.decoder, charset, 'key');
val = options.strictNullHandling ? null : '';
} else {
key = options.decoder(part.slice(0, pos), defaults.decoder, charset, 'key');
val = utils.maybeMap(
parseArrayValue(
part.slice(pos + 1),
options,
isArray(obj[key]) ? obj[key].length : 0
),
function (encodedVal) {
return options.decoder(encodedVal, defaults.decoder, charset, 'value');
}
);
}
if (val && options.interpretNumericEntities && charset === 'iso-8859-1') {
val = interpretNumericEntities(String(val));
}
if (part.indexOf('[]=') > -1) {
val = isArray(val) ? [val] : val;
}
var existing = has.call(obj, key);
if (existing && options.duplicates === 'combine') {
obj[key] = utils.combine(obj[key], val);
} else if (!existing || options.duplicates === 'last') {
obj[key] = val;
}
}
return obj;
};
var parseObject = function (chain, val, options, valuesParsed) {
var currentArrayLength = 0;
if (chain.length > 0 && chain[chain.length - 1] === '[]') {
var parentKey = chain.slice(0, -1).join('');
currentArrayLength = Array.isArray(val) && val[parentKey] ? val[parentKey].length : 0;
}
var leaf = valuesParsed ? val : parseArrayValue(val, options, currentArrayLength);
for (var i = chain.length - 1; i >= 0; --i) {
var obj;
var root = chain[i];
if (root === '[]' && options.parseArrays) {
obj = options.allowEmptyArrays && (leaf === '' || (options.strictNullHandling && leaf === null))
? []
: utils.combine([], leaf);
} else {
obj = options.plainObjects ? { __proto__: null } : {};
var cleanRoot = root.charAt(0) === '[' && root.charAt(root.length - 1) === ']' ? root.slice(1, -1) : root;
var decodedRoot = options.decodeDotInKeys ? cleanRoot.replace(/%2E/g, '.') : cleanRoot;
var index = parseInt(decodedRoot, 10);
if (!options.parseArrays && decodedRoot === '') {
obj = { 0: leaf };
} else if (
!isNaN(index)
&& root !== decodedRoot
&& String(index) === decodedRoot
&& index >= 0
&& (options.parseArrays && index <= options.arrayLimit)
) {
obj = [];
obj[index] = leaf;
} else if (decodedRoot !== '__proto__') {
obj[decodedRoot] = leaf;
}
}
leaf = obj;
}
return leaf;
};
var parseKeys = function parseQueryStringKeys(givenKey, val, options, valuesParsed) {
if (!givenKey) {
return;
}
// Transform dot notation to bracket notation
var key = options.allowDots ? givenKey.replace(/\.([^.[]+)/g, '[$1]') : givenKey;
// The regex chunks
var brackets = /(\[[^[\]]*])/;
var child = /(\[[^[\]]*])/g;
// Get the parent
var segment = options.depth > 0 && brackets.exec(key);
var parent = segment ? key.slice(0, segment.index) : key;
// Stash the parent if it exists
var keys = [];
if (parent) {
// If we aren't using plain objects, optionally prefix keys that would overwrite object prototype properties
if (!options.plainObjects && has.call(Object.prototype, parent)) {
if (!options.allowPrototypes) {
return;
}
}
keys.push(parent);
}
// Loop through children appending to the array until we hit depth
var i = 0;
while (options.depth > 0 && (segment = child.exec(key)) !== null && i < options.depth) {
i += 1;
if (!options.plainObjects && has.call(Object.prototype, segment[1].slice(1, -1))) {
if (!options.allowPrototypes) {
return;
}
}
keys.push(segment[1]);
}
// If there's a remainder, check strictDepth option for throw, else just add whatever is left
if (segment) {
if (options.strictDepth === true) {
throw new RangeError('Input depth exceeded depth option of ' + options.depth + ' and strictDepth is true');
}
keys.push('[' + key.slice(segment.index) + ']');
}
return parseObject(keys, val, options, valuesParsed);
};
var normalizeParseOptions = function normalizeParseOptions(opts) {
if (!opts) {
return defaults;
}
if (typeof opts.allowEmptyArrays !== 'undefined' && typeof opts.allowEmptyArrays !== 'boolean') {
throw new TypeError('`allowEmptyArrays` option can only be `true` or `false`, when provided');
}
if (typeof opts.decodeDotInKeys !== 'undefined' && typeof opts.decodeDotInKeys !== 'boolean') {
throw new TypeError('`decodeDotInKeys` option can only be `true` or `false`, when provided');
}
if (opts.decoder !== null && typeof opts.decoder !== 'undefined' && typeof opts.decoder !== 'function') {
throw new TypeError('Decoder has to be a function.');
}
if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') {
throw new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined');
}
if (typeof opts.throwOnLimitExceeded !== 'undefined' && typeof opts.throwOnLimitExceeded !== 'boolean') {
throw new TypeError('`throwOnLimitExceeded` option must be a boolean');
}
var charset = typeof opts.charset === 'undefined' ? defaults.charset : opts.charset;
var duplicates = typeof opts.duplicates === 'undefined' ? defaults.duplicates : opts.duplicates;
if (duplicates !== 'combine' && duplicates !== 'first' && duplicates !== 'last') {
throw new TypeError('The duplicates option must be either combine, first, or last');
}
var allowDots = typeof opts.allowDots === 'undefined' ? opts.decodeDotInKeys === true ? true : defaults.allowDots : !!opts.allowDots;
return {
allowDots: allowDots,
allowEmptyArrays: typeof opts.allowEmptyArrays === 'boolean' ? !!opts.allowEmptyArrays : defaults.allowEmptyArrays,
allowPrototypes: typeof opts.allowPrototypes === 'boolean' ? opts.allowPrototypes : defaults.allowPrototypes,
allowSparse: typeof opts.allowSparse === 'boolean' ? opts.allowSparse : defaults.allowSparse,
arrayLimit: typeof opts.arrayLimit === 'number' ? opts.arrayLimit : defaults.arrayLimit,
charset: charset,
charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel,
comma: typeof opts.comma === 'boolean' ? opts.comma : defaults.comma,
decodeDotInKeys: typeof opts.decodeDotInKeys === 'boolean' ? opts.decodeDotInKeys : defaults.decodeDotInKeys,
decoder: typeof opts.decoder === 'function' ? opts.decoder : defaults.decoder,
delimiter: typeof opts.delimiter === 'string' || utils.isRegExp(opts.delimiter) ? opts.delimiter : defaults.delimiter,
// eslint-disable-next-line no-implicit-coercion, no-extra-parens
depth: (typeof opts.depth === 'number' || opts.depth === false) ? +opts.depth : defaults.depth,
duplicates: duplicates,
ignoreQueryPrefix: opts.ignoreQueryPrefix === true,
interpretNumericEntities: typeof opts.interpretNumericEntities === 'boolean' ? opts.interpretNumericEntities : defaults.interpretNumericEntities,
parameterLimit: typeof opts.parameterLimit === 'number' ? opts.parameterLimit : defaults.parameterLimit,
parseArrays: opts.parseArrays !== false,
plainObjects: typeof opts.plainObjects === 'boolean' ? opts.plainObjects : defaults.plainObjects,
strictDepth: typeof opts.strictDepth === 'boolean' ? !!opts.strictDepth : defaults.strictDepth,
strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling,
throwOnLimitExceeded: typeof opts.throwOnLimitExceeded === 'boolean' ? opts.throwOnLimitExceeded : false
};
};
module.exports = function (str, opts) {
var options = normalizeParseOptions(opts);
if (str === '' || str === null || typeof str === 'undefined') {
return options.plainObjects ? { __proto__: null } : {};
}
var tempObj = typeof str === 'string' ? parseValues(str, options) : str;
var obj = options.plainObjects ? { __proto__: null } : {};
// Iterate over the keys and setup the new object
var keys = Object.keys(tempObj);
for (var i = 0; i < keys.length; ++i) {
var key = keys[i];
var newObj = parseKeys(key, tempObj[key], options, typeof str === 'string');
obj = utils.merge(obj, newObj, options);
}
if (options.allowSparse === true) {
return obj;
}
return utils.compact(obj);
};
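/**
* Usage sketch for the parser above (illustrative only, assuming this module is
* required as `qs`; outputs follow from the defaults normalized earlier):
*
* var qs = require('qs');
* qs.parse('a[b]=c&d=e'); // => { a: { b: 'c' }, d: 'e' }
* qs.parse('a[0]=x&a[1]=y'); // => { a: ['x', 'y'] }
* qs.parse('a=b&a=c', { duplicates: 'first' }); // => { a: 'b' }
*
* Keys nested deeper than `options.depth` (default 5) are kept as a single
* bracketed remainder, unless `strictDepth` is true, in which case parseKeys
* throws a RangeError as shown above.
*/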

View File

@ -0,0 +1,356 @@
'use strict';
var getSideChannel = require('side-channel');
var utils = require('./utils');
var formats = require('./formats');
var has = Object.prototype.hasOwnProperty;
var arrayPrefixGenerators = {
brackets: function brackets(prefix) {
return prefix + '[]';
},
comma: 'comma',
indices: function indices(prefix, key) {
return prefix + '[' + key + ']';
},
repeat: function repeat(prefix) {
return prefix;
}
};
var isArray = Array.isArray;
var push = Array.prototype.push;
var pushToArray = function (arr, valueOrArray) {
push.apply(arr, isArray(valueOrArray) ? valueOrArray : [valueOrArray]);
};
var toISO = Date.prototype.toISOString;
var defaultFormat = formats['default'];
var defaults = {
addQueryPrefix: false,
allowDots: false,
allowEmptyArrays: false,
arrayFormat: 'indices',
charset: 'utf-8',
charsetSentinel: false,
commaRoundTrip: false,
delimiter: '&',
encode: true,
encodeDotInKeys: false,
encoder: utils.encode,
encodeValuesOnly: false,
filter: void undefined,
format: defaultFormat,
formatter: formats.formatters[defaultFormat],
// deprecated
indices: false,
serializeDate: function serializeDate(date) {
return toISO.call(date);
},
skipNulls: false,
strictNullHandling: false
};
var isNonNullishPrimitive = function isNonNullishPrimitive(v) {
return typeof v === 'string'
|| typeof v === 'number'
|| typeof v === 'boolean'
|| typeof v === 'symbol'
|| typeof v === 'bigint';
};
var sentinel = {};
var stringify = function stringify(
object,
prefix,
generateArrayPrefix,
commaRoundTrip,
allowEmptyArrays,
strictNullHandling,
skipNulls,
encodeDotInKeys,
encoder,
filter,
sort,
allowDots,
serializeDate,
format,
formatter,
encodeValuesOnly,
charset,
sideChannel
) {
var obj = object;
var tmpSc = sideChannel;
var step = 0;
var findFlag = false;
while ((tmpSc = tmpSc.get(sentinel)) !== void undefined && !findFlag) {
// Where object last appeared in the ref tree
var pos = tmpSc.get(object);
step += 1;
if (typeof pos !== 'undefined') {
if (pos === step) {
throw new RangeError('Cyclic object value');
} else {
findFlag = true; // Break while
}
}
if (typeof tmpSc.get(sentinel) === 'undefined') {
step = 0;
}
}
if (typeof filter === 'function') {
obj = filter(prefix, obj);
} else if (obj instanceof Date) {
obj = serializeDate(obj);
} else if (generateArrayPrefix === 'comma' && isArray(obj)) {
obj = utils.maybeMap(obj, function (value) {
if (value instanceof Date) {
return serializeDate(value);
}
return value;
});
}
if (obj === null) {
if (strictNullHandling) {
return encoder && !encodeValuesOnly ? encoder(prefix, defaults.encoder, charset, 'key', format) : prefix;
}
obj = '';
}
if (isNonNullishPrimitive(obj) || utils.isBuffer(obj)) {
if (encoder) {
var keyValue = encodeValuesOnly ? prefix : encoder(prefix, defaults.encoder, charset, 'key', format);
return [formatter(keyValue) + '=' + formatter(encoder(obj, defaults.encoder, charset, 'value', format))];
}
return [formatter(prefix) + '=' + formatter(String(obj))];
}
var values = [];
if (typeof obj === 'undefined') {
return values;
}
var objKeys;
if (generateArrayPrefix === 'comma' && isArray(obj)) {
// we need to join the array elements into a single comma-separated string
if (encodeValuesOnly && encoder) {
obj = utils.maybeMap(obj, encoder);
}
objKeys = [{ value: obj.length > 0 ? obj.join(',') || null : void undefined }];
} else if (isArray(filter)) {
objKeys = filter;
} else {
var keys = Object.keys(obj);
objKeys = sort ? keys.sort(sort) : keys;
}
var encodedPrefix = encodeDotInKeys ? String(prefix).replace(/\./g, '%2E') : String(prefix);
var adjustedPrefix = commaRoundTrip && isArray(obj) && obj.length === 1 ? encodedPrefix + '[]' : encodedPrefix;
if (allowEmptyArrays && isArray(obj) && obj.length === 0) {
return adjustedPrefix + '[]';
}
for (var j = 0; j < objKeys.length; ++j) {
var key = objKeys[j];
var value = typeof key === 'object' && key && typeof key.value !== 'undefined'
? key.value
: obj[key];
if (skipNulls && value === null) {
continue;
}
var encodedKey = allowDots && encodeDotInKeys ? String(key).replace(/\./g, '%2E') : String(key);
var keyPrefix = isArray(obj)
? typeof generateArrayPrefix === 'function' ? generateArrayPrefix(adjustedPrefix, encodedKey) : adjustedPrefix
: adjustedPrefix + (allowDots ? '.' + encodedKey : '[' + encodedKey + ']');
sideChannel.set(object, step);
var valueSideChannel = getSideChannel();
valueSideChannel.set(sentinel, sideChannel);
pushToArray(values, stringify(
value,
keyPrefix,
generateArrayPrefix,
commaRoundTrip,
allowEmptyArrays,
strictNullHandling,
skipNulls,
encodeDotInKeys,
generateArrayPrefix === 'comma' && encodeValuesOnly && isArray(obj) ? null : encoder,
filter,
sort,
allowDots,
serializeDate,
format,
formatter,
encodeValuesOnly,
charset,
valueSideChannel
));
}
return values;
};
var normalizeStringifyOptions = function normalizeStringifyOptions(opts) {
if (!opts) {
return defaults;
}
if (typeof opts.allowEmptyArrays !== 'undefined' && typeof opts.allowEmptyArrays !== 'boolean') {
throw new TypeError('`allowEmptyArrays` option can only be `true` or `false`, when provided');
}
if (typeof opts.encodeDotInKeys !== 'undefined' && typeof opts.encodeDotInKeys !== 'boolean') {
throw new TypeError('`encodeDotInKeys` option can only be `true` or `false`, when provided');
}
if (opts.encoder !== null && typeof opts.encoder !== 'undefined' && typeof opts.encoder !== 'function') {
throw new TypeError('Encoder has to be a function.');
}
var charset = opts.charset || defaults.charset;
if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') {
throw new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined');
}
var format = formats['default'];
if (typeof opts.format !== 'undefined') {
if (!has.call(formats.formatters, opts.format)) {
throw new TypeError('Unknown format option provided.');
}
format = opts.format;
}
var formatter = formats.formatters[format];
var filter = defaults.filter;
if (typeof opts.filter === 'function' || isArray(opts.filter)) {
filter = opts.filter;
}
var arrayFormat;
if (opts.arrayFormat in arrayPrefixGenerators) {
arrayFormat = opts.arrayFormat;
} else if ('indices' in opts) {
arrayFormat = opts.indices ? 'indices' : 'repeat';
} else {
arrayFormat = defaults.arrayFormat;
}
if ('commaRoundTrip' in opts && typeof opts.commaRoundTrip !== 'boolean') {
throw new TypeError('`commaRoundTrip` must be a boolean, or absent');
}
var allowDots = typeof opts.allowDots === 'undefined' ? opts.encodeDotInKeys === true ? true : defaults.allowDots : !!opts.allowDots;
return {
addQueryPrefix: typeof opts.addQueryPrefix === 'boolean' ? opts.addQueryPrefix : defaults.addQueryPrefix,
allowDots: allowDots,
allowEmptyArrays: typeof opts.allowEmptyArrays === 'boolean' ? !!opts.allowEmptyArrays : defaults.allowEmptyArrays,
arrayFormat: arrayFormat,
charset: charset,
charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel,
commaRoundTrip: !!opts.commaRoundTrip,
delimiter: typeof opts.delimiter === 'undefined' ? defaults.delimiter : opts.delimiter,
encode: typeof opts.encode === 'boolean' ? opts.encode : defaults.encode,
encodeDotInKeys: typeof opts.encodeDotInKeys === 'boolean' ? opts.encodeDotInKeys : defaults.encodeDotInKeys,
encoder: typeof opts.encoder === 'function' ? opts.encoder : defaults.encoder,
encodeValuesOnly: typeof opts.encodeValuesOnly === 'boolean' ? opts.encodeValuesOnly : defaults.encodeValuesOnly,
filter: filter,
format: format,
formatter: formatter,
serializeDate: typeof opts.serializeDate === 'function' ? opts.serializeDate : defaults.serializeDate,
skipNulls: typeof opts.skipNulls === 'boolean' ? opts.skipNulls : defaults.skipNulls,
sort: typeof opts.sort === 'function' ? opts.sort : null,
strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling
};
};
module.exports = function (object, opts) {
var obj = object;
var options = normalizeStringifyOptions(opts);
var objKeys;
var filter;
if (typeof options.filter === 'function') {
filter = options.filter;
obj = filter('', obj);
} else if (isArray(options.filter)) {
filter = options.filter;
objKeys = filter;
}
var keys = [];
if (typeof obj !== 'object' || obj === null) {
return '';
}
var generateArrayPrefix = arrayPrefixGenerators[options.arrayFormat];
var commaRoundTrip = generateArrayPrefix === 'comma' && options.commaRoundTrip;
if (!objKeys) {
objKeys = Object.keys(obj);
}
if (options.sort) {
objKeys.sort(options.sort);
}
var sideChannel = getSideChannel();
for (var i = 0; i < objKeys.length; ++i) {
var key = objKeys[i];
var value = obj[key];
if (options.skipNulls && value === null) {
continue;
}
pushToArray(keys, stringify(
value,
key,
generateArrayPrefix,
commaRoundTrip,
options.allowEmptyArrays,
options.strictNullHandling,
options.skipNulls,
options.encodeDotInKeys,
options.encode ? options.encoder : null,
options.filter,
options.sort,
options.allowDots,
options.serializeDate,
options.format,
options.formatter,
options.encodeValuesOnly,
options.charset,
sideChannel
));
}
var joined = keys.join(options.delimiter);
var prefix = options.addQueryPrefix === true ? '?' : '';
if (options.charsetSentinel) {
if (options.charset === 'iso-8859-1') {
// encodeURIComponent('&#10003;'), the "numeric entity" representation of a checkmark
prefix += 'utf8=%26%2310003%3B&';
} else {
// encodeURIComponent('✓')
prefix += 'utf8=%E2%9C%93&';
}
}
return joined.length > 0 ? prefix + joined : '';
};
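/**
* Usage sketch for the stringifier above (illustrative only, assuming this
* module is required as `qs`):
*
* var qs = require('qs');
* qs.stringify({ a: { b: 'c' } }); // => 'a%5Bb%5D=c'
* qs.stringify({ a: ['x', 'y'] }, { arrayFormat: 'brackets' }); // => 'a%5B%5D=x&a%5B%5D=y'
* qs.stringify({ a: ['x', 'y'] }, { arrayFormat: 'comma', encodeValuesOnly: true }); // => 'a=x,y'
*
* The side channel threaded through `stringify` is what detects cyclic
* references: revisiting the same object at the same step throws a RangeError.
*/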

268
node_modules/body-parser/node_modules/qs/lib/utils.js generated vendored Normal file
View File

@ -0,0 +1,268 @@
'use strict';
var formats = require('./formats');
var has = Object.prototype.hasOwnProperty;
var isArray = Array.isArray;
var hexTable = (function () {
var array = [];
for (var i = 0; i < 256; ++i) {
array.push('%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase());
}
return array;
}());
var compactQueue = function compactQueue(queue) {
while (queue.length > 1) {
var item = queue.pop();
var obj = item.obj[item.prop];
if (isArray(obj)) {
var compacted = [];
for (var j = 0; j < obj.length; ++j) {
if (typeof obj[j] !== 'undefined') {
compacted.push(obj[j]);
}
}
item.obj[item.prop] = compacted;
}
}
};
var arrayToObject = function arrayToObject(source, options) {
var obj = options && options.plainObjects ? { __proto__: null } : {};
for (var i = 0; i < source.length; ++i) {
if (typeof source[i] !== 'undefined') {
obj[i] = source[i];
}
}
return obj;
};
var merge = function merge(target, source, options) {
/* eslint no-param-reassign: 0 */
if (!source) {
return target;
}
if (typeof source !== 'object' && typeof source !== 'function') {
if (isArray(target)) {
target.push(source);
} else if (target && typeof target === 'object') {
if (
(options && (options.plainObjects || options.allowPrototypes))
|| !has.call(Object.prototype, source)
) {
target[source] = true;
}
} else {
return [target, source];
}
return target;
}
if (!target || typeof target !== 'object') {
return [target].concat(source);
}
var mergeTarget = target;
if (isArray(target) && !isArray(source)) {
mergeTarget = arrayToObject(target, options);
}
if (isArray(target) && isArray(source)) {
source.forEach(function (item, i) {
if (has.call(target, i)) {
var targetItem = target[i];
if (targetItem && typeof targetItem === 'object' && item && typeof item === 'object') {
target[i] = merge(targetItem, item, options);
} else {
target.push(item);
}
} else {
target[i] = item;
}
});
return target;
}
return Object.keys(source).reduce(function (acc, key) {
var value = source[key];
if (has.call(acc, key)) {
acc[key] = merge(acc[key], value, options);
} else {
acc[key] = value;
}
return acc;
}, mergeTarget);
};
var assign = function assignSingleSource(target, source) {
return Object.keys(source).reduce(function (acc, key) {
acc[key] = source[key];
return acc;
}, target);
};
var decode = function (str, defaultDecoder, charset) {
var strWithoutPlus = str.replace(/\+/g, ' ');
if (charset === 'iso-8859-1') {
// unescape never throws, no try...catch needed:
return strWithoutPlus.replace(/%[0-9a-f]{2}/gi, unescape);
}
// utf-8
try {
return decodeURIComponent(strWithoutPlus);
} catch (e) {
return strWithoutPlus;
}
};
var limit = 1024;
/* eslint operator-linebreak: [2, "before"] */
var encode = function encode(str, defaultEncoder, charset, kind, format) {
// This code was originally written by Brian White (mscdex) for the io.js core querystring library.
// It has been adapted here for stricter adherence to RFC 3986
if (str.length === 0) {
return str;
}
var string = str;
if (typeof str === 'symbol') {
string = Symbol.prototype.toString.call(str);
} else if (typeof str !== 'string') {
string = String(str);
}
if (charset === 'iso-8859-1') {
return escape(string).replace(/%u[0-9a-f]{4}/gi, function ($0) {
return '%26%23' + parseInt($0.slice(2), 16) + '%3B';
});
}
var out = '';
for (var j = 0; j < string.length; j += limit) {
var segment = string.length >= limit ? string.slice(j, j + limit) : string;
var arr = [];
for (var i = 0; i < segment.length; ++i) {
var c = segment.charCodeAt(i);
if (
c === 0x2D // -
|| c === 0x2E // .
|| c === 0x5F // _
|| c === 0x7E // ~
|| (c >= 0x30 && c <= 0x39) // 0-9
|| (c >= 0x41 && c <= 0x5A) // A-Z
|| (c >= 0x61 && c <= 0x7A) // a-z
|| (format === formats.RFC1738 && (c === 0x28 || c === 0x29)) // ( )
) {
arr[arr.length] = segment.charAt(i);
continue;
}
if (c < 0x80) {
arr[arr.length] = hexTable[c];
continue;
}
if (c < 0x800) {
arr[arr.length] = hexTable[0xC0 | (c >> 6)]
+ hexTable[0x80 | (c & 0x3F)];
continue;
}
if (c < 0xD800 || c >= 0xE000) {
arr[arr.length] = hexTable[0xE0 | (c >> 12)]
+ hexTable[0x80 | ((c >> 6) & 0x3F)]
+ hexTable[0x80 | (c & 0x3F)];
continue;
}
i += 1;
c = 0x10000 + (((c & 0x3FF) << 10) | (segment.charCodeAt(i) & 0x3FF));
arr[arr.length] = hexTable[0xF0 | (c >> 18)]
+ hexTable[0x80 | ((c >> 12) & 0x3F)]
+ hexTable[0x80 | ((c >> 6) & 0x3F)]
+ hexTable[0x80 | (c & 0x3F)];
}
out += arr.join('');
}
return out;
};
var compact = function compact(value) {
var queue = [{ obj: { o: value }, prop: 'o' }];
var refs = [];
for (var i = 0; i < queue.length; ++i) {
var item = queue[i];
var obj = item.obj[item.prop];
var keys = Object.keys(obj);
for (var j = 0; j < keys.length; ++j) {
var key = keys[j];
var val = obj[key];
if (typeof val === 'object' && val !== null && refs.indexOf(val) === -1) {
queue.push({ obj: obj, prop: key });
refs.push(val);
}
}
}
compactQueue(queue);
return value;
};
var isRegExp = function isRegExp(obj) {
return Object.prototype.toString.call(obj) === '[object RegExp]';
};
var isBuffer = function isBuffer(obj) {
if (!obj || typeof obj !== 'object') {
return false;
}
return !!(obj.constructor && obj.constructor.isBuffer && obj.constructor.isBuffer(obj));
};
var combine = function combine(a, b) {
return [].concat(a, b);
};
var maybeMap = function maybeMap(val, fn) {
if (isArray(val)) {
var mapped = [];
for (var i = 0; i < val.length; i += 1) {
mapped.push(fn(val[i]));
}
return mapped;
}
return fn(val);
};
module.exports = {
arrayToObject: arrayToObject,
assign: assign,
combine: combine,
compact: compact,
decode: decode,
encode: encode,
isBuffer: isBuffer,
isRegExp: isRegExp,
maybeMap: maybeMap,
merge: merge
};
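/**
* Rough behaviour sketch for the helpers above (illustrative values, consistent
* with the unit tests further below):
*
* var utils = require('./utils');
* utils.merge({ a: 'b' }, { a: 'c' }); // => { a: ['b', 'c'] }
* utils.combine('x', ['y']); // => ['x', 'y']
* utils.encode('Ā'); // => '%C4%80' (UTF-8 percent-encoding)
* utils.decode('a+b%20c'); // => 'a b c'
*/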

93
node_modules/body-parser/node_modules/qs/package.json generated vendored Normal file
View File

@ -0,0 +1,93 @@
{
"name": "qs",
"description": "A querystring parser that supports nesting and arrays, with a depth limit",
"homepage": "https://github.com/ljharb/qs",
"version": "6.14.0",
"repository": {
"type": "git",
"url": "https://github.com/ljharb/qs.git"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
},
"main": "lib/index.js",
"sideEffects": false,
"contributors": [
{
"name": "Jordan Harband",
"email": "ljharb@gmail.com",
"url": "http://ljharb.codes"
}
],
"keywords": [
"querystring",
"qs",
"query",
"url",
"parse",
"stringify"
],
"engines": {
"node": ">=0.6"
},
"dependencies": {
"side-channel": "^1.1.0"
},
"devDependencies": {
"@browserify/envify": "^6.0.0",
"@browserify/uglifyify": "^6.0.0",
"@ljharb/eslint-config": "^21.1.1",
"browserify": "^16.5.2",
"bundle-collapser": "^1.4.0",
"common-shakeify": "~1.0.0",
"eclint": "^2.8.1",
"es-value-fixtures": "^1.7.0",
"eslint": "=8.8.0",
"evalmd": "^0.0.19",
"for-each": "^0.3.3",
"glob": "=10.3.7",
"has-bigints": "^1.1.0",
"has-override-mistake": "^1.0.1",
"has-property-descriptors": "^1.0.2",
"has-proto": "^1.2.0",
"has-symbols": "^1.1.0",
"iconv-lite": "^0.5.1",
"in-publish": "^2.0.1",
"jackspeak": "=2.1.1",
"mkdirp": "^0.5.5",
"mock-property": "^1.1.0",
"module-deps": "^6.2.3",
"npmignore": "^0.3.1",
"nyc": "^10.3.2",
"object-inspect": "^1.13.3",
"qs-iconv": "^1.0.4",
"safe-publish-latest": "^2.0.0",
"safer-buffer": "^2.1.2",
"tape": "^5.9.0",
"unassertify": "^3.0.1"
},
"scripts": {
"prepack": "npmignore --auto --commentLines=autogenerated && npm run dist",
"prepublishOnly": "safe-publish-latest",
"prepublish": "not-in-publish || npm run prepublishOnly",
"pretest": "npm run --silent readme && npm run --silent lint",
"test": "npm run tests-only",
"tests-only": "nyc tape 'test/**/*.js'",
"posttest": "npx npm@'>=10.2' audit --production",
"readme": "evalmd README.md",
"postlint": "eclint check $(git ls-files | xargs find 2> /dev/null | grep -vE 'node_modules|\\.git' | grep -v dist/)",
"lint": "eslint --ext=js,mjs .",
"dist": "mkdirp dist && browserify --standalone Qs -g unassertify -g @browserify/envify -g [@browserify/uglifyify --mangle.keep_fnames --compress.keep_fnames --format.indent_level=1 --compress.arrows=false --compress.passes=4 --compress.typeofs=false] -p common-shakeify -p bundle-collapser/plugin lib/index.js > dist/qs.js"
},
"license": "BSD-3-Clause",
"publishConfig": {
"ignore": [
"!dist/*",
"bower.json",
"component.json",
".github/workflows",
"logos",
"tea.yaml"
]
}
}

View File

@ -0,0 +1,267 @@
'use strict';
module.exports = {
emptyTestCases: [
{
input: '&',
withEmptyKeys: {},
stringifyOutput: {
brackets: '',
indices: '',
repeat: ''
},
noEmptyKeys: {}
},
{
input: '&&',
withEmptyKeys: {},
stringifyOutput: {
brackets: '',
indices: '',
repeat: ''
},
noEmptyKeys: {}
},
{
input: '&=',
withEmptyKeys: { '': '' },
stringifyOutput: {
brackets: '=',
indices: '=',
repeat: '='
},
noEmptyKeys: {}
},
{
input: '&=&',
withEmptyKeys: { '': '' },
stringifyOutput: {
brackets: '=',
indices: '=',
repeat: '='
},
noEmptyKeys: {}
},
{
input: '&=&=',
withEmptyKeys: { '': ['', ''] },
stringifyOutput: {
brackets: '[]=&[]=',
indices: '[0]=&[1]=',
repeat: '=&='
},
noEmptyKeys: {}
},
{
input: '&=&=&',
withEmptyKeys: { '': ['', ''] },
stringifyOutput: {
brackets: '[]=&[]=',
indices: '[0]=&[1]=',
repeat: '=&='
},
noEmptyKeys: {}
},
{
input: '=',
withEmptyKeys: { '': '' },
noEmptyKeys: {},
stringifyOutput: {
brackets: '=',
indices: '=',
repeat: '='
}
},
{
input: '=&',
withEmptyKeys: { '': '' },
stringifyOutput: {
brackets: '=',
indices: '=',
repeat: '='
},
noEmptyKeys: {}
},
{
input: '=&&&',
withEmptyKeys: { '': '' },
stringifyOutput: {
brackets: '=',
indices: '=',
repeat: '='
},
noEmptyKeys: {}
},
{
input: '=&=&=&',
withEmptyKeys: { '': ['', '', ''] },
stringifyOutput: {
brackets: '[]=&[]=&[]=',
indices: '[0]=&[1]=&[2]=',
repeat: '=&=&='
},
noEmptyKeys: {}
},
{
input: '=&a[]=b&a[1]=c',
withEmptyKeys: { '': '', a: ['b', 'c'] },
stringifyOutput: {
brackets: '=&a[]=b&a[]=c',
indices: '=&a[0]=b&a[1]=c',
repeat: '=&a=b&a=c'
},
noEmptyKeys: { a: ['b', 'c'] }
},
{
input: '=a',
withEmptyKeys: { '': 'a' },
noEmptyKeys: {},
stringifyOutput: {
brackets: '=a',
indices: '=a',
repeat: '=a'
}
},
{
input: 'a==a',
withEmptyKeys: { a: '=a' },
noEmptyKeys: { a: '=a' },
stringifyOutput: {
brackets: 'a==a',
indices: 'a==a',
repeat: 'a==a'
}
},
{
input: '=&a[]=b',
withEmptyKeys: { '': '', a: ['b'] },
stringifyOutput: {
brackets: '=&a[]=b',
indices: '=&a[0]=b',
repeat: '=&a=b'
},
noEmptyKeys: { a: ['b'] }
},
{
input: '=&a[]=b&a[]=c&a[2]=d',
withEmptyKeys: { '': '', a: ['b', 'c', 'd'] },
stringifyOutput: {
brackets: '=&a[]=b&a[]=c&a[]=d',
indices: '=&a[0]=b&a[1]=c&a[2]=d',
repeat: '=&a=b&a=c&a=d'
},
noEmptyKeys: { a: ['b', 'c', 'd'] }
},
{
input: '=a&=b',
withEmptyKeys: { '': ['a', 'b'] },
stringifyOutput: {
brackets: '[]=a&[]=b',
indices: '[0]=a&[1]=b',
repeat: '=a&=b'
},
noEmptyKeys: {}
},
{
input: '=a&foo=b',
withEmptyKeys: { '': 'a', foo: 'b' },
noEmptyKeys: { foo: 'b' },
stringifyOutput: {
brackets: '=a&foo=b',
indices: '=a&foo=b',
repeat: '=a&foo=b'
}
},
{
input: 'a[]=b&a=c&=',
withEmptyKeys: { '': '', a: ['b', 'c'] },
stringifyOutput: {
brackets: '=&a[]=b&a[]=c',
indices: '=&a[0]=b&a[1]=c',
repeat: '=&a=b&a=c'
},
noEmptyKeys: { a: ['b', 'c'] }
},
{
input: 'a[]=b&a=c&=',
withEmptyKeys: { '': '', a: ['b', 'c'] },
stringifyOutput: {
brackets: '=&a[]=b&a[]=c',
indices: '=&a[0]=b&a[1]=c',
repeat: '=&a=b&a=c'
},
noEmptyKeys: { a: ['b', 'c'] }
},
{
input: 'a[0]=b&a=c&=',
withEmptyKeys: { '': '', a: ['b', 'c'] },
stringifyOutput: {
brackets: '=&a[]=b&a[]=c',
indices: '=&a[0]=b&a[1]=c',
repeat: '=&a=b&a=c'
},
noEmptyKeys: { a: ['b', 'c'] }
},
{
input: 'a=b&a[]=c&=',
withEmptyKeys: { '': '', a: ['b', 'c'] },
stringifyOutput: {
brackets: '=&a[]=b&a[]=c',
indices: '=&a[0]=b&a[1]=c',
repeat: '=&a=b&a=c'
},
noEmptyKeys: { a: ['b', 'c'] }
},
{
input: 'a=b&a[0]=c&=',
withEmptyKeys: { '': '', a: ['b', 'c'] },
stringifyOutput: {
brackets: '=&a[]=b&a[]=c',
indices: '=&a[0]=b&a[1]=c',
repeat: '=&a=b&a=c'
},
noEmptyKeys: { a: ['b', 'c'] }
},
{
input: '[]=a&[]=b& []=1',
withEmptyKeys: { '': ['a', 'b'], ' ': ['1'] },
stringifyOutput: {
brackets: '[]=a&[]=b& []=1',
indices: '[0]=a&[1]=b& [0]=1',
repeat: '=a&=b& =1'
},
noEmptyKeys: { 0: 'a', 1: 'b', ' ': ['1'] }
},
{
input: '[0]=a&[1]=b&a[0]=1&a[1]=2',
withEmptyKeys: { '': ['a', 'b'], a: ['1', '2'] },
noEmptyKeys: { 0: 'a', 1: 'b', a: ['1', '2'] },
stringifyOutput: {
brackets: '[]=a&[]=b&a[]=1&a[]=2',
indices: '[0]=a&[1]=b&a[0]=1&a[1]=2',
repeat: '=a&=b&a=1&a=2'
}
},
{
input: '[deep]=a&[deep]=2',
withEmptyKeys: { '': { deep: ['a', '2'] } },
stringifyOutput: {
brackets: '[deep][]=a&[deep][]=2',
indices: '[deep][0]=a&[deep][1]=2',
repeat: '[deep]=a&[deep]=2'
},
noEmptyKeys: { deep: ['a', '2'] }
},
{
input: '%5B0%5D=a&%5B1%5D=b',
withEmptyKeys: { '': ['a', 'b'] },
stringifyOutput: {
brackets: '[]=a&[]=b',
indices: '[0]=a&[1]=b',
repeat: '=a&=b'
},
noEmptyKeys: { 0: 'a', 1: 'b' }
}
]
};

1276
node_modules/body-parser/node_modules/qs/test/parse.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

262
node_modules/body-parser/node_modules/qs/test/utils.js generated vendored Normal file
View File

@ -0,0 +1,262 @@
'use strict';
var test = require('tape');
var inspect = require('object-inspect');
var SaferBuffer = require('safer-buffer').Buffer;
var forEach = require('for-each');
var v = require('es-value-fixtures');
var utils = require('../lib/utils');
test('merge()', function (t) {
t.deepEqual(utils.merge(null, true), [null, true], 'merges true into null');
t.deepEqual(utils.merge(null, [42]), [null, 42], 'merges null into an array');
t.deepEqual(utils.merge({ a: 'b' }, { a: 'c' }), { a: ['b', 'c'] }, 'merges two objects with the same key');
var oneMerged = utils.merge({ foo: 'bar' }, { foo: { first: '123' } });
t.deepEqual(oneMerged, { foo: ['bar', { first: '123' }] }, 'merges a standalone and an object into an array');
var twoMerged = utils.merge({ foo: ['bar', { first: '123' }] }, { foo: { second: '456' } });
t.deepEqual(twoMerged, { foo: { 0: 'bar', 1: { first: '123' }, second: '456' } }, 'merges a standalone and two objects into an array');
var sandwiched = utils.merge({ foo: ['bar', { first: '123', second: '456' }] }, { foo: 'baz' });
t.deepEqual(sandwiched, { foo: ['bar', { first: '123', second: '456' }, 'baz'] }, 'merges an object sandwiched by two standalones into an array');
var nestedArrays = utils.merge({ foo: ['baz'] }, { foo: ['bar', 'xyzzy'] });
t.deepEqual(nestedArrays, { foo: ['baz', 'bar', 'xyzzy'] });
var noOptionsNonObjectSource = utils.merge({ foo: 'baz' }, 'bar');
t.deepEqual(noOptionsNonObjectSource, { foo: 'baz', bar: true });
var func = function f() {};
t.deepEqual(
utils.merge(func, { foo: 'bar' }),
[func, { foo: 'bar' }],
'functions can not be merged into'
);
func.bar = 'baz';
t.deepEqual(
utils.merge({ foo: 'bar' }, func),
{ foo: 'bar', bar: 'baz' },
'functions can be merge sources'
);
t.test(
'avoids invoking array setters unnecessarily',
{ skip: typeof Object.defineProperty !== 'function' },
function (st) {
var setCount = 0;
var getCount = 0;
var observed = [];
Object.defineProperty(observed, 0, {
get: function () {
getCount += 1;
return { bar: 'baz' };
},
set: function () { setCount += 1; }
});
utils.merge(observed, [null]);
st.equal(setCount, 0);
st.equal(getCount, 1);
observed[0] = observed[0]; // eslint-disable-line no-self-assign
st.equal(setCount, 1);
st.equal(getCount, 2);
st.end();
}
);
t.end();
});
test('assign()', function (t) {
var target = { a: 1, b: 2 };
var source = { b: 3, c: 4 };
var result = utils.assign(target, source);
t.equal(result, target, 'returns the target');
t.deepEqual(target, { a: 1, b: 3, c: 4 }, 'target and source are merged');
t.deepEqual(source, { b: 3, c: 4 }, 'source is untouched');
t.end();
});
test('combine()', function (t) {
t.test('both arrays', function (st) {
var a = [1];
var b = [2];
var combined = utils.combine(a, b);
st.deepEqual(a, [1], 'a is not mutated');
st.deepEqual(b, [2], 'b is not mutated');
st.notEqual(a, combined, 'a !== combined');
st.notEqual(b, combined, 'b !== combined');
st.deepEqual(combined, [1, 2], 'combined is a + b');
st.end();
});
t.test('one array, one non-array', function (st) {
var aN = 1;
var a = [aN];
var bN = 2;
var b = [bN];
var combinedAnB = utils.combine(aN, b);
st.deepEqual(b, [bN], 'b is not mutated');
st.notEqual(aN, combinedAnB, 'aN + b !== aN');
st.notEqual(a, combinedAnB, 'aN + b !== a');
st.notEqual(bN, combinedAnB, 'aN + b !== bN');
st.notEqual(b, combinedAnB, 'aN + b !== b');
st.deepEqual([1, 2], combinedAnB, 'first argument is array-wrapped when not an array');
var combinedABn = utils.combine(a, bN);
st.deepEqual(a, [aN], 'a is not mutated');
st.notEqual(aN, combinedABn, 'a + bN !== aN');
st.notEqual(a, combinedABn, 'a + bN !== a');
st.notEqual(bN, combinedABn, 'a + bN !== bN');
st.notEqual(b, combinedABn, 'a + bN !== b');
st.deepEqual([1, 2], combinedABn, 'second argument is array-wrapped when not an array');
st.end();
});
t.test('neither is an array', function (st) {
var combined = utils.combine(1, 2);
st.notEqual(1, combined, '1 + 2 !== 1');
st.notEqual(2, combined, '1 + 2 !== 2');
st.deepEqual([1, 2], combined, 'both arguments are array-wrapped when not an array');
st.end();
});
t.end();
});
test('decode', function (t) {
t.equal(
utils.decode('a+b'),
'a b',
'decodes + to space'
);
t.equal(
utils.decode('name%2Eobj'),
'name.obj',
'decodes a string'
);
t.equal(
utils.decode('name%2Eobj%2Efoo', null, 'iso-8859-1'),
'name.obj.foo',
'decodes a string in iso-8859-1'
);
t.end();
});
test('encode', function (t) {
forEach(v.nullPrimitives, function (nullish) {
t['throws'](
function () { utils.encode(nullish); },
TypeError,
inspect(nullish) + ' is not a string'
);
});
t.equal(utils.encode(''), '', 'empty string returns itself');
t.deepEqual(utils.encode([]), [], 'empty array returns itself');
t.deepEqual(utils.encode({ length: 0 }), { length: 0 }, 'empty arraylike returns itself');
t.test('symbols', { skip: !v.hasSymbols }, function (st) {
st.equal(utils.encode(Symbol('x')), 'Symbol%28x%29', 'symbol is encoded');
st.end();
});
t.equal(
utils.encode('(abc)'),
'%28abc%29',
'encodes parentheses'
);
t.equal(
utils.encode({ toString: function () { return '(abc)'; } }),
'%28abc%29',
'toStrings and encodes parentheses'
);
t.equal(
utils.encode('abc 123 💩', null, 'iso-8859-1'),
'abc%20123%20%26%2355357%3B%26%2356489%3B',
'encodes in iso-8859-1'
);
var longString = '';
var expectedString = '';
for (var i = 0; i < 1500; i++) {
longString += ' ';
expectedString += '%20';
}
t.equal(
utils.encode(longString),
expectedString,
'encodes a long string'
);
t.equal(
utils.encode('\x28\x29'),
'%28%29',
'encodes parens normally'
);
t.equal(
utils.encode('\x28\x29', null, null, null, 'RFC1738'),
'()',
'does not encode parens in RFC1738'
);
// todo RFC1738 format
t.equal(
utils.encode('Āက豈'),
'%C4%80%E1%80%80%EF%A4%80',
'encodes multibyte chars'
);
t.equal(
utils.encode('\uD83D \uDCA9'),
'%F0%9F%90%A0%F0%BA%90%80',
'encodes lone surrogates'
);
t.end();
});
test('isBuffer()', function (t) {
forEach([null, undefined, true, false, '', 'abc', 42, 0, NaN, {}, [], function () {}, /a/g], function (x) {
t.equal(utils.isBuffer(x), false, inspect(x) + ' is not a buffer');
});
var fakeBuffer = { constructor: Buffer };
t.equal(utils.isBuffer(fakeBuffer), false, 'fake buffer is not a buffer');
var saferBuffer = SaferBuffer.from('abc');
t.equal(utils.isBuffer(saferBuffer), true, 'SaferBuffer instance is a buffer');
var buffer = Buffer.from && Buffer.alloc ? Buffer.from('abc') : new Buffer('abc');
t.equal(utils.isBuffer(buffer), true, 'real Buffer instance is a buffer');
t.end();
});
test('isRegExp()', function (t) {
t.equal(utils.isRegExp(/a/g), true, 'RegExp is a RegExp');
t.equal(utils.isRegExp(new RegExp('a', 'g')), true, 'new RegExp is a RegExp');
t.equal(utils.isRegExp(new Date()), false, 'Date is not a RegExp');
forEach(v.primitives, function (primitive) {
t.equal(utils.isRegExp(primitive), false, inspect(primitive) + ' is not a RegExp');
});
t.end();
});

250
node_modules/body-parser/node_modules/type-is/index.js generated vendored Normal file
View File

@ -0,0 +1,250 @@
/*!
* type-is
* Copyright(c) 2014 Jonathan Ong
* Copyright(c) 2014-2015 Douglas Christopher Wilson
* MIT Licensed
*/
'use strict'
/**
* Module dependencies.
* @private
*/
var contentType = require('content-type')
var mime = require('mime-types')
var typer = require('media-typer')
/**
* Module exports.
* @public
*/
module.exports = typeofrequest
module.exports.is = typeis
module.exports.hasBody = hasbody
module.exports.normalize = normalize
module.exports.match = mimeMatch
/**
* Compare a `value` content-type with `types`.
* Each `type` can be an extension like `html`,
* a special shortcut like `multipart` or `urlencoded`,
* or a mime type.
*
* If no types match, `false` is returned.
* Otherwise, the first `type` that matches is returned.
*
* @param {String} value
* @param {Array} types
* @public
*/
function typeis (value, types_) {
var i
var types = types_
// remove parameters and normalize
var val = tryNormalizeType(value)
// no type or invalid
if (!val) {
return false
}
// support flattened arguments
if (types && !Array.isArray(types)) {
types = new Array(arguments.length - 1)
for (i = 0; i < types.length; i++) {
types[i] = arguments[i + 1]
}
}
// no types, return the content type
if (!types || !types.length) {
return val
}
var type
for (i = 0; i < types.length; i++) {
if (mimeMatch(normalize(type = types[i]), val)) {
return type[0] === '+' || type.indexOf('*') !== -1
? val
: type
}
}
// no matches
return false
}
/**
* Check if a request has a request body.
* A request with a body __must__ either have `transfer-encoding`
* or `content-length` headers set.
* http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.3
*
* @param {Object} request
* @return {Boolean}
* @public
*/
function hasbody (req) {
return req.headers['transfer-encoding'] !== undefined ||
!isNaN(req.headers['content-length'])
}
/**
* Check if the incoming request contains the "Content-Type"
* header field, and it contains any of the given mime `type`s.
* If there is no request body, `null` is returned.
* If there is no content type, `false` is returned.
* Otherwise, it returns the first `type` that matches.
*
* Examples:
*
* // With Content-Type: text/html; charset=utf-8
* this.is('html'); // => 'html'
* this.is('text/html'); // => 'text/html'
* this.is('text/*', 'application/json'); // => 'text/html'
*
* // When Content-Type is application/json
* this.is('json', 'urlencoded'); // => 'json'
* this.is('application/json'); // => 'application/json'
* this.is('html', 'application/*'); // => 'application/json'
*
* this.is('html'); // => false
*
* @param {Object} req
* @param {(String|Array)} types...
* @return {(String|false|null)}
* @public
*/
function typeofrequest (req, types_) {
// no body
if (!hasbody(req)) return null
// support flattened arguments
var types = arguments.length > 2
? Array.prototype.slice.call(arguments, 1)
: types_
// request content type
var value = req.headers['content-type']
return typeis(value, types)
}
/**
* Normalize a mime type.
* If it's a shorthand, expand it to a valid mime type.
*
* In general, you probably want:
*
* var type = is(req, ['urlencoded', 'json', 'multipart']);
*
* Then use the appropriate body parsers.
* These three are the most common request body types
* and are thus ensured to work.
*
* @param {String} type
* @return {String|false|null}
* @public
*/
function normalize (type) {
if (typeof type !== 'string') {
// invalid type
return false
}
switch (type) {
case 'urlencoded':
return 'application/x-www-form-urlencoded'
case 'multipart':
return 'multipart/*'
}
if (type[0] === '+') {
// "+json" -> "*/*+json" expando
return '*/*' + type
}
return type.indexOf('/') === -1
? mime.lookup(type)
: type
}
/**
* Check if `expected` mime type
* matches `actual` mime type with
* wildcard and +suffix support.
*
* @param {String} expected
* @param {String} actual
* @return {Boolean}
* @public
*/
function mimeMatch (expected, actual) {
// invalid type
if (expected === false) {
return false
}
// split types
var actualParts = actual.split('/')
var expectedParts = expected.split('/')
// invalid format
if (actualParts.length !== 2 || expectedParts.length !== 2) {
return false
}
// validate type
if (expectedParts[0] !== '*' && expectedParts[0] !== actualParts[0]) {
return false
}
// validate suffix wildcard
if (expectedParts[1].slice(0, 2) === '*+') {
return expectedParts[1].length <= actualParts[1].length + 1 &&
expectedParts[1].slice(1) === actualParts[1].slice(1 - expectedParts[1].length)
}
// validate subtype
if (expectedParts[1] !== '*' && expectedParts[1] !== actualParts[1]) {
return false
}
return true
}
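// Illustrative matches for the rules above (hypothetical inputs, not taken
// from the test suite):
// mimeMatch('text/*', 'text/html') // => true
// mimeMatch('*/*+json', 'application/vnd.api+json') // => true
// mimeMatch('text/html', 'text/plain') // => false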
/**
* Normalize a type and remove parameters.
*
* @param {string} value
* @return {(string|null)}
* @private
*/
function normalizeType (value) {
// Parse the type
var type = contentType.parse(value).type
return typer.test(type) ? type : null
}
/**
* Try to normalize a type and remove parameters.
*
* @param {string} value
* @return {(string|null)}
* @private
*/
function tryNormalizeType (value) {
try {
return value ? normalizeType(value) : null
} catch (err) {
return null
}
}

View File

@ -0,0 +1,47 @@
{
"name": "type-is",
"description": "Infer the content-type of a request.",
"version": "2.0.1",
"contributors": [
"Douglas Christopher Wilson <doug@somethingdoug.com>",
"Jonathan Ong <me@jongleberry.com> (http://jongleberry.com)"
],
"license": "MIT",
"repository": "jshttp/type-is",
"dependencies": {
"content-type": "^1.0.5",
"media-typer": "^1.1.0",
"mime-types": "^3.0.0"
},
"devDependencies": {
"eslint": "7.32.0",
"eslint-config-standard": "14.1.1",
"eslint-plugin-import": "2.25.4",
"eslint-plugin-markdown": "2.2.1",
"eslint-plugin-node": "11.1.0",
"eslint-plugin-promise": "5.2.0",
"eslint-plugin-standard": "4.1.0",
"mocha": "9.2.1",
"nyc": "15.1.0"
},
"engines": {
"node": ">= 0.6"
},
"files": [
"LICENSE",
"HISTORY.md",
"index.js"
],
"scripts": {
"lint": "eslint .",
"test": "mocha --reporter spec --check-leaks --bail test/",
"test:debug": "mocha --reporter spec --check-leaks --inspect --inspect-brk test/",
"test-ci": "nyc --reporter=lcovonly --reporter=text npm test",
"test-cov": "nyc --reporter=html --reporter=text npm test"
},
"keywords": [
"content",
"type",
"checking"
]
}

View File

@ -1,7 +1,7 @@
{
"name": "body-parser",
"description": "Node.js body parsing middleware",
"version": "1.20.3",
"version": "2.2.0",
"contributors": [
"Douglas Christopher Wilson <doug@somethingdoug.com>",
"Jonathan Ong <me@jongleberry.com> (http://jongleberry.com)"
@ -9,18 +9,15 @@
"license": "MIT",
"repository": "expressjs/body-parser",
"dependencies": {
"bytes": "3.1.2",
"content-type": "~1.0.5",
"debug": "2.6.9",
"depd": "2.0.0",
"destroy": "1.2.0",
"http-errors": "2.0.0",
"iconv-lite": "0.4.24",
"on-finished": "2.4.1",
"qs": "6.13.0",
"raw-body": "2.5.2",
"type-is": "~1.6.18",
"unpipe": "1.0.0"
"bytes": "^3.1.2",
"content-type": "^1.0.5",
"debug": "^4.4.0",
"http-errors": "^2.0.0",
"iconv-lite": "^0.6.3",
"on-finished": "^2.4.1",
"qs": "^6.14.0",
"raw-body": "^3.0.0",
"type-is": "^2.0.0"
},
"devDependencies": {
"eslint": "8.34.0",
@ -30,27 +27,23 @@
"eslint-plugin-node": "11.1.0",
"eslint-plugin-promise": "6.1.1",
"eslint-plugin-standard": "4.1.0",
"methods": "1.1.2",
"mocha": "10.2.0",
"nyc": "15.1.0",
"safe-buffer": "5.2.1",
"supertest": "6.3.3"
"mocha": "^11.1.0",
"nyc": "^17.1.0",
"supertest": "^7.0.0"
},
"files": [
"lib/",
"LICENSE",
"HISTORY.md",
"SECURITY.md",
"index.js"
],
"engines": {
"node": ">= 0.8",
"npm": "1.2.8000 || >= 1.4.16"
"node": ">=18"
},
"scripts": {
"lint": "eslint .",
"test": "mocha --require test/support/env --reporter spec --check-leaks --bail test/",
"test-ci": "nyc --reporter=lcov --reporter=text npm test",
"test": "mocha --reporter spec --check-leaks test/",
"test-ci": "nyc --reporter=lcovonly --reporter=text npm test",
"test-cov": "nyc --reporter=html --reporter=text npm test"
}
}

8
node_modules/boolbase/index.js generated vendored
View File

@ -1,8 +0,0 @@
module.exports = {
trueFunc: function trueFunc(){
return true;
},
falseFunc: function falseFunc(){
return false;
}
};

23
node_modules/boolbase/package.json generated vendored
View File

@ -1,23 +0,0 @@
{
"name": "boolbase",
"version": "1.0.0",
"description": "two functions: One that returns true, one that returns false",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"repository": {
"type": "git",
"url": "https://github.com/fb55/boolbase"
},
"keywords": [
"boolean",
"function"
],
"author": "Felix Boehm <me@feedic.com>",
"license": "ISC",
"bugs": {
"url": "https://github.com/fb55/boolbase/issues"
},
"homepage": "https://github.com/fb55/boolbase"
}

201
node_modules/brace-expansion/index.js generated vendored
View File

@ -1,201 +0,0 @@
var concatMap = require('concat-map');
var balanced = require('balanced-match');
module.exports = expandTop;
var escSlash = '\0SLASH'+Math.random()+'\0';
var escOpen = '\0OPEN'+Math.random()+'\0';
var escClose = '\0CLOSE'+Math.random()+'\0';
var escComma = '\0COMMA'+Math.random()+'\0';
var escPeriod = '\0PERIOD'+Math.random()+'\0';
function numeric(str) {
return parseInt(str, 10) == str
? parseInt(str, 10)
: str.charCodeAt(0);
}
function escapeBraces(str) {
return str.split('\\\\').join(escSlash)
.split('\\{').join(escOpen)
.split('\\}').join(escClose)
.split('\\,').join(escComma)
.split('\\.').join(escPeriod);
}
function unescapeBraces(str) {
return str.split(escSlash).join('\\')
.split(escOpen).join('{')
.split(escClose).join('}')
.split(escComma).join(',')
.split(escPeriod).join('.');
}
// Basically just str.split(","), but handling cases
// where we have nested braced sections, which should be
// treated as individual members, like {a,{b,c},d}
function parseCommaParts(str) {
if (!str)
return [''];
var parts = [];
var m = balanced('{', '}', str);
if (!m)
return str.split(',');
var pre = m.pre;
var body = m.body;
var post = m.post;
var p = pre.split(',');
p[p.length-1] += '{' + body + '}';
var postParts = parseCommaParts(post);
if (post.length) {
p[p.length-1] += postParts.shift();
p.push.apply(p, postParts);
}
parts.push.apply(parts, p);
return parts;
}
function expandTop(str) {
if (!str)
return [];
// I don't know why Bash 4.3 does this, but it does.
// Anything starting with {} will have the first two bytes preserved
// but *only* at the top level, so {},a}b will not expand to anything,
// but a{},b}c will be expanded to [a}c,abc].
// One could argue that this is a bug in Bash, but since the goal of
// this module is to match Bash's rules, we escape a leading {}
if (str.substr(0, 2) === '{}') {
str = '\\{\\}' + str.substr(2);
}
return expand(escapeBraces(str), true).map(unescapeBraces);
}
function identity(e) {
return e;
}
function embrace(str) {
return '{' + str + '}';
}
function isPadded(el) {
return /^-?0\d/.test(el);
}
function lte(i, y) {
return i <= y;
}
function gte(i, y) {
return i >= y;
}
function expand(str, isTop) {
var expansions = [];
var m = balanced('{', '}', str);
if (!m || /\$$/.test(m.pre)) return [str];
var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
var isSequence = isNumericSequence || isAlphaSequence;
var isOptions = m.body.indexOf(',') >= 0;
if (!isSequence && !isOptions) {
// {a},b}
if (m.post.match(/,.*\}/)) {
str = m.pre + '{' + m.body + escClose + m.post;
return expand(str);
}
return [str];
}
var n;
if (isSequence) {
n = m.body.split(/\.\./);
} else {
n = parseCommaParts(m.body);
if (n.length === 1) {
// x{{a,b}}y ==> x{a}y x{b}y
n = expand(n[0], false).map(embrace);
if (n.length === 1) {
var post = m.post.length
? expand(m.post, false)
: [''];
return post.map(function(p) {
return m.pre + n[0] + p;
});
}
}
}
// at this point, n is the parts, and we know it's not a comma set
// with a single entry.
// no need to expand pre, since it is guaranteed to be free of brace-sets
var pre = m.pre;
var post = m.post.length
? expand(m.post, false)
: [''];
var N;
if (isSequence) {
var x = numeric(n[0]);
var y = numeric(n[1]);
var width = Math.max(n[0].length, n[1].length)
var incr = n.length == 3
? Math.abs(numeric(n[2]))
: 1;
var test = lte;
var reverse = y < x;
if (reverse) {
incr *= -1;
test = gte;
}
var pad = n.some(isPadded);
N = [];
for (var i = x; test(i, y); i += incr) {
var c;
if (isAlphaSequence) {
c = String.fromCharCode(i);
if (c === '\\')
c = '';
} else {
c = String(i);
if (pad) {
var need = width - c.length;
if (need > 0) {
var z = new Array(need + 1).join('0');
if (i < 0)
c = '-' + z + c.slice(1);
else
c = z + c;
}
}
}
N.push(c);
}
} else {
N = concatMap(n, function(el) { return expand(el, false) });
}
for (var j = 0; j < N.length; j++) {
for (var k = 0; k < post.length; k++) {
var expansion = pre + N[j] + post[k];
if (!isTop || isSequence || expansion)
expansions.push(expansion);
}
}
return expansions;
}

View File

@ -1,47 +0,0 @@
{
"name": "brace-expansion",
"description": "Brace expansion as known from sh/bash",
"version": "1.1.11",
"repository": {
"type": "git",
"url": "git://github.com/juliangruber/brace-expansion.git"
},
"homepage": "https://github.com/juliangruber/brace-expansion",
"main": "index.js",
"scripts": {
"test": "tape test/*.js",
"gentest": "bash test/generate.sh",
"bench": "matcha test/perf/bench.js"
},
"dependencies": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
},
"devDependencies": {
"matcha": "^0.7.0",
"tape": "^4.6.0"
},
"keywords": [],
"author": {
"name": "Julian Gruber",
"email": "mail@juliangruber.com",
"url": "http://juliangruber.com"
},
"license": "MIT",
"testling": {
"files": "test/*.js",
"browsers": [
"ie/8..latest",
"firefox/20..latest",
"firefox/nightly",
"chrome/25..latest",
"chrome/canary",
"opera/12..latest",
"opera/next",
"safari/5.1..latest",
"ipad/6.0..latest",
"iphone/6.0..latest",
"android-browser/4.2..latest"
]
}
}

170
node_modules/braces/index.js generated vendored
View File

@ -1,170 +0,0 @@
'use strict';
const stringify = require('./lib/stringify');
const compile = require('./lib/compile');
const expand = require('./lib/expand');
const parse = require('./lib/parse');
/**
* Expand the given pattern or create a regex-compatible string.
*
* ```js
* const braces = require('braces');
* console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)']
* console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c']
* ```
* @param {String} `str`
* @param {Object} `options`
* @return {String}
* @api public
*/
const braces = (input, options = {}) => {
let output = [];
if (Array.isArray(input)) {
for (const pattern of input) {
const result = braces.create(pattern, options);
if (Array.isArray(result)) {
output.push(...result);
} else {
output.push(result);
}
}
} else {
output = [].concat(braces.create(input, options));
}
if (options && options.expand === true && options.nodupes === true) {
output = [...new Set(output)];
}
return output;
};
/**
* Parse the given `str` with the given `options`.
*
* ```js
* // braces.parse(pattern, [, options]);
* const ast = braces.parse('a/{b,c}/d');
* console.log(ast);
* ```
* @param {String} pattern Brace pattern to parse
* @param {Object} options
* @return {Object} Returns an AST
* @api public
*/
braces.parse = (input, options = {}) => parse(input, options);
/**
* Creates a braces string from an AST, or an AST node.
*
* ```js
* const braces = require('braces');
* let ast = braces.parse('foo/{a,b}/bar');
* console.log(stringify(ast.nodes[2])); //=> '{a,b}'
* ```
* @param {String} `input` Brace pattern or AST.
* @param {Object} `options`
* @return {Array} Returns an array of expanded values.
* @api public
*/
braces.stringify = (input, options = {}) => {
if (typeof input === 'string') {
return stringify(braces.parse(input, options), options);
}
return stringify(input, options);
};
/**
* Compiles a brace pattern into a regex-compatible, optimized string.
* This method is called by the main [braces](#braces) function by default.
*
* ```js
* const braces = require('braces');
* console.log(braces.compile('a/{b,c}/d'));
* //=> ['a/(b|c)/d']
* ```
* @param {String} `input` Brace pattern or AST.
* @param {Object} `options`
* @return {Array} Returns an array of expanded values.
* @api public
*/
braces.compile = (input, options = {}) => {
if (typeof input === 'string') {
input = braces.parse(input, options);
}
return compile(input, options);
};
/**
* Expands a brace pattern into an array. This method is called by the
* main [braces](#braces) function when `options.expand` is true. Before
* using this method it's recommended that you read the [performance notes](#performance))
* and advantages of using [.compile](#compile) instead.
*
* ```js
* const braces = require('braces');
* console.log(braces.expand('a/{b,c}/d'));
* //=> ['a/b/d', 'a/c/d'];
* ```
* @param {String} `pattern` Brace pattern
* @param {Object} `options`
* @return {Array} Returns an array of expanded values.
* @api public
*/
braces.expand = (input, options = {}) => {
if (typeof input === 'string') {
input = braces.parse(input, options);
}
let result = expand(input, options);
// filter out empty strings if specified
if (options.noempty === true) {
result = result.filter(Boolean);
}
// filter out duplicates if specified
if (options.nodupes === true) {
result = [...new Set(result)];
}
return result;
};
/**
* Processes a brace pattern and returns either an expanded array
* (if `options.expand` is true), a highly optimized regex-compatible string.
* This method is called by the main [braces](#braces) function.
*
* ```js
* const braces = require('braces');
* console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}'))
* //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)'
* ```
* @param {String} `pattern` Brace pattern
* @param {Object} `options`
* @return {Array} Returns an array of expanded values.
* @api public
*/
braces.create = (input, options = {}) => {
if (input === '' || input.length < 3) {
return [input];
}
return options.expand !== true
? braces.compile(input, options)
: braces.expand(input, options);
};
/**
* Expose "braces"
*/
module.exports = braces;

60
node_modules/braces/lib/compile.js generated vendored
View File

@ -1,60 +0,0 @@
'use strict';
const fill = require('fill-range');
const utils = require('./utils');
const compile = (ast, options = {}) => {
const walk = (node, parent = {}) => {
const invalidBlock = utils.isInvalidBrace(parent);
const invalidNode = node.invalid === true && options.escapeInvalid === true;
const invalid = invalidBlock === true || invalidNode === true;
const prefix = options.escapeInvalid === true ? '\\' : '';
let output = '';
if (node.isOpen === true) {
return prefix + node.value;
}
if (node.isClose === true) {
console.log('node.isClose', prefix, node.value);
return prefix + node.value;
}
if (node.type === 'open') {
return invalid ? prefix + node.value : '(';
}
if (node.type === 'close') {
return invalid ? prefix + node.value : ')';
}
if (node.type === 'comma') {
return node.prev.type === 'comma' ? '' : invalid ? node.value : '|';
}
if (node.value) {
return node.value;
}
if (node.nodes && node.ranges > 0) {
const args = utils.reduce(node.nodes);
const range = fill(...args, { ...options, wrap: false, toRegex: true, strictZeros: true });
if (range.length !== 0) {
return args.length > 1 && range.length > 1 ? `(${range})` : range;
}
}
if (node.nodes) {
for (const child of node.nodes) {
output += walk(child, node);
}
}
return output;
};
return walk(ast);
};
module.exports = compile;

57
node_modules/braces/lib/constants.js generated vendored
View File

@ -1,57 +0,0 @@
'use strict';
module.exports = {
MAX_LENGTH: 10000,
// Digits
CHAR_0: '0', /* 0 */
CHAR_9: '9', /* 9 */
// Alphabet chars.
CHAR_UPPERCASE_A: 'A', /* A */
CHAR_LOWERCASE_A: 'a', /* a */
CHAR_UPPERCASE_Z: 'Z', /* Z */
CHAR_LOWERCASE_Z: 'z', /* z */
CHAR_LEFT_PARENTHESES: '(', /* ( */
CHAR_RIGHT_PARENTHESES: ')', /* ) */
CHAR_ASTERISK: '*', /* * */
// Non-alphabetic chars.
CHAR_AMPERSAND: '&', /* & */
CHAR_AT: '@', /* @ */
CHAR_BACKSLASH: '\\', /* \ */
CHAR_BACKTICK: '`', /* ` */
CHAR_CARRIAGE_RETURN: '\r', /* \r */
CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */
CHAR_COLON: ':', /* : */
CHAR_COMMA: ',', /* , */
CHAR_DOLLAR: '$', /* . */
CHAR_DOT: '.', /* . */
CHAR_DOUBLE_QUOTE: '"', /* " */
CHAR_EQUAL: '=', /* = */
CHAR_EXCLAMATION_MARK: '!', /* ! */
CHAR_FORM_FEED: '\f', /* \f */
CHAR_FORWARD_SLASH: '/', /* / */
CHAR_HASH: '#', /* # */
CHAR_HYPHEN_MINUS: '-', /* - */
CHAR_LEFT_ANGLE_BRACKET: '<', /* < */
CHAR_LEFT_CURLY_BRACE: '{', /* { */
CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */
CHAR_LINE_FEED: '\n', /* \n */
CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */
CHAR_PERCENT: '%', /* % */
CHAR_PLUS: '+', /* + */
CHAR_QUESTION_MARK: '?', /* ? */
CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */
CHAR_RIGHT_CURLY_BRACE: '}', /* } */
CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */
CHAR_SEMICOLON: ';', /* ; */
CHAR_SINGLE_QUOTE: '\'', /* ' */
CHAR_SPACE: ' ', /* */
CHAR_TAB: '\t', /* \t */
CHAR_UNDERSCORE: '_', /* _ */
CHAR_VERTICAL_LINE: '|', /* | */
CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */
};

113
node_modules/braces/lib/expand.js generated vendored
View File

@ -1,113 +0,0 @@
'use strict';
const fill = require('fill-range');
const stringify = require('./stringify');
const utils = require('./utils');
const append = (queue = '', stash = '', enclose = false) => {
const result = [];
queue = [].concat(queue);
stash = [].concat(stash);
if (!stash.length) return queue;
if (!queue.length) {
return enclose ? utils.flatten(stash).map(ele => `{${ele}}`) : stash;
}
for (const item of queue) {
if (Array.isArray(item)) {
for (const value of item) {
result.push(append(value, stash, enclose));
}
} else {
for (let ele of stash) {
if (enclose === true && typeof ele === 'string') ele = `{${ele}}`;
result.push(Array.isArray(ele) ? append(item, ele, enclose) : item + ele);
}
}
}
return utils.flatten(result);
};
const expand = (ast, options = {}) => {
const rangeLimit = options.rangeLimit === undefined ? 1000 : options.rangeLimit;
const walk = (node, parent = {}) => {
node.queue = [];
let p = parent;
let q = parent.queue;
while (p.type !== 'brace' && p.type !== 'root' && p.parent) {
p = p.parent;
q = p.queue;
}
if (node.invalid || node.dollar) {
q.push(append(q.pop(), stringify(node, options)));
return;
}
if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) {
q.push(append(q.pop(), ['{}']));
return;
}
if (node.nodes && node.ranges > 0) {
const args = utils.reduce(node.nodes);
if (utils.exceedsLimit(...args, options.step, rangeLimit)) {
throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.');
}
let range = fill(...args, options);
if (range.length === 0) {
range = stringify(node, options);
}
q.push(append(q.pop(), range));
node.nodes = [];
return;
}
const enclose = utils.encloseBrace(node);
let queue = node.queue;
let block = node;
while (block.type !== 'brace' && block.type !== 'root' && block.parent) {
block = block.parent;
queue = block.queue;
}
for (let i = 0; i < node.nodes.length; i++) {
const child = node.nodes[i];
if (child.type === 'comma' && node.type === 'brace') {
if (i === 1) queue.push('');
queue.push('');
continue;
}
if (child.type === 'close') {
q.push(append(q.pop(), queue, enclose));
continue;
}
if (child.value && child.type !== 'open') {
queue.push(append(queue.pop(), child.value));
continue;
}
if (child.nodes) {
walk(child, node);
}
}
return queue;
};
return utils.flatten(walk(ast));
};
module.exports = expand;

331
node_modules/braces/lib/parse.js generated vendored
View File

@ -1,331 +0,0 @@
'use strict';
const stringify = require('./stringify');
/**
* Constants
*/
const {
MAX_LENGTH,
CHAR_BACKSLASH, /* \ */
CHAR_BACKTICK, /* ` */
CHAR_COMMA, /* , */
CHAR_DOT, /* . */
CHAR_LEFT_PARENTHESES, /* ( */
CHAR_RIGHT_PARENTHESES, /* ) */
CHAR_LEFT_CURLY_BRACE, /* { */
CHAR_RIGHT_CURLY_BRACE, /* } */
CHAR_LEFT_SQUARE_BRACKET, /* [ */
CHAR_RIGHT_SQUARE_BRACKET, /* ] */
CHAR_DOUBLE_QUOTE, /* " */
CHAR_SINGLE_QUOTE, /* ' */
CHAR_NO_BREAK_SPACE,
CHAR_ZERO_WIDTH_NOBREAK_SPACE
} = require('./constants');
/**
* parse
*/
const parse = (input, options = {}) => {
if (typeof input !== 'string') {
throw new TypeError('Expected a string');
}
const opts = options || {};
const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;
if (input.length > max) {
throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`);
}
const ast = { type: 'root', input, nodes: [] };
const stack = [ast];
let block = ast;
let prev = ast;
let brackets = 0;
const length = input.length;
let index = 0;
let depth = 0;
let value;
/**
* Helpers
*/
const advance = () => input[index++];
const push = node => {
if (node.type === 'text' && prev.type === 'dot') {
prev.type = 'text';
}
if (prev && prev.type === 'text' && node.type === 'text') {
prev.value += node.value;
return;
}
block.nodes.push(node);
node.parent = block;
node.prev = prev;
prev = node;
return node;
};
push({ type: 'bos' });
while (index < length) {
block = stack[stack.length - 1];
value = advance();
/**
* Invalid chars
*/
if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) {
continue;
}
/**
* Escaped chars
*/
if (value === CHAR_BACKSLASH) {
push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() });
continue;
}
/**
* Right square bracket (literal): ']'
*/
if (value === CHAR_RIGHT_SQUARE_BRACKET) {
push({ type: 'text', value: '\\' + value });
continue;
}
/**
* Left square bracket: '['
*/
if (value === CHAR_LEFT_SQUARE_BRACKET) {
brackets++;
let next;
while (index < length && (next = advance())) {
value += next;
if (next === CHAR_LEFT_SQUARE_BRACKET) {
brackets++;
continue;
}
if (next === CHAR_BACKSLASH) {
value += advance();
continue;
}
if (next === CHAR_RIGHT_SQUARE_BRACKET) {
brackets--;
if (brackets === 0) {
break;
}
}
}
push({ type: 'text', value });
continue;
}
/**
* Parentheses
*/
if (value === CHAR_LEFT_PARENTHESES) {
block = push({ type: 'paren', nodes: [] });
stack.push(block);
push({ type: 'text', value });
continue;
}
if (value === CHAR_RIGHT_PARENTHESES) {
if (block.type !== 'paren') {
push({ type: 'text', value });
continue;
}
block = stack.pop();
push({ type: 'text', value });
block = stack[stack.length - 1];
continue;
}
/**
* Quotes: '|"|`
*/
if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) {
const open = value;
let next;
if (options.keepQuotes !== true) {
value = '';
}
while (index < length && (next = advance())) {
if (next === CHAR_BACKSLASH) {
value += next + advance();
continue;
}
if (next === open) {
if (options.keepQuotes === true) value += next;
break;
}
value += next;
}
push({ type: 'text', value });
continue;
}
/**
* Left curly brace: '{'
*/
if (value === CHAR_LEFT_CURLY_BRACE) {
depth++;
const dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true;
const brace = {
type: 'brace',
open: true,
close: false,
dollar,
depth,
commas: 0,
ranges: 0,
nodes: []
};
block = push(brace);
stack.push(block);
push({ type: 'open', value });
continue;
}
/**
* Right curly brace: '}'
*/
if (value === CHAR_RIGHT_CURLY_BRACE) {
if (block.type !== 'brace') {
push({ type: 'text', value });
continue;
}
const type = 'close';
block = stack.pop();
block.close = true;
push({ type, value });
depth--;
block = stack[stack.length - 1];
continue;
}
/**
* Comma: ','
*/
if (value === CHAR_COMMA && depth > 0) {
if (block.ranges > 0) {
block.ranges = 0;
const open = block.nodes.shift();
block.nodes = [open, { type: 'text', value: stringify(block) }];
}
push({ type: 'comma', value });
block.commas++;
continue;
}
/**
* Dot: '.'
*/
if (value === CHAR_DOT && depth > 0 && block.commas === 0) {
const siblings = block.nodes;
if (depth === 0 || siblings.length === 0) {
push({ type: 'text', value });
continue;
}
if (prev.type === 'dot') {
block.range = [];
prev.value += value;
prev.type = 'range';
if (block.nodes.length !== 3 && block.nodes.length !== 5) {
block.invalid = true;
block.ranges = 0;
prev.type = 'text';
continue;
}
block.ranges++;
block.args = [];
continue;
}
if (prev.type === 'range') {
siblings.pop();
const before = siblings[siblings.length - 1];
before.value += prev.value + value;
prev = before;
block.ranges--;
continue;
}
push({ type: 'dot', value });
continue;
}
/**
* Text
*/
push({ type: 'text', value });
}
// Mark imbalanced braces and brackets as invalid
do {
block = stack.pop();
if (block.type !== 'root') {
block.nodes.forEach(node => {
if (!node.nodes) {
if (node.type === 'open') node.isOpen = true;
if (node.type === 'close') node.isClose = true;
if (!node.nodes) node.type = 'text';
node.invalid = true;
}
});
// get the location of the block on parent.nodes (block's siblings)
const parent = stack[stack.length - 1];
const index = parent.nodes.indexOf(block);
// replace the (invalid) block with its nodes
parent.nodes.splice(index, 1, ...block.nodes);
}
} while (stack.length > 0);
push({ type: 'eos' });
return ast;
};
module.exports = parse;
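Editor's note — for reference, a hedged sketch of the AST shape this parser produces for a simple brace pattern, inferred from the push/brace handling above.

// Sketch only: rough shape of parse('a{b,c}') per the logic above.
const parse = require('./parse');
const ast = parse('a{b,c}');
// ast.nodes ≈ [
//   { type: 'bos' },
//   { type: 'text', value: 'a' },
//   { type: 'brace', commas: 1, ranges: 0, nodes: [
//     { type: 'open', value: '{' },
//     { type: 'text', value: 'b' },
//     { type: 'comma', value: ',' },
//     { type: 'text', value: 'c' },
//     { type: 'close', value: '}' }
//   ] },
//   { type: 'eos' }
// ]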

32
node_modules/braces/lib/stringify.js generated vendored
View File

@ -1,32 +0,0 @@
'use strict';
const utils = require('./utils');
module.exports = (ast, options = {}) => {
const stringify = (node, parent = {}) => {
const invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent);
const invalidNode = node.invalid === true && options.escapeInvalid === true;
let output = '';
if (node.value) {
if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) {
return '\\' + node.value;
}
return node.value;
}
if (node.value) {
return node.value;
}
if (node.nodes) {
for (const child of node.nodes) {
output += stringify(child);
}
}
return output;
};
return stringify(ast);
};
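Editor's note — a short round-trip sketch: stringify rebuilds the input string from an AST, so parse's quote handling is visible in the output.

// Sketch only: round-tripping through parse + stringify.
const parse = require('./parse');
const stringify = require('./stringify');

stringify(parse('a{b,c}'));                       // 'a{b,c}'
stringify(parse('a"b"c'));                        // 'abc'   — quotes dropped by default
stringify(parse('a"b"c', { keepQuotes: true }));  // 'a"b"c'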

122
node_modules/braces/lib/utils.js generated vendored
View File

@ -1,122 +0,0 @@
'use strict';
exports.isInteger = num => {
if (typeof num === 'number') {
return Number.isInteger(num);
}
if (typeof num === 'string' && num.trim() !== '') {
return Number.isInteger(Number(num));
}
return false;
};
/**
* Find a node of the given type
*/
exports.find = (node, type) => node.nodes.find(node => node.type === type);
/**
 * Returns true if expanding a range from `min` to `max` with the given `step` would exceed `limit`
 */
exports.exceedsLimit = (min, max, step = 1, limit) => {
if (limit === false) return false;
if (!exports.isInteger(min) || !exports.isInteger(max)) return false;
return ((Number(max) - Number(min)) / Number(step)) >= limit;
};
/**
* Escape the given node with '\\' before node.value
*/
exports.escapeNode = (block, n = 0, type) => {
const node = block.nodes[n];
if (!node) return;
if ((type && node.type === type) || node.type === 'open' || node.type === 'close') {
if (node.escaped !== true) {
node.value = '\\' + node.value;
node.escaped = true;
}
}
};
/**
* Returns true if the given brace node should be enclosed in literal braces
*/
exports.encloseBrace = node => {
if (node.type !== 'brace') return false;
if ((node.commas >> 0) + (node.ranges >> 0) === 0) {
node.invalid = true;
return true;
}
return false;
};
/**
* Returns true if a brace node is invalid.
*/
exports.isInvalidBrace = block => {
if (block.type !== 'brace') return false;
if (block.invalid === true || block.dollar) return true;
if ((block.commas >> 0) + (block.ranges >> 0) === 0) {
block.invalid = true;
return true;
}
if (block.open !== true || block.close !== true) {
block.invalid = true;
return true;
}
return false;
};
/**
* Returns true if a node is an open or close node
*/
exports.isOpenOrClose = node => {
if (node.type === 'open' || node.type === 'close') {
return true;
}
return node.open === true || node.close === true;
};
/**
* Reduce an array of text nodes.
*/
exports.reduce = nodes => nodes.reduce((acc, node) => {
if (node.type === 'text') acc.push(node.value);
if (node.type === 'range') node.type = 'text';
return acc;
}, []);
/**
* Flatten an array
*/
exports.flatten = (...args) => {
const result = [];
const flat = arr => {
for (let i = 0; i < arr.length; i++) {
const ele = arr[i];
if (Array.isArray(ele)) {
flat(ele);
continue;
}
if (ele !== undefined) {
result.push(ele);
}
}
return result;
};
flat(args);
return result;
};
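Editor's note — a quick sketch of the two helpers expand.js leans on most, exceedsLimit and flatten (values chosen for illustration only).

// Sketch only.
const utils = require('./utils');

utils.exceedsLimit(1, 100, 1, 1000);   // false — (100 - 1) / 1 = 99 expansions
utils.exceedsLimit(1, 1e7, 1, 1000);   // true  — ~10 million expansions
utils.exceedsLimit(1, 1e7, 1, false);  // false — limit === false disables the guard
utils.flatten(['a', ['b', ['c']]]);    // ['a', 'b', 'c']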

77
node_modules/braces/package.json generated vendored
View File

@ -1,77 +0,0 @@
{
"name": "braces",
"description": "Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed.",
"version": "3.0.3",
"homepage": "https://github.com/micromatch/braces",
"author": "Jon Schlinkert (https://github.com/jonschlinkert)",
"contributors": [
"Brian Woodward (https://twitter.com/doowb)",
"Elan Shanker (https://github.com/es128)",
"Eugene Sharygin (https://github.com/eush77)",
"hemanth.hm (http://h3manth.com)",
"Jon Schlinkert (http://twitter.com/jonschlinkert)"
],
"repository": "micromatch/braces",
"bugs": {
"url": "https://github.com/micromatch/braces/issues"
},
"license": "MIT",
"files": [
"index.js",
"lib"
],
"main": "index.js",
"engines": {
"node": ">=8"
},
"scripts": {
"test": "mocha",
"benchmark": "node benchmark"
},
"dependencies": {
"fill-range": "^7.1.1"
},
"devDependencies": {
"ansi-colors": "^3.2.4",
"bash-path": "^2.0.1",
"gulp-format-md": "^2.0.0",
"mocha": "^6.1.1"
},
"keywords": [
"alpha",
"alphabetical",
"bash",
"brace",
"braces",
"expand",
"expansion",
"filepath",
"fill",
"fs",
"glob",
"globbing",
"letter",
"match",
"matches",
"matching",
"number",
"numerical",
"path",
"range",
"ranges",
"sh"
],
"verb": {
"toc": false,
"layout": "default",
"tasks": [
"readme"
],
"lint": {
"reflinks": true
},
"plugins": [
"gulp-format-md"
]
}
}

View File

@ -1,118 +0,0 @@
declare enum platforms {
All = "all",
Activision = "acti",
Battlenet = "battle",
PSN = "psn",
Steam = "steam",
Uno = "uno",
XBOX = "xbl",
ios = "ios",
NULL = "_"
}
declare enum games {
ModernWarfare = "mw",
ModernWarfare2 = "mw2",
Vanguard = "vg",
ColdWar = "cw",
NULL = "_"
}
declare enum friendActions {
Invite = "invite",
Uninvite = "uninvite",
Remove = "remove",
Block = "block",
Unblock = "unblock"
}
declare const enableDebugMode: () => boolean;
declare const disableDebugMode: () => boolean;
declare const login: (ssoToken: string) => boolean;
declare const telescopeLogin: (username: string, password: string) => Promise<boolean>;
declare class WZ {
fullData: (gamertag: string, platform: platforms) => Promise<unknown>;
combatHistory: (gamertag: string, platform: platforms) => Promise<unknown>;
combatHistoryWithDate: (gamertag: string, startTime: number, endTime: number, platform: platforms) => Promise<unknown>;
breakdown: (gamertag: string, platform: platforms) => Promise<unknown>;
breakdownWithDate: (gamertag: string, startTime: number, endTime: number, platform: platforms) => Promise<unknown>;
matchInfo: (matchId: string, platform: platforms) => Promise<unknown>;
cleanGameMode: (mode: string) => Promise<string>;
}
declare class MW {
fullData: (gamertag: string, platform: platforms) => Promise<unknown>;
combatHistory: (gamertag: string, platform: platforms) => Promise<unknown>;
combatHistoryWithDate: (gamertag: string, startTime: number, endTime: number, platform: platforms) => Promise<unknown>;
breakdown: (gamertag: string, platform: platforms) => Promise<unknown>;
breakdownWithDate: (gamertag: string, startTime: number, endTime: number, platform: platforms) => Promise<unknown>;
matchInfo: (matchId: string, platform: platforms) => Promise<unknown>;
seasonloot: (gamertag: string, platform: platforms) => Promise<unknown>;
mapList: (platform: platforms) => Promise<unknown>;
}
declare class MW2 {
fullData: (unoId: string) => Promise<unknown>;
matches: (unoId: string) => Promise<unknown>;
matchInfo: (unoId: string, matchId: string) => Promise<unknown>;
}
declare class WZ2 {
fullData: (unoId: string) => Promise<unknown>;
matches: (unoId: string) => Promise<unknown>;
matchInfo: (unoId: string, matchId: string) => Promise<unknown>;
}
declare class MW3 {
fullData: (unoId: string) => Promise<unknown>;
matches: (unoId: string) => Promise<unknown>;
matchInfo: (unoId: string, matchId: string) => Promise<unknown>;
}
declare class WZM {
fullData: (unoId: string) => Promise<unknown>;
matches: (unoId: string) => Promise<unknown>;
matchInfo: (unoId: string, matchId: string) => Promise<unknown>;
}
declare class CW {
fullData: (gamertag: string, platform: platforms) => Promise<unknown>;
combatHistory: (gamertag: string, platform: platforms) => Promise<unknown>;
combatHistoryWithDate: (gamertag: string, startTime: number, endTime: number, platform: platforms) => Promise<unknown>;
breakdown: (gamertag: string, platform: platforms) => Promise<unknown>;
breakdownWithDate: (gamertag: string, startTime: number, endTime: number, platform: platforms) => Promise<unknown>;
seasonloot: (gamertag: string, platform: platforms) => Promise<unknown>;
mapList: (platform: platforms) => Promise<unknown>;
matchInfo: (matchId: string, platform: platforms) => Promise<unknown>;
}
declare class VG {
fullData: (gamertag: string, platform: platforms) => Promise<unknown>;
combatHistory: (gamertag: string, platform: platforms) => Promise<unknown>;
combatHistoryWithDate: (gamertag: string, startTime: number, endTime: number, platform: platforms) => Promise<unknown>;
breakdown: (gamertag: string, platform: platforms) => Promise<unknown>;
breakdownWithDate: (gamertag: string, startTime: number, endTime: number, platform: platforms) => Promise<unknown>;
seasonloot: (gamertag: string, platform: platforms) => Promise<unknown>;
mapList: (platform: platforms) => Promise<unknown>;
matchInfo: (matchId: string, platform: platforms) => Promise<unknown>;
}
declare class SHOP {
purchasableItems: (gameId: string) => Promise<unknown>;
bundleInformation: (title: string, bundleId: string) => Promise<unknown>;
battlePassLoot: (title: games, season: number, platform: platforms) => Promise<unknown>;
}
declare class USER {
friendFeed: (gamertag: string, platform: platforms) => Promise<unknown>;
eventFeed: () => Promise<unknown>;
loggedInIdentities: () => Promise<unknown>;
codPoints: (gamertag: string, platform: platforms) => Promise<unknown>;
connectedAccounts: (gamertag: string, platform: platforms) => Promise<unknown>;
settings: (gamertag: string, platform: platforms) => Promise<unknown>;
friendAction: (gamertag: string, platform: platforms, action: friendActions) => Promise<unknown>;
}
declare class ALT {
search: (gamertag: string, platform: platforms) => Promise<unknown>;
cleanWeapon: (weapon: string) => Promise<string>;
}
declare const Warzone: WZ;
declare const ModernWarfare: MW;
declare const ModernWarfare2: MW2;
declare const Warzone2: WZ2;
declare const ModernWarfare3: MW3;
declare const WarzoneMobile: WZM;
declare const ColdWar: CW;
declare const Vanguard: VG;
declare const Store: SHOP;
declare const Me: USER;
declare const Misc: ALT;
export { login, telescopeLogin, platforms, friendActions, Warzone, ModernWarfare, ModernWarfare2, ModernWarfare3, WarzoneMobile, Warzone2, ColdWar, Vanguard, Store, Me, Misc, enableDebugMode, disableDebugMode, };
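Editor's note — a hedged usage sketch against the declarations above. The require path, token, and gamertag values are placeholders; the calls themselves are the exported login, telescopeLogin, Warzone, and ModernWarfare2 members declared above.

// Sketch only — package name and credentials are assumptions.
const API = require('call-of-duty-api');

(async () => {
  API.login('YOUR_SSO_TOKEN'); // returns false if the token is empty
  const wz = await API.Warzone.fullData('Gamertag#1234567', API.platforms.Activision);

  await API.telescopeLogin('email@example.com', 'password'); // newer titles use uno IDs
  const mw2 = await API.ModernWarfare2.fullData('12345678901234567');
  console.log(wz, mw2);
})();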

View File

@ -1,766 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.disableDebugMode = exports.enableDebugMode = exports.Misc = exports.Me = exports.Store = exports.Vanguard = exports.ColdWar = exports.Warzone2 = exports.WarzoneMobile = exports.ModernWarfare3 = exports.ModernWarfare2 = exports.ModernWarfare = exports.Warzone = exports.friendActions = exports.platforms = exports.telescopeLogin = exports.login = void 0;
const tslib_1 = require("tslib");
const undici_1 = require("undici");
const weapon_ids_json_1 = tslib_1.__importDefault(require("./wz-data/weapon-ids.json"));
const game_modes_json_1 = tslib_1.__importDefault(require("./wz-data/game-modes.json"));
const userAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36";
let baseCookie = "new_SiteId=cod;ACT_SSO_LOCALE=en_US;country=US;";
let baseSsoToken = "";
let debugMode = false;
let baseHeaders = {
"content-type": "application/json",
cookie: baseCookie,
"user-agent": userAgent,
};
let baseTelescopeHeaders = {
accept: "application/json, text/plain, */*",
"accept-language": "en-GB,en;q=0.9,en-US;q=0.8,fr;q=0.7,nl;q=0.6,et;q=0.5",
"cache-control": "no-cache",
pragma: "no-cache",
"sec-ch-ua": '"Chromium";v="118", "Microsoft Edge";v="118", "Not=A?Brand";v="99"',
"sec-ch-ua-mobile": "?0",
"sec-ch-ua-platform": '"Windows"',
"sec-fetch-dest": "empty",
"sec-fetch-mode": "cors",
"sec-fetch-site": "same-site",
};
let basePostHeaders = {
"content-type": "text/plain",
cookie: baseCookie,
"user-agent": userAgent,
};
let baseUrl = "https://my.callofduty.com";
let apiPath = "/api/papi-client";
let baseTelescopeUrl = "https://telescope.callofduty.com";
let apiTelescopePath = "/api/ts-api";
let loggedIn = false;
var platforms;
(function (platforms) {
platforms["All"] = "all";
platforms["Activision"] = "acti";
platforms["Battlenet"] = "battle";
platforms["PSN"] = "psn";
platforms["Steam"] = "steam";
platforms["Uno"] = "uno";
platforms["XBOX"] = "xbl";
platforms["ios"] = "ios";
platforms["NULL"] = "_";
})(platforms || (platforms = {}));
exports.platforms = platforms;
var games;
(function (games) {
games["ModernWarfare"] = "mw";
games["ModernWarfare2"] = "mw2";
games["Vanguard"] = "vg";
games["ColdWar"] = "cw";
games["NULL"] = "_";
})(games || (games = {}));
var telescopeGames;
(function (telescopeGames) {
telescopeGames["ModernWarfare2"] = "mw2";
telescopeGames["Warzone2"] = "wz2";
telescopeGames["ModernWarfare3"] = "jup";
telescopeGames["Mobile"] = "mgl";
})(telescopeGames || (telescopeGames = {}));
var modes;
(function (modes) {
modes["Multiplayer"] = "mp";
modes["Warzone"] = "wz";
modes["Warzone2"] = "wz2";
modes["NULL"] = "_";
})(modes || (modes = {}));
var telescopeModes;
(function (telescopeModes) {
telescopeModes["Multiplayer"] = "mp";
telescopeModes["Outbreak"] = "ob";
})(telescopeModes || (telescopeModes = {}));
var friendActions;
(function (friendActions) {
friendActions["Invite"] = "invite";
friendActions["Uninvite"] = "uninvite";
friendActions["Remove"] = "remove";
friendActions["Block"] = "block";
friendActions["Unblock"] = "unblock";
})(friendActions || (friendActions = {}));
exports.friendActions = friendActions;
var generics;
(function (generics) {
generics["STEAM_UNSUPPORTED"] = "Steam platform not supported by this game. Try `battle` instead.";
generics["UNO_NO_NUMERICAL_ID"] = "You must use a numerical ID when using the platform 'uno'.\nIf using an Activision ID, please use the platform 'acti'.";
})(generics || (generics = {}));
let telescopeUnoToken = "";
const enableDebugMode = () => (debugMode = true);
exports.enableDebugMode = enableDebugMode;
const disableDebugMode = () => (debugMode = false);
exports.disableDebugMode = disableDebugMode;
const sendTelescopeRequest = (url) => tslib_1.__awaiter(void 0, void 0, void 0, function* () {
try {
if (!loggedIn)
throw new Error("Not Logged In!");
let requestUrl = `${baseTelescopeUrl}${apiTelescopePath}${url}`;
if (debugMode)
console.log(`[DEBUG]`, `Request Uri: ${requestUrl}`);
baseTelescopeHeaders.authorization = `Bearer ${telescopeUnoToken}`;
const { body, statusCode } = yield (0, undici_1.request)(requestUrl, {
headers: baseTelescopeHeaders,
});
if (statusCode >= 500)
throw new Error(`Received status code: '${statusCode}'. Route may be down or not exist.`);
let response = yield body.json();
return response;
}
catch (exception) {
throw exception;
}
});
const sendRequest = (url) => tslib_1.__awaiter(void 0, void 0, void 0, function* () {
try {
if (!loggedIn)
throw new Error("Not Logged In.");
let requestUrl = `${baseUrl}${apiPath}${url}`;
if (debugMode)
console.log(`[DEBUG]`, `Request Uri: ${requestUrl}`);
if (debugMode)
console.time("Round Trip");
const { body, statusCode } = yield (0, undici_1.request)(requestUrl, {
headers: baseHeaders,
});
if (debugMode)
console.timeEnd("Round Trip");
if (statusCode >= 500)
throw new Error(`Received status code: '${statusCode}'. Route may be down or not exist.`);
let response = yield body.json();
if (debugMode)
console.log(`[DEBUG]`, `Body Size: ${JSON.stringify(response).length} bytes.`);
return response;
}
catch (exception) {
throw exception;
}
});
const sendPostRequest = (url, data) => tslib_1.__awaiter(void 0, void 0, void 0, function* () {
try {
if (!loggedIn)
throw new Error("Not Logged In.");
let requestUrl = `${baseUrl}${apiPath}${url}`;
const { body, statusCode } = yield (0, undici_1.request)(requestUrl, {
method: "POST",
headers: basePostHeaders,
body: data,
});
if (statusCode >= 500)
throw new Error(`Received status code: '${statusCode}'. Route may be down or not exist.`);
let response = yield body.json();
return response;
}
catch (exception) {
throw exception;
}
});
const cleanClientName = (gamertag) => {
return encodeURIComponent(gamertag);
};
const login = (ssoToken) => {
if (!ssoToken || ssoToken.trim().length <= 0)
return false;
let fakeXSRF = "68e8b62e-1d9d-4ce1-b93f-cbe5ff31a041";
baseHeaders["X-XSRF-TOKEN"] = fakeXSRF;
baseHeaders["X-CSRF-TOKEN"] = fakeXSRF;
baseHeaders["Atvi-Auth"] = ssoToken;
baseHeaders["ACT_SSO_COOKIE"] = ssoToken;
baseHeaders["atkn"] = ssoToken;
baseHeaders["cookie"] = `${baseCookie}ACT_SSO_COOKIE=${ssoToken};XSRF-TOKEN=${fakeXSRF};API_CSRF_TOKEN=${fakeXSRF};ACT_SSO_EVENT="LOGIN_SUCCESS:1644346543228";ACT_SSO_COOKIE_EXPIRY=1645556143194;comid=cod;ssoDevId=63025d09c69f47dfa2b8d5520b5b73e4;tfa_enrollment_seen=true;gtm.custom.bot.flag=human;`;
baseSsoToken = ssoToken;
basePostHeaders["X-XSRF-TOKEN"] = fakeXSRF;
basePostHeaders["X-CSRF-TOKEN"] = fakeXSRF;
basePostHeaders["Atvi-Auth"] = ssoToken;
basePostHeaders["ACT_SSO_COOKIE"] = ssoToken;
basePostHeaders["atkn"] = ssoToken;
basePostHeaders["cookie"] = `${baseCookie}ACT_SSO_COOKIE=${ssoToken};XSRF-TOKEN=${fakeXSRF};API_CSRF_TOKEN=${fakeXSRF};ACT_SSO_EVENT="LOGIN_SUCCESS:1644346543228";ACT_SSO_COOKIE_EXPIRY=1645556143194;comid=cod;ssoDevId=63025d09c69f47dfa2b8d5520b5b73e4;tfa_enrollment_seen=true;gtm.custom.bot.flag=human;`;
loggedIn = true;
return loggedIn;
};
exports.login = login;
const telescope_login_endpoint = "https://wzm-ios-loginservice.prod.demonware.net/v1/login/uno/?titleID=7100&client=shg-cod-jup-bnet";
const telescopeLogin = (username, password) => tslib_1.__awaiter(void 0, void 0, void 0, function* () {
if (!username || !password)
return false;
const { body, statusCode } = yield (0, undici_1.request)(telescope_login_endpoint, {
method: "POST",
headers: baseHeaders,
body: JSON.stringify({
platform: "ios",
hardwareType: "ios",
auth: {
email: username,
password: password,
},
version: 1492,
}),
});
if (statusCode === 200) {
let response = (yield body.json());
let unoToken = response.umbrella.accessToken;
telescopeUnoToken = unoToken;
}
else if (statusCode === 403) {
let errorResponse = (yield body.json());
console.error("Error Logging In:", errorResponse.error.msg);
}
loggedIn = statusCode == 200;
return loggedIn;
});
exports.telescopeLogin = telescopeLogin;
const handleLookupType = (platform) => {
return platform === platforms.Uno ? "id" : "gamer";
};
const checkForValidPlatform = (platform, gamertag) => {
if (!Object.values(platforms).includes(platform))
throw new Error(`Platform '${platform}' is not valid.\nTry one of the following:\n${JSON.stringify(Object.values(platforms), null, 2)}`);
if (gamertag && isNaN(Number(gamertag)) && platform === platforms.Uno)
throw new Error(generics.UNO_NO_NUMERICAL_ID);
};
const mapGamertagToPlatform = (gamertag, platform, steamSupport = false) => {
checkForValidPlatform(platform, gamertag);
const lookupType = handleLookupType(platform);
if (!steamSupport && platform === platforms.Steam)
throw new Error(generics.STEAM_UNSUPPORTED);
if (platform == platforms.Battlenet ||
platform == platforms.Activision ||
platform == platforms.Uno)
if (gamertag && gamertag.length > 0)
gamertag = cleanClientName(gamertag);
if (platform === platforms.Uno || platform === platforms.Activision)
platform = platforms.Uno;
return { gamertag, _platform: platform, lookupType };
};
class Endpoints {
constructor(game, gamertag, platform, mode, lookupType) {
this.fullData = () => `/stats/cod/v1/title/${this.game}/platform/${this.platform}/${this.lookupType}/${this.gamertag}/profile/type/${this.mode}`;
this.combatHistory = () => `/crm/cod/v2/title/${this.game}/platform/${this.platform}/${this.lookupType}/${this.gamertag}/matches/${this.mode}/start/0/end/0/details`;
this.combatHistoryWithDate = (startTime, endTime) => `/crm/cod/v2/title/${this.game}/platform/${this.platform}/${this.lookupType}/${this.gamertag}/matches/${this.mode}/start/${startTime}/end/${endTime}/details`;
this.breakdown = () => `/crm/cod/v2/title/${this.game}/platform/${this.platform}/${this.lookupType}/${this.gamertag}/matches/${this.mode}/start/0/end/0`;
this.breakdownWithDate = (startTime, endTime) => `/crm/cod/v2/title/${this.game}/platform/${this.platform}/${this.lookupType}/${this.gamertag}/matches/${this.mode}/start/${startTime}/end/${endTime}`;
this.matchInfo = (matchId) => `/crm/cod/v2/title/${this.game}/platform/${this.platform}/fullMatch/wz/${matchId}/en`;
this.seasonLoot = () => `/loot/title/${this.game}/platform/${this.platform}/${this.lookupType}/${this.gamertag}/status/en`;
this.mapList = () => `/ce/v1/title/${this.game}/platform/${this.platform}/gameType/${this.mode}/communityMapData/availability`;
this.purchasableItems = (gameId) => `/inventory/v1/title/${gameId}/platform/psn/purchasable/public/en`;
this.bundleInformation = (gameId, bundleId) => `/inventory/v1/title/${gameId}/bundle/${bundleId}/en`;
this.battlePassLoot = (season) => `/loot/title/${this.game}/platform/${this.platform}/list/loot_season_${season}/en`;
this.friendFeed = () => `/userfeed/v1/friendFeed/platform/${this.platform}/${this.lookupType}/${this.gamertag}/friendFeedEvents/en`;
this.eventFeed = () => `/userfeed/v1/friendFeed/rendered/en/${baseSsoToken}`;
this.loggedInIdentities = () => `/crm/cod/v2/identities/${baseSsoToken}`;
this.codPoints = () => `/inventory/v1/title/mw/platform/${this.platform}/${this.lookupType}/${this.gamertag}/currency`;
this.connectedAccounts = () => `/crm/cod/v2/accounts/platform/${this.platform}/${this.lookupType}/${this.gamertag}`;
this.settings = () => `/preferences/v1/platform/${this.platform}/${this.lookupType}/${this.gamertag}/list`;
this.friendAction = (action) => `/codfriends/v1/${action}/${this.platform}/${this.lookupType}/${this.gamertag}`;
this.search = () => `/crm/cod/v2/platform/${this.platform}/username/${this.gamertag}/search`;
this.game = game;
this.gamertag = gamertag;
this.platform = platform;
this.lookupType = lookupType;
this.mode = mode;
}
}
class TelescopeEndpoints {
constructor(game, unoId, mode) {
this.lifeTime = () => `/cr/v1/title/${this.game}/lifetime?language=english&unoId=${this.unoId}`;
this.matches = () => `/cr/v1/title/${this.game}/matches?language=english&unoId=${this.unoId}`;
this.match = (matchId) => `/cr/v1/title/${this.game}/match/${matchId}?language=english&unoId=${this.unoId}`;
this.game = game;
this.unoId = unoId;
this.mode = mode;
}
}
class WZ {
constructor() {
this.fullData = (gamertag, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.ModernWarfare, gamertag, platform, modes.Warzone, lookupType);
return yield sendRequest(endpoint.fullData());
}); };
this.combatHistory = (gamertag, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.ModernWarfare, gamertag, platform, modes.Warzone, lookupType);
return yield sendRequest(endpoint.combatHistory());
}); };
this.combatHistoryWithDate = (gamertag, startTime, endTime, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.ModernWarfare, gamertag, platform, modes.Warzone, lookupType);
return yield sendRequest(endpoint.combatHistoryWithDate(startTime, endTime));
}); };
this.breakdown = (gamertag, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.ModernWarfare, gamertag, platform, modes.Warzone, lookupType);
return yield sendRequest(endpoint.breakdown());
}); };
this.breakdownWithDate = (gamertag, startTime, endTime, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.ModernWarfare, gamertag, platform, modes.Warzone, lookupType);
return yield sendRequest(endpoint.breakdownWithDate(startTime, endTime));
}); };
this.matchInfo = (matchId, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform("", platform));
const endpoint = new Endpoints(games.ModernWarfare, gamertag, platform, modes.Warzone, lookupType);
return yield sendRequest(endpoint.matchInfo(matchId));
}); };
this.cleanGameMode = (mode) => tslib_1.__awaiter(this, void 0, void 0, function* () {
//@ts-ignore
const foundMode = game_modes_json_1.default["modes"][mode];
if (!foundMode)
return mode;
return foundMode;
});
}
}
class MW {
constructor() {
this.fullData = (gamertag, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.ModernWarfare, gamertag, platform, modes.Multiplayer, lookupType);
return yield sendRequest(endpoint.fullData());
}); };
this.combatHistory = (gamertag, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.ModernWarfare, gamertag, platform, modes.Multiplayer, lookupType);
return yield sendRequest(endpoint.combatHistory());
}); };
this.combatHistoryWithDate = (gamertag, startTime, endTime, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.ModernWarfare, gamertag, platform, modes.Multiplayer, lookupType);
return yield sendRequest(endpoint.combatHistoryWithDate(startTime, endTime));
}); };
this.breakdown = (gamertag, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.ModernWarfare, gamertag, platform, modes.Multiplayer, lookupType);
return yield sendRequest(endpoint.breakdown());
}); };
this.breakdownWithDate = (gamertag, startTime, endTime, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.ModernWarfare, gamertag, platform, modes.Multiplayer, lookupType);
return yield sendRequest(endpoint.breakdownWithDate(startTime, endTime));
}); };
this.matchInfo = (matchId, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform("", platform));
const endpoint = new Endpoints(games.ModernWarfare, gamertag, platform, modes.Multiplayer, lookupType);
return yield sendRequest(endpoint.matchInfo(matchId));
}); };
this.seasonloot = (gamertag, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.ModernWarfare, gamertag, platform, modes.Multiplayer, lookupType);
return yield sendRequest(endpoint.seasonLoot());
}); };
this.mapList = (platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform("", platform));
const endpoint = new Endpoints(games.ModernWarfare, gamertag, platform, modes.Multiplayer, lookupType);
return yield sendRequest(endpoint.mapList());
}); };
}
}
class MW2 {
constructor() {
this.fullData = (unoId) => tslib_1.__awaiter(this, void 0, void 0, function* () {
var { gamertag } = mapGamertagToPlatform(unoId, platforms.Uno, true);
const endpoint = new TelescopeEndpoints(telescopeGames.ModernWarfare2, gamertag, telescopeModes.Multiplayer);
return yield sendTelescopeRequest(endpoint.lifeTime());
});
this.matches = (unoId) => tslib_1.__awaiter(this, void 0, void 0, function* () {
var { gamertag } = mapGamertagToPlatform(unoId, platforms.Uno, true);
const endpoint = new TelescopeEndpoints(telescopeGames.ModernWarfare2, gamertag, telescopeModes.Multiplayer);
return yield sendTelescopeRequest(endpoint.matches());
});
this.matchInfo = (unoId, matchId) => tslib_1.__awaiter(this, void 0, void 0, function* () {
var { gamertag } = mapGamertagToPlatform(unoId, platforms.Uno, true);
const endpoint = new TelescopeEndpoints(telescopeGames.ModernWarfare2, gamertag, telescopeModes.Multiplayer);
return yield sendTelescopeRequest(endpoint.match(matchId));
});
}
}
class WZ2 {
constructor() {
this.fullData = (unoId) => tslib_1.__awaiter(this, void 0, void 0, function* () {
var { gamertag } = mapGamertagToPlatform(unoId, platforms.Uno, true);
const endpoint = new TelescopeEndpoints(telescopeGames.Warzone2, gamertag, telescopeModes.Multiplayer);
return yield sendTelescopeRequest(endpoint.lifeTime());
});
this.matches = (unoId) => tslib_1.__awaiter(this, void 0, void 0, function* () {
var { gamertag } = mapGamertagToPlatform(unoId, platforms.Uno, true);
const endpoint = new TelescopeEndpoints(telescopeGames.Warzone2, gamertag, telescopeModes.Multiplayer);
return yield sendTelescopeRequest(endpoint.matches());
});
this.matchInfo = (unoId, matchId) => tslib_1.__awaiter(this, void 0, void 0, function* () {
var { gamertag } = mapGamertagToPlatform(unoId, platforms.Uno, true);
const endpoint = new TelescopeEndpoints(telescopeGames.Warzone2, gamertag, telescopeModes.Multiplayer);
return yield sendTelescopeRequest(endpoint.match(matchId));
});
}
}
class MW3 {
constructor() {
this.fullData = (unoId) => tslib_1.__awaiter(this, void 0, void 0, function* () {
var { gamertag } = mapGamertagToPlatform(unoId, platforms.Uno, true);
const endpoint = new TelescopeEndpoints(telescopeGames.ModernWarfare3, gamertag, telescopeModes.Multiplayer);
return yield sendTelescopeRequest(endpoint.lifeTime());
});
this.matches = (unoId) => tslib_1.__awaiter(this, void 0, void 0, function* () {
var { gamertag } = mapGamertagToPlatform(unoId, platforms.Uno, true);
const endpoint = new TelescopeEndpoints(telescopeGames.ModernWarfare3, gamertag, telescopeModes.Multiplayer);
return yield sendTelescopeRequest(endpoint.matches());
});
this.matchInfo = (unoId, matchId) => tslib_1.__awaiter(this, void 0, void 0, function* () {
var { gamertag } = mapGamertagToPlatform(unoId, platforms.Uno, true);
const endpoint = new TelescopeEndpoints(telescopeGames.ModernWarfare3, gamertag, telescopeModes.Multiplayer);
return yield sendTelescopeRequest(endpoint.match(matchId));
});
}
}
class WZM {
constructor() {
this.fullData = (unoId) => tslib_1.__awaiter(this, void 0, void 0, function* () {
var { gamertag } = mapGamertagToPlatform(unoId, platforms.Uno, true);
const endpoint = new TelescopeEndpoints(telescopeGames.Mobile, gamertag, telescopeModes.Multiplayer);
return yield sendTelescopeRequest(endpoint.lifeTime());
});
this.matches = (unoId) => tslib_1.__awaiter(this, void 0, void 0, function* () {
var { gamertag } = mapGamertagToPlatform(unoId, platforms.Uno, true);
const endpoint = new TelescopeEndpoints(telescopeGames.Mobile, gamertag, telescopeModes.Multiplayer);
return yield sendTelescopeRequest(endpoint.matches());
});
this.matchInfo = (unoId, matchId) => tslib_1.__awaiter(this, void 0, void 0, function* () {
var { gamertag } = mapGamertagToPlatform(unoId, platforms.Uno, true);
const endpoint = new TelescopeEndpoints(telescopeGames.Mobile, gamertag, telescopeModes.Multiplayer);
return yield sendTelescopeRequest(endpoint.match(matchId));
});
}
}
class CW {
constructor() {
this.fullData = (gamertag, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.ColdWar, gamertag, platform, modes.Multiplayer, lookupType);
return yield sendRequest(endpoint.fullData());
}); };
this.combatHistory = (gamertag, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.ColdWar, gamertag, platform, modes.Multiplayer, lookupType);
return yield sendRequest(endpoint.combatHistory());
}); };
this.combatHistoryWithDate = (gamertag, startTime, endTime, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.ColdWar, gamertag, platform, modes.Multiplayer, lookupType);
return yield sendRequest(endpoint.combatHistoryWithDate(startTime, endTime));
}); };
this.breakdown = (gamertag, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.ColdWar, gamertag, platform, modes.Multiplayer, lookupType);
return yield sendRequest(endpoint.breakdown());
}); };
this.breakdownWithDate = (gamertag, startTime, endTime, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.ColdWar, gamertag, platform, modes.Multiplayer, lookupType);
return yield sendRequest(endpoint.breakdownWithDate(startTime, endTime));
}); };
this.seasonloot = (gamertag, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.ColdWar, gamertag, platform, modes.Multiplayer, lookupType);
return yield sendRequest(endpoint.seasonLoot());
}); };
this.mapList = (platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform("", platform));
const endpoint = new Endpoints(games.ColdWar, gamertag, platform, modes.Multiplayer, lookupType);
return yield sendRequest(endpoint.mapList());
}); };
this.matchInfo = (matchId, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform("", platform));
const endpoint = new Endpoints(games.ColdWar, gamertag, platform, modes.Multiplayer, lookupType);
return yield sendRequest(endpoint.matchInfo(matchId));
}); };
}
}
class VG {
constructor() {
this.fullData = (gamertag, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.Vanguard, gamertag, platform, modes.Multiplayer, lookupType);
return yield sendRequest(endpoint.fullData());
}); };
this.combatHistory = (gamertag, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.Vanguard, gamertag, platform, modes.Multiplayer, lookupType);
return yield sendRequest(endpoint.combatHistory());
}); };
this.combatHistoryWithDate = (gamertag, startTime, endTime, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.Vanguard, gamertag, platform, modes.Multiplayer, lookupType);
return yield sendRequest(endpoint.combatHistoryWithDate(startTime, endTime));
}); };
this.breakdown = (gamertag, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.Vanguard, gamertag, platform, modes.Multiplayer, lookupType);
return yield sendRequest(endpoint.breakdown());
}); };
this.breakdownWithDate = (gamertag, startTime, endTime, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.Vanguard, gamertag, platform, modes.Multiplayer, lookupType);
return yield sendRequest(endpoint.breakdownWithDate(startTime, endTime));
}); };
this.seasonloot = (gamertag, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.Vanguard, gamertag, platform, modes.Multiplayer, lookupType);
return yield sendRequest(endpoint.seasonLoot());
}); };
this.mapList = (platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform("", platform));
const endpoint = new Endpoints(games.Vanguard, gamertag, platform, modes.Multiplayer, lookupType);
return yield sendRequest(endpoint.mapList());
}); };
this.matchInfo = (matchId, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform("", platform));
const endpoint = new Endpoints(games.Vanguard, gamertag, platform, modes.Multiplayer, lookupType);
return yield sendRequest(endpoint.matchInfo(matchId));
}); };
}
}
class SHOP {
constructor() {
this.purchasableItems = (gameId) => tslib_1.__awaiter(this, void 0, void 0, function* () {
const endpoint = new Endpoints(games.NULL, "", platforms.NULL, modes.NULL, "");
return yield sendRequest(endpoint.purchasableItems(gameId));
});
this.bundleInformation = (title, bundleId) => tslib_1.__awaiter(this, void 0, void 0, function* () {
const endpoint = new Endpoints(games.NULL, "", platforms.NULL, modes.NULL, "");
return yield sendRequest(endpoint.bundleInformation(title, bundleId));
});
this.battlePassLoot = (title, season, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform("", platform));
const endpoint = new Endpoints(title, gamertag, platform, modes.NULL, lookupType);
return yield sendRequest(endpoint.battlePassLoot(season));
}); };
}
}
class USER {
constructor() {
this.friendFeed = (gamertag, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.NULL, gamertag, platform, modes.NULL, lookupType);
return yield sendRequest(endpoint.friendFeed());
}); };
this.eventFeed = () => tslib_1.__awaiter(this, void 0, void 0, function* () {
const endpoint = new Endpoints(games.NULL, "", platforms.NULL, modes.NULL, "");
return yield sendRequest(endpoint.eventFeed());
});
this.loggedInIdentities = () => tslib_1.__awaiter(this, void 0, void 0, function* () {
const endpoint = new Endpoints(games.NULL, "", platforms.NULL, modes.NULL, "");
return yield sendRequest(endpoint.loggedInIdentities());
});
this.codPoints = (gamertag, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.NULL, gamertag, platform, modes.NULL, lookupType);
return yield sendRequest(endpoint.codPoints());
}); };
this.connectedAccounts = (gamertag, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.NULL, gamertag, platform, modes.NULL, lookupType);
return yield sendRequest(endpoint.connectedAccounts());
}); };
this.settings = (gamertag, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.NULL, gamertag, platform, modes.NULL, lookupType);
return yield sendRequest(endpoint.settings());
}); };
this.friendAction = (gamertag, platform, action) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform));
const endpoint = new Endpoints(games.NULL, gamertag, platform, modes.NULL, lookupType);
return yield sendPostRequest(endpoint.friendAction(action), "{}");
}); };
}
}
class ALT {
constructor() {
this.search = (gamertag, platform) => { var gamertag, platform, lookupType; return tslib_1.__awaiter(this, void 0, void 0, function* () {
({
gamertag,
_platform: platform,
lookupType
} = mapGamertagToPlatform(gamertag, platform, true));
const endpoint = new Endpoints(games.NULL, gamertag, platform, modes.NULL, lookupType);
return yield sendRequest(endpoint.search());
}); };
this.cleanWeapon = (weapon) => tslib_1.__awaiter(this, void 0, void 0, function* () {
//@ts-ignore
const foundWeapon = weapon_ids_json_1.default["All Weapons"][weapon];
if (!foundWeapon)
return weapon;
return foundWeapon;
});
}
}
const Warzone = new WZ();
exports.Warzone = Warzone;
const ModernWarfare = new MW();
exports.ModernWarfare = ModernWarfare;
const ModernWarfare2 = new MW2();
exports.ModernWarfare2 = ModernWarfare2;
const Warzone2 = new WZ2();
exports.Warzone2 = Warzone2;
const ModernWarfare3 = new MW3();
exports.ModernWarfare3 = ModernWarfare3;
const WarzoneMobile = new WZM();
exports.WarzoneMobile = WarzoneMobile;
const ColdWar = new CW();
exports.ColdWar = ColdWar;
const Vanguard = new VG();
exports.Vanguard = Vanguard;
const Store = new SHOP();
exports.Store = Store;
const Me = new USER();
exports.Me = Me;
const Misc = new ALT();
exports.Misc = Misc;
//# sourceMappingURL=index.js.map
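Editor's note — a sketch of how the compiled helpers above assemble a request URL for a typical call, since the transpiled output obscures the flow.

// Sketch only: Warzone.fullData('Name#123', platforms.Activision) above resolves to
//   mapGamertagToPlatform → { gamertag: 'Name%23123', _platform: 'uno', lookupType: 'gamer' }
//   Endpoints.fullData()  → /stats/cod/v1/title/mw/platform/uno/gamer/Name%23123/profile/type/wz
//   sendRequest()         → https://my.callofduty.com/api/papi-client + that path,
//                           sent with the Atvi-Auth / ACT_SSO_COOKIE headers set by login().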

File diff suppressed because one or more lines are too long

View File

@ -1,153 +0,0 @@
{
"br_25": "BR Trios",
"br_dmz_38": "Plunder Quads",
"br_71": "BR Solos",
"br_74": "BR Trios",
"br_dmz_76": "Plunder Quads",
"br_77": "BR Scopes & Scatterguns",
"br_dmz_85": "Plunder Duos",
"br_86": "Realism Battle Royale",
"br_87": "BR Solos",
"br_88": "BR Duos",
"br_89": "BR Quads",
"br_dmz_104": "Blood Money",
"brtdm_113": "Warzone Rumble",
"brtdm_rmbl": "Warzone Rumble",
"br_brsolo": "BR Solos",
"br_brduos": "BR Duos",
"br_brtrios": "BR Trios",
"br_brquads": "BR Quads",
"br_br_real": "Realism Battle Royale",
"br_dmz_plnbld": "Plunder Blood Money",
"br_brthquad": "BR 200 Quads",
"br_brduostim_name2": "BR Stimulus Duos",
"br_brtriostim_name2": "BR Stimulus Trios",
"br_dmz_pluntrios": "Plunder Trios",
"br_dmz_plunquad": "Plunder Quads",
"br_jugg_brtriojugr": "Juggernaut Royal Trios",
"br_jugg_brquadjugr": "Juggernaut Royal Quads",
"br_mini_miniroyale": "Mini Royale",
"br_brbbsolo": "BR Buyback Solos",
"br_brbbduo": "BR Buyback Duos",
"br_brbbtrio": "BR Buyback Trios",
"br_brbbquad": "BR Buyback Quads",
"br_miniroyale": "Mini Royale",
"br_kingslayer_kingsltrios": "King Slayer Trios",
"br_truckwar_trwarsquads": "Armored Royale",
"br_dmz_plndtrios": "Plunder Trios",
"br_dmz_plndquad": "Plunder Quads",
"br_zxp_zmbroy": "Zombie Royale",
"br_jugg_jugpmpkn": "Juggourdnaut Royale",
"br_brsolohwn": "BR Solo Survivor",
"br_brduohwn": "BR Duo Die",
"br_brhwntrios": "BR Trick-Or-Trios",
"br_brhwnquad": "BR Monster Quads",
"br_dmz_plndcndy": "Plunder: Candy Collector",
"br_dmz_bldmnytrio": "Blood Money Trios",
"br_dmz_bldmnyquad": "Blood Money Quads",
"br_rebirth_rbrthduos": "Rebirth Duos",
"br_rebirth_rbrthtrios": "Rebirth Trios",
"br_rebirth_rbrthquad": "Rebirth Quads",
"br_mini_rebirth_mini_royale_solo": "Rebirth Mini Royale Solos",
"br_mini_rebirth_mini_royale_duos": "Rebirth Mini Royale Duos",
"br_mini_rebirth_mini_royale_trios": "Rebirth Mini Royale Trios",
"br_mini_rebirth_mini_royale_quads": "Rebirth Mini Royale Quads",
"brtdm_wzrumval2": "Warzone Rumble in the Sheets",
"br_dmz_plndval1": "Love And Plunder",
"br_rebirth_rbrthex": "Resurgence Extreme",
"br_exfiltrios": "Exfiltration Trios",
"br_rbrthduos": "Rebirth Resurgence Duos",
"br_rbrthquad": "Rebirth Resurgence Quads",
"br_brz_brduos": "BR Duos (Containment Protocol Event)",
"br_brz_brtrios": "BR Trios (Containment Protocol Event)",
"br_brz_brquads": "BR Quads (Containment Protocol Event)",
"br_kingslayer_rebirth_king_slayer": "Rebirth King Slayer",
"br_reveal_dov": "Destruction Of Verdansk Part 1",
"br_reveal_2_dov2": "Destruction Of Verdansk Part 2",
"br_brdov_dov2": "Destruction Of Verdansk Part 2 Verdansk '84",
"br_rebirth_resurgence_trios": "Verdansk Resurgence Trios",
"br_bodycount_pwergrb": "Power Grab",
"br_rebirth_resurgence_mini": "Verdansk Resurgence Mini",
"br_plnbld": "Blood Money",
"br_plndtrios": "Plunder Trios",
"br_payload_payload": "Payload",
"br_x2_br_reveal_x2_event/event_title_x2": "Battle of Verdansk",
"br_rumble_lua_menu_mp/clash": "Rebirth Payload",
"br_rumble_clash": "Clash",
"br_dbd_dbd": "Iron Trials '84",
"br_rebirth_rebirth_rex": "Rebirth Extreme",
"br_rebirth_shsnp_name3": "Rebirth Scopes & Scatterguns",
"br_payload_pay_prom": "Payload - Promenade",
"br_gxp_gov": "Ghosts Of Verdansk",
"br_dbd_iron_trials_solos": "Iron Trials Solos",
"br_dbd_iron_trials_duos": "Iron Trials Duos",
"br_dbd_iron_trials_trios": "Iron Trials Trios",
"br_dbd_iron_trials_quads": "Iron Trials Quads",
"br_buy_back_solo": "BR Buy Back Solos",
"br_buy_back_duos": "BR Buy Back Duos",
"br_buy_back_trios": "BR Buy Back Trios",
"br_buy_back_quads": "BR Buy Back Quads",
"br_rebirth_rust_1v1": "Rebirth Resurgence Duos",
"br_vov_op_flash": "Operation: Flashback",
"br_lep_br_lep_event/ltm_gamemode": "Final Hours of Verdansk",
"br_vg_royale_solo": "Vanguard Royale Solos",
"br_vg_royale_duos": "Vanguard Royale Duos",
"br_vg_royale_trios": "Vanguard Royale Trios",
"br_vg_royale_quads": "Vanguard Royale Quads",
"br_br_solo": "BR Solos",
"br_br_duos": "BR Duos",
"br_br_trios": "BR Trios",
"br_br_quads": "BR Quads",
"br_rebirth_vg_res_44": "Vanguard Resurgence",
"br_rebirth_cal_res_royale": "Caldera Resurgence Quads",
"br_dmz_plnduo": "Plunder Duos",
"br_dmz_vg_pln_trios": "Vanguard Plunder Trios",
"br_dmz_vg_pln_quads": "Vanguard Plunder Quads",
"br_rumble_clash_caldera": "Caldera Clash",
"br_dbd_playlist_wz320/rbrthdbd_solos": "Iron Trials Rebirth Solos",
"br_dbd_playlist_wz320/rbrthdbd_duos": "Iron Trials Rebirth Duos",
"br_dbd_playlist_wz320/rbrthdbd_trios": "Iron Trials Rebirth Trios",
"br_dbd_playlist_wz320/rbrthdbd_quads": "Iron Trials Rebirth Quads",
"br_rebirth_calderaresurgence": "Caldera Resurgence",
"br_rebirth_reverse_playlist_wz325/rbrthsolos": "Rebirth Resurgence Solos",
"br_payload_playlist_wz325/rbrthpayload": "Rebirth Payload",
"br_rebirth_cdl_resurgence_rebirth_quads": "CDL Resurgence Rebirth Quads",
"br_dmz_playlist_wz325/rbrthbmo_quads": "Rebirth Blood Money Quads",
"br_playlist_wz325/br_aprl_fool_name4": "Totally Normal BR",
"br_rebirth_playlist_wz325/afd_resurgence": "Totally Normal Rebirth",
"br_dbd_playlist_wz330/cal_iron_solos": "Iron Trials Solos",
"br_dbd_playlist_wz330/cal_iron_duos": "Iron Trials Duos",
"br_dbd_playlist_wz330/cal_iron_trios": "Iron Trials Trios",
"br_dbd_playlist_wz330/cal_iron_quads": "Iron Trials Quads",
"br_mendota_playlist_wz330/op_mon": "Operation Monarch",
"br_respect_playlist_wz335/respect": "Champions of Caldera",
"br_playlist_wz335/rebirthexfilttrios": "Rebirth Exfiltration Trios",
"br_rebirth_cal_res_trios": "Caldera Resurgence Trios",
"br_rebirth_cal_res_quads": "Caldera Resurgence Quads",
"br_rebirth_reverse_playlist_wz340/fortkeep_res_solo": "Fortunes Keep Resurgence Solos",
"br_rebirth_playlist_wz340/fortkeep_res_duos": "Fortunes Keep Resurgence Duos",
"br_rebirth_playlist_wz340/fortkeep_res_trios": "Fortunes Keep Resurgence Trios",
"br_rebirth_playlist_wz340/fortkeep_res_quad": "Fortunes Keep Resurgence Quads",
"br_gold_war_playlist_wz340/gld_pldr": "Golden Plunder",
"br_tdbd_playlist_wz345/cal_titanium_solo": "Titanium Trials Solos",
"br_tdbd_playlist_wz345/cal_titanium_duos": "Titanium Trials Duos",
"br_tdbd_playlist_wz345/cal_titanium_trios": "Titanium Trials Trios",
"br_tdbd_playlist_wz345/cal_titanium_quads": "Titanium Trials Quads",
"br_rebirth_playlist_wz340/fortkeep_extreme": "Fortunes Keep Extreme",
"br_rumble_playlist_wz340/storage_town_clash_title": "Storage Town Clash",
"br_zxp_playlist_wz345/rxp": "Rebirth Of The Dead",
"br_respect_playlist_wz345/respect_solo": "Champion of Caldera Solos",
"br_respect_playlist_wz345/respect_trios": "Champion Of Caldera Trios",
"br_wsow_br_trios": "World Series Of Warzone Battle Royale Trios",
"br_olaride_playlist_wz350/olaride": "Operation: Last Call",
"br_mmp_playlist_wz350/mmp": "Sticks & Stones",
"br_rebirth_cdlr:_fortune's_keep_trios": "CDLR: Fortune's Keep Trios",
"br_mini_minibrsolo": "Mini Royale Solos",
"br_mini_minibrduos": "Mini Royale Duos",
"br_mini_minibrtrios": "Mini Royale Trios",
"br_mini_minibrquads": "Mini Royale Quads",
"br_rebirth_dbd_reverse_playlist_wz355/res_trials_solos": "Rebirth Supreme Solos",
"br_rebirth_dbd_playlist_wz355/res_trials_duos": "Rebirth Supreme Duos",
"br_rebirth_dbd_playlist_wz355/res_trials_trios": "Rebirth Supreme Trios",
"br_rebirth_dbd_playlist_wz355/res_trials_quads": "Rebirth Supreme Quads"
}
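Editor's note — the object above maps raw Warzone playlist ids to display names; the cleanGameMode helper in the compiled index.js performs a lookup of this kind, falling back to the raw id when no entry exists. A minimal sketch, with the file path assumed:

// Sketch only — the require path is an assumption.
const gameModes = require('./game-modes.json');

gameModes['br_brquads'];        // 'BR Quads'
gameModes['br_rumble_clash'];   // 'Clash'
gameModes['br_not_in_the_map']; // undefined → cleanGameMode would return the raw id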

View File

@ -1,236 +0,0 @@
{
"_MODERN WARFARE WEAPONS_": "iw8_",
"iw8_pi_golf21": "Pistol_X16",
"iw8_pi_papa320": "Pistol_M19",
"iw8_pi_decho": "Pistol_.50 GS",
"iw8_pi_mike1911": "Pistol_1911",
"iw8_pi_cpapa": "Pistol_.357 Magnum",
"iw8_pi_mike9": "Pistol_Renetti",
"iw8_pi_mike": "Pistol_Sykov",
"iw8_sm_mpapa5": "SMG_MP5",
"iw8_sm_beta": "SMG_PP19 Bizon",
"iw8_sm_augolf": "SMG_AUG",
"iw8_sm_papa90": "SMG_P90",
"iw8_sm_mpapa7": "SMG_MP7",
"iw8_sm_uzulu": "SMG_UZI",
"iw8_sm_charlie9": "SMG_CX-9",
"iw8_sm_smgolf45": "SMG_Striker 45",
"iw8_sm_victor": "SMG_Fennec",
"iw8_sm_secho": "SMG_ISO",
"iw8_me_riotshield": "Special_Riot Shield",
"iw8_knife": "Special_Knife",
"iw8_me_akimboblades": "Special_Dual Kodachis",
"iw8_me_akimboblunt": "Special_Kali Sticks",
"iw8_fists": "Special_Fists",
"iw8_knifestab": "Special_Combat Knife",
"iw8_fists_mp_zmb": "Special_Fists",
"iw8_ar_mike4": "AR_M4A1",
"iw8_ar_akilo47": "AR_AK-47",
"iw8_ar_asierra12": "AR_Oden",
"iw8_ar_falpha": "AR_FR 5.56",
"iw8_ar_mcharlie": "AR_M13",
"iw8_ar_kilo433": "AR_Kilo 141",
"iw8_ar_falima": "AR_FAL",
"iw8_ar_scharlie": "AR_FN Scar 17",
"iw8_ar_tango21": "AR_RAM-7",
"iw8_ar_sierra552": "AR_Grau 5.56",
"iw8_ar_galima": "AR_CR-56 AMAX",
"iw8_ar_anovember94": "AR_AN-94",
"iw8_ar_valpha": "AR_AS VAL",
"iw8_la_rpapa7": "Launcher_RPG-7",
"iw8_la_gromeo": "Launcher_PILA",
"iw8_la_juliet": "Launcher_JOKR",
"iw8_la_kgolf": "Launcher_Strela-P",
"iw8_la_mike32": "Launcher_MGL-32 Grenade Launcher",
"iw8_sn_mike14": "Marksman_EBR-14",
"iw8_sn_kilo98": "Marksman_Kar98k",
"iw8_sn_sbeta": "Marksman_MK2 Carbine",
"iw8_sn_golf28": "Marksman_SP-R 208",
"iw8_sn_crossbow": "Marksman_Crossbow",
"iw8_sn_sksierra": "Marksman_SKS",
"iw8_sn_romeo700": "Sniper_SP-R 208",
"iw8_sn_alpha50": "Sniper_AX-50",
"iw8_sn_delta": "Sniper_Dragunov",
"iw8_sn_hdromeo": "Sniper_HDR",
"iw8_sn_xmike109": "Sniper_Rytec AMR",
"iw8_sh_dpapa12": "Shotgun_R9-0",
"iw8_sh_oscar12": "Shotgun_Origin 12",
"iw8_sh_charlie725": "Shotgun_725",
"iw8_sh_romeo870": "Shotgun_Model 680",
"iw8_sh_mike26": "Shotgun_VLK Rogue",
"iw8_sh_aalpha12": "Shotgun_JAK-12",
"iw8_lm_kilo121": "LMG_M91",
"iw8_lm_pkilo": "LMG_PKM",
"iw8_lm_lima86": "LMG_SA87",
"iw8_lm_mgolf34": "LMG_MG34",
"iw8_lm_mgolf36": "LMG_Holger-26",
"iw8_lm_mkilo3": "LMG_Bruen Mk9",
"iw8_lm_sierrax": "LMG_FiNN LMG",
"iw8_lm_dblmg": "LMG_Minigun",
"iw8_lm_slima": "LMG_RAAL MG",
"_COLD WAR WEAPONS_": "iw8_t9",
"iw8_sn_t9quickscope": "Sniper_Pelington 703",
"iw8_sn_t9standard": "Sniper_LW3 - Tundra",
"iw8_sn_t9powersemi": "Sniper_M82",
"iw8_sn_t9damagesemi": "Sniper_Type 63",
"iw8_sn_t9precisionsemi": "Sniper_DMR 14",
"iw8_sn_t9cannon": "Sniper_ZRG 20mm",
"iw8_sn_t9crossbow": "Sniper_Crossbow",
"iw8_sn_t9accurate": "Sniper_Swiss K31",
"iw8_sn_t9explosivebow": "Sniper_Explosive Tip Crossbow",
"iw8_pi_t9burst": "Pistol_Diamatti",
"iw8_pi_t9revolver": "Pistol_Magnum",
"iw8_pi_t9semiauto": "Pistol_1911",
"iw8_pi_t9fullauto": "Pistol_AMP63",
"iw8_sm_t9standard": "SMG_MP5",
"iw8_sm_t9handling": "SMG_Milano 821",
"iw8_sm_t9heavy": "SMG_AK-74u",
"iw8_sm_t9fastfire": "SMG_MAC-10",
"iw8_sm_t9burst": "SMG_KSP 45",
"iw8_sm_t9capacity": "SMG_Bullfrog",
"iw8_sm_t9powerburst": "SMG_AUG",
"iw8_sm_t9accurate": "SMG_LC10",
"iw8_sm_t9spray": "SMG_PPsh-41",
"iw8_sm_t9nailgun": "SMG_Nailgun",
"iw8_ar_t9standard": "AR_XM4",
"iw8_ar_t9damage": "AR_AK-47",
"iw8_ar_t9accurate": "AR_Krig 6",
"iw8_ar_t9mobility": "AR_QBZ-83",
"iw8_ar_t9longburst": "AR_M16",
"iw8_ar_t9fasthandling": "AR_Groza",
"iw8_ar_t9fastfire": "AR_FFAR",
"iw8_ar_t9slowhandling": "AR_Fara 83",
"iw8_ar_t9slowfire": "AR_C58",
"iw8_ar_t9soviet": "AR_Vargo-S",
"iw8_sh_t9pump": "Shotgun_Hauer 77",
"iw8_sh_t9semiauto": "Shotgun_Gallo SA12",
"iw8_sh_t9fullauto": "Shotgun_Streetsweeper",
"iw8_lm_t9accurate": "LMG_Stoner 63",
"iw8_lm_t9slowfire": "LMG_M60",
"iw8_lm_t9light": "LMG_RPD",
"iw8_lm_t9fastfire": "LMG_Ameli",
"iw8_la_t9standard": "Launcher_Cigma 2",
"iw8_la_t9freefire": "Launcher_RPG-7",
"iw8_la_t9launcher": "Launcher_M79",
"iw8_me_t9sledgehammer": "Special_Sledgehammer",
"iw8_me_t9wakizashi": "Special_Wakizashi",
"iw8_me_t9loadout": "Special_Knife",
"iw8_me_t9machete": "Special_Machete",
"iw8_me_t9etool": "Special_Shovel",
"iw8_me_t9ballisticknife": "Special_Ballistic Knife",
"iw8_me_t9bat": "Special_Baseball Bat",
"_VANGUARD WEAPONS_": "s4_",
"s4_mr_moscar": "Sniper_3-Line Rifle",
"s4_mr_kalpha98": "Sniper_Kar98k",
"s4_mr_aromeo99": "Sniper_Type 99",
"s4_mr_ptango41": "Sniper_Gorenko Anti-Tank Rifle",
"s4_pi_malpha96": "Pistol_Machine Pistol",
"s4_pi_ttango33": "Pistol_RATT",
"s4_pi_mike1911": "Pistol_1911",
"s4_pi_wecho": "Pistol_Top Break",
"s4_pi_luniform08": "Pistol_Klauser",
"s4_sm_thotel": "SMG_M1928",
"s4_sm_stango5": "SMG_Sten",
"s4_sm_mpapa40": "SMG_Mp-40",
"s4_sm_ppapa41": "SMG_PPsh-41",
"s4_sm_owhiskey": "SMG_Owen Gun",
"s4_sm_tyankee100": "SMG_Type 100",
"s4_sm_wecho43": "SMG_Welgun",
"s4_sm_fromeo57": "SMG_Marco 5",
"s4_sm_guniform45": "SMG_H4 Blixen",
"s4_sm_aromeo43": "SMG_Armaguerra 43",
"s4_sm_salpha26": "SMG_RA 225",
"s4_ar_stango44": "AR_STG-44",
"s4_ar_bromeopg": "AR_ITRA Burst",
"s4_ar_balpha": "AR_BAR",
"s4_ar_chotel41": "AR_NZ-41",
"s4_ar_voscar": "AR_Volkssturmgewehr",
"s4_ar_asierra44": "AR_AS-44",
"s4_ar_fecho": "AR_Automaton",
"s4_ar_hyankee44": "AR_Cooper Carbine",
"s4_ar_kgolf40": "AR_KG M40",
"s4_ar_promeo45": "AR_Nikita AVT",
"s4_ar_emike1": "AR_EX1",
"s4_sh_becho": "Shotgun_Einhorn Revolving",
"s4_sh_bromeo5": "Shotgun_Gracey Auto",
"s4_sh_mike97": "Shotgun_Combat Shotgun",
"s4_sh_lindia98": "Shotgun_Double Barrel",
"s4_mg_mgolf42": "LMG_MG42",
"s4_mg_dpapa27": "LMG_DP27",
"s4_mg_bromeo37": "LMG_Bren",
"s4_mg_tyankee11": "LMG_Type11",
"s4_mg_malpha7": "LMG_Whitley",
"s4_mg_aalpha52": "LMG_UGM-8",
"s4_la_m1bravo": "Launcher_M1 Bazooka",
"s4_la_palpha": "Launcher_Panzerschreck",
"s4_la_palpha42": "Launcher_Panzerfaust",
"s4_la_mkilo1": "Launcher_Mk11 Launcher",
"s4_me_rindigo": "Melee_Combat Shield",
"s4_me_knife": "Melee_FS Fighting Knife",
"s4_me_leiomano": "Melee_Sawtooth",
"s4_me_katana": "Melee_Katanah",
"s4_me_axe": "Melee_Skål Crusher",
"s4_me_sledgehammer": "Melee_Sledgehammer",
"s4_mr_m1golf": "Marksman_M1 Grand",
"s4_mr_svictor40": "Marksman_SVT-40",
"s4_mr_gecho43": "Marksman_G-43",
"s4_mr_kalpha98": "Marksman_M1916",
"_Modern Warfare 2_": "iw9_",
"iw9_ar_mike4_mp": "AR_M4",
"iw9_ar_golf3_mp": "AR_Lachman-545",
"iw9_ar_mike4": "AR_M4 Variant",
"iw9_ar_kilo53_mp": "AR_Lachman-556",
"iw9_ar_schotel_mp": "AR_TAQ-V",
"iw9_ar_akilo74": "AR_Kastov-74U",
"iw9_ar_augolf": "AR_STB 556",
"iw9_ar_akilo": "AR_Kastov 762",
"iw9_ar_mike16": "AR_M16",
"iw9_ar_scharlie": "AR_TAQ-56",
"iw9_ar_scharlie_mp": "AR_TAQ-56",
"iw9_ar_akilo105": "AR_Kastov 545",
"iw9_sm_mpapa7_mp": "SMG_Vel 46",
"iw9_sm_victor_mp": "SMG_Fennec 45",
"iw9_sm_aviktor_mp": "SMG_Vaznev-9K",
"iw9_sm_alpha57_mp": "SMG_FSS Hurricane",
"iw9_sm_papa90_mp": "SMG_SMG_PDSW 528",
"iw9_sm_beta_mp": "SMG_Minibak",
"iw9_sm_apapa_mp": "SMG_MX9",
"iw9_sm_mpapa5_mp": "SMG_Lachmann 764",
"iw9_sh_mike1014": "Shotgun_Expedite 12",
"iw9_sh_charlie725_mp": "Shotgun_Lockwood 300",
"iw9_sh_mbravo_mp": "Shotgun_Bryson 800",
"iw9_sh_mviktor_mp": "Shotgun_Bryson 890",
"iw9_sn_mromeo": "Sniper_MCPR-300",
"iw9_sn_limax": "Sniper_Signal 50",
"iw9_sn_xmike2010": "Sniper_SP-X 80",
"iw9_dm_sbeta_mp": "Sniper_Lockwood MK2",
"iw9_dm_mike24_mp": "Sniper_SP-R 208",
"iw9_pi_papa220_mp": "Pistol_P890",
"iw9_pi_golf18": "Pistol_X13",
"iw9_pi_decho_mp": "Pistol_.50 GS",
"iw9_pi_golf17_mp": "Pistol_X12",
"iw9_pi_swhiskey_mp": "Pistol_Basilisk",
"iw9_me_climbfists": "Melee_Fists",
"iw9_me_riotshield_mp": "Melee_Riot Shield",
"throwingknife_mp": "Melee_Throwing Knife",
"iw9_la_juliet_mp": "Launcher_JOKR",
"iw9_la_gromeo_mp": "Launcher_PILA",
"iw9_la_rpapa7_mp": "Launcher_RPG-7",
"iw9_la_kgolf_mp": "Launcher_Strella-P",
"iw9_lm_rkilo_mp": "LMG_RPK",
"iw9_lm_rkilo": "LMG_RPK",
"iw9_lm_ahotel_mp": "LMG_HCR 56",
"iw9_lm_slima_mp": "LMG_RAAL MG",
"iw9_lm_slima": "LMG_RAAL MG",
"iw9_lm_ngolf7_mp": "LMG_SAKIN MG38",
"iw9_lm_rkilo21_mp": "LMG_Rapp H",
"iw9_lm_foxtrot": "LMG_556 Icarus",
"iw9_br_msecho_mp": "BR_FTac Recon",
"iw9_br_msecho": "BR_FTac Recon",
"iw9_br_soscar14": "BR_SO-14",
"iw9_dm_pgolf1_mp": "DMR_LM-S",
"iw9_dm_la700": "DMR_LA-B 330",
"iw9_dm_sa700": "DMR_SA-B 50",
"iw9_dm_scromeo": "DMR_TAQ-M",
"iw9_dm_mike14": "DMR_ERB-14"
}
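The mapping above pairs internal weapon identifiers with display strings encoded as `Category_Display Name`. A minimal consumer sketch, assuming the JSON is saved locally as `weaponNames.json` — the file name and the splitting rule are illustrative, not part of the original package:
```
// Sketch: turning the id -> "Category_Display Name" map above into a
// structured lookup. `weaponNames.json` is a hypothetical local copy.
const weaponNames = require('./weaponNames.json');

function lookupWeapon(id) {
  const entry = weaponNames[id];
  if (!entry) {
    return null;
  }
  // Values are encoded as "<Category>_<Display name>", e.g. "AR_M4A1".
  const [category, ...rest] = entry.split('_');
  return { id, category, name: rest.join('_') };
}

console.log(lookupWeapon('iw8_ar_mike4'));
// => { id: 'iw8_ar_mike4', category: 'AR', name: 'M4A1' }
```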

View File

@ -1,63 +0,0 @@
{
"name": "call-of-duty-api",
"version": "4.1.0",
"description": "NodeJS Wrapper for the Call Of Duty API.",
"main": "dist/index",
"types": "dist/index",
"files": [
"dist"
],
"scripts": {
"prebuild": "rimraf dist",
"build": "tsc",
"prepublishOnly": "npm run build"
},
"repository": {
"type": "git",
"url": "git+https://github.com/Lierrmm/Node-CallOfDuty.git"
},
"keywords": [
"br",
"warzone",
"call-of-duty",
"call of duty",
"modern warfare",
"black ops",
"iw",
"battle royale",
"Cold War",
"cw",
"3arc",
"vanguard",
"Treyarch",
"Infinity Ward",
"Sledgehammer",
"Raven",
"Modern Warfare 2",
"Modern Warfare 3",
"Warzone 2",
"Blue Moon",
"Beenox",
"Warzone Mobile",
"WZM"
],
"bugs": {
"url": "https://github.com/Lierrmm/Node-CallOfDuty/issues"
},
"homepage": "https://codapi.dev",
"funding": {
"type": "github",
"url": "https://github.com/sponsors/Lierrmm"
},
"author": "Liam Gaskell",
"license": "MIT",
"devDependencies": {
"@types/node": "^18.11.4",
"rimraf": "^3.0.2",
"typescript": "^4.8.4"
},
"dependencies": {
"tslib": "^2.4.0",
"undici": "^5.12.0"
}
}

415
node_modules/chalk/index.d.ts generated vendored
View File

@ -1,415 +0,0 @@
/**
Basic foreground colors.
[More colors here.](https://github.com/chalk/chalk/blob/master/readme.md#256-and-truecolor-color-support)
*/
declare type ForegroundColor =
| 'black'
| 'red'
| 'green'
| 'yellow'
| 'blue'
| 'magenta'
| 'cyan'
| 'white'
| 'gray'
| 'grey'
| 'blackBright'
| 'redBright'
| 'greenBright'
| 'yellowBright'
| 'blueBright'
| 'magentaBright'
| 'cyanBright'
| 'whiteBright';
/**
Basic background colors.
[More colors here.](https://github.com/chalk/chalk/blob/master/readme.md#256-and-truecolor-color-support)
*/
declare type BackgroundColor =
| 'bgBlack'
| 'bgRed'
| 'bgGreen'
| 'bgYellow'
| 'bgBlue'
| 'bgMagenta'
| 'bgCyan'
| 'bgWhite'
| 'bgGray'
| 'bgGrey'
| 'bgBlackBright'
| 'bgRedBright'
| 'bgGreenBright'
| 'bgYellowBright'
| 'bgBlueBright'
| 'bgMagentaBright'
| 'bgCyanBright'
| 'bgWhiteBright';
/**
Basic colors.
[More colors here.](https://github.com/chalk/chalk/blob/master/readme.md#256-and-truecolor-color-support)
*/
declare type Color = ForegroundColor | BackgroundColor;
declare type Modifiers =
| 'reset'
| 'bold'
| 'dim'
| 'italic'
| 'underline'
| 'inverse'
| 'hidden'
| 'strikethrough'
| 'visible';
declare namespace chalk {
/**
Levels:
- `0` - All colors disabled.
- `1` - Basic 16 colors support.
- `2` - ANSI 256 colors support.
- `3` - Truecolor 16 million colors support.
*/
type Level = 0 | 1 | 2 | 3;
interface Options {
/**
Specify the color support for Chalk.
By default, color support is automatically detected based on the environment.
Levels:
- `0` - All colors disabled.
- `1` - Basic 16 colors support.
- `2` - ANSI 256 colors support.
- `3` - Truecolor 16 million colors support.
*/
level?: Level;
}
/**
Return a new Chalk instance.
*/
type Instance = new (options?: Options) => Chalk;
/**
Detect whether the terminal supports color.
*/
interface ColorSupport {
/**
The color level used by Chalk.
*/
level: Level;
/**
Return whether Chalk supports basic 16 colors.
*/
hasBasic: boolean;
/**
Return whether Chalk supports ANSI 256 colors.
*/
has256: boolean;
/**
Return whether Chalk supports Truecolor 16 million colors.
*/
has16m: boolean;
}
interface ChalkFunction {
/**
Use a template string.
@remarks Template literals are unsupported for nested calls (see [issue #341](https://github.com/chalk/chalk/issues/341))
@example
```
import chalk = require('chalk');
log(chalk`
CPU: {red ${cpu.totalPercent}%}
RAM: {green ${ram.used / ram.total * 100}%}
DISK: {rgb(255,131,0) ${disk.used / disk.total * 100}%}
`);
```
@example
```
import chalk = require('chalk');
log(chalk.red.bgBlack`2 + 3 = {bold ${2 + 3}}`)
```
*/
(text: TemplateStringsArray, ...placeholders: unknown[]): string;
(...text: unknown[]): string;
}
interface Chalk extends ChalkFunction {
/**
Return a new Chalk instance.
*/
Instance: Instance;
/**
The color support for Chalk.
By default, color support is automatically detected based on the environment.
Levels:
- `0` - All colors disabled.
- `1` - Basic 16 colors support.
- `2` - ANSI 256 colors support.
- `3` - Truecolor 16 million colors support.
*/
level: Level;
/**
Use HEX value to set text color.
@param color - Hexadecimal value representing the desired color.
@example
```
import chalk = require('chalk');
chalk.hex('#DEADED');
```
*/
hex(color: string): Chalk;
/**
Use keyword color value to set text color.
@param color - Keyword value representing the desired color.
@example
```
import chalk = require('chalk');
chalk.keyword('orange');
```
*/
keyword(color: string): Chalk;
/**
Use RGB values to set text color.
*/
rgb(red: number, green: number, blue: number): Chalk;
/**
Use HSL values to set text color.
*/
hsl(hue: number, saturation: number, lightness: number): Chalk;
/**
Use HSV values to set text color.
*/
hsv(hue: number, saturation: number, value: number): Chalk;
/**
Use HWB values to set text color.
*/
hwb(hue: number, whiteness: number, blackness: number): Chalk;
/**
Use a [Select/Set Graphic Rendition](https://en.wikipedia.org/wiki/ANSI_escape_code#SGR_parameters) (SGR) [color code number](https://en.wikipedia.org/wiki/ANSI_escape_code#3/4_bit) to set text color.
30 <= code && code < 38 || 90 <= code && code < 98
For example, 31 for red, 91 for redBright.
*/
ansi(code: number): Chalk;
/**
Use an [8-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit) to set text color.
*/
ansi256(index: number): Chalk;
/**
Use HEX value to set background color.
@param color - Hexadecimal value representing the desired color.
@example
```
import chalk = require('chalk');
chalk.bgHex('#DEADED');
```
*/
bgHex(color: string): Chalk;
/**
Use keyword color value to set background color.
@param color - Keyword value representing the desired color.
@example
```
import chalk = require('chalk');
chalk.bgKeyword('orange');
```
*/
bgKeyword(color: string): Chalk;
/**
Use RGB values to set background color.
*/
bgRgb(red: number, green: number, blue: number): Chalk;
/**
Use HSL values to set background color.
*/
bgHsl(hue: number, saturation: number, lightness: number): Chalk;
/**
Use HSV values to set background color.
*/
bgHsv(hue: number, saturation: number, value: number): Chalk;
/**
Use HWB values to set background color.
*/
bgHwb(hue: number, whiteness: number, blackness: number): Chalk;
/**
Use a [Select/Set Graphic Rendition](https://en.wikipedia.org/wiki/ANSI_escape_code#SGR_parameters) (SGR) [color code number](https://en.wikipedia.org/wiki/ANSI_escape_code#3/4_bit) to set background color.
30 <= code && code < 38 || 90 <= code && code < 98
For example, 31 for red, 91 for redBright.
Use the foreground code, not the background code (for example, not 41, nor 101).
*/
bgAnsi(code: number): Chalk;
/**
Use an [8-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit) to set background color.
*/
bgAnsi256(index: number): Chalk;
/**
Modifier: Resets the current color chain.
*/
readonly reset: Chalk;
/**
Modifier: Make text bold.
*/
readonly bold: Chalk;
/**
Modifier: Emitting only a small amount of light.
*/
readonly dim: Chalk;
/**
Modifier: Make text italic. (Not widely supported)
*/
readonly italic: Chalk;
/**
Modifier: Make text underline. (Not widely supported)
*/
readonly underline: Chalk;
/**
Modifier: Inverse background and foreground colors.
*/
readonly inverse: Chalk;
/**
Modifier: Prints the text, but makes it invisible.
*/
readonly hidden: Chalk;
/**
Modifier: Puts a horizontal line through the center of the text. (Not widely supported)
*/
readonly strikethrough: Chalk;
/**
Modifier: Prints the text only when Chalk has a color support level > 0.
Can be useful for things that are purely cosmetic.
*/
readonly visible: Chalk;
readonly black: Chalk;
readonly red: Chalk;
readonly green: Chalk;
readonly yellow: Chalk;
readonly blue: Chalk;
readonly magenta: Chalk;
readonly cyan: Chalk;
readonly white: Chalk;
/*
Alias for `blackBright`.
*/
readonly gray: Chalk;
/*
Alias for `blackBright`.
*/
readonly grey: Chalk;
readonly blackBright: Chalk;
readonly redBright: Chalk;
readonly greenBright: Chalk;
readonly yellowBright: Chalk;
readonly blueBright: Chalk;
readonly magentaBright: Chalk;
readonly cyanBright: Chalk;
readonly whiteBright: Chalk;
readonly bgBlack: Chalk;
readonly bgRed: Chalk;
readonly bgGreen: Chalk;
readonly bgYellow: Chalk;
readonly bgBlue: Chalk;
readonly bgMagenta: Chalk;
readonly bgCyan: Chalk;
readonly bgWhite: Chalk;
/*
Alias for `bgBlackBright`.
*/
readonly bgGray: Chalk;
/*
Alias for `bgBlackBright`.
*/
readonly bgGrey: Chalk;
readonly bgBlackBright: Chalk;
readonly bgRedBright: Chalk;
readonly bgGreenBright: Chalk;
readonly bgYellowBright: Chalk;
readonly bgBlueBright: Chalk;
readonly bgMagentaBright: Chalk;
readonly bgCyanBright: Chalk;
readonly bgWhiteBright: Chalk;
}
}
/**
Main Chalk object that lets you chain styles together.
Call the last style in the chain as a method with a string argument.
Order doesn't matter, and later styles take precedence in case of a conflict.
This simply means that `chalk.red.yellow.green` is equivalent to `chalk.green`.
*/
declare const chalk: chalk.Chalk & chalk.ChalkFunction & {
supportsColor: chalk.ColorSupport | false;
Level: chalk.Level;
Color: Color;
ForegroundColor: ForegroundColor;
BackgroundColor: BackgroundColor;
Modifiers: Modifiers;
stderr: chalk.Chalk & {supportsColor: chalk.ColorSupport | false};
};
export = chalk;
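The declarations above describe chalk v4's chainable API. A small usage sketch consistent with those typings (not taken from the package itself):
```
// Usage sketch based on the chalk v4 typings above: styles chain, and the
// final call takes the string to colorize.
const chalk = require('chalk');

console.log(chalk.red.bold('error'), 'something failed');
console.log(chalk.bgYellow.black(' warning '));
console.log(chalk.hex('#DEADED').underline('custom hex color'));

// `level` reflects detected color support (0-3) and can be overridden
// per instance, e.g. to force plain output.
const plain = new chalk.Instance({ level: 0 });
console.log(plain.green('printed without ANSI codes'));
```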

View File

@ -1,345 +0,0 @@
declare type CSSColor =
| 'aliceblue'
| 'antiquewhite'
| 'aqua'
| 'aquamarine'
| 'azure'
| 'beige'
| 'bisque'
| 'black'
| 'blanchedalmond'
| 'blue'
| 'blueviolet'
| 'brown'
| 'burlywood'
| 'cadetblue'
| 'chartreuse'
| 'chocolate'
| 'coral'
| 'cornflowerblue'
| 'cornsilk'
| 'crimson'
| 'cyan'
| 'darkblue'
| 'darkcyan'
| 'darkgoldenrod'
| 'darkgray'
| 'darkgreen'
| 'darkgrey'
| 'darkkhaki'
| 'darkmagenta'
| 'darkolivegreen'
| 'darkorange'
| 'darkorchid'
| 'darkred'
| 'darksalmon'
| 'darkseagreen'
| 'darkslateblue'
| 'darkslategray'
| 'darkslategrey'
| 'darkturquoise'
| 'darkviolet'
| 'deeppink'
| 'deepskyblue'
| 'dimgray'
| 'dimgrey'
| 'dodgerblue'
| 'firebrick'
| 'floralwhite'
| 'forestgreen'
| 'fuchsia'
| 'gainsboro'
| 'ghostwhite'
| 'gold'
| 'goldenrod'
| 'gray'
| 'green'
| 'greenyellow'
| 'grey'
| 'honeydew'
| 'hotpink'
| 'indianred'
| 'indigo'
| 'ivory'
| 'khaki'
| 'lavender'
| 'lavenderblush'
| 'lawngreen'
| 'lemonchiffon'
| 'lightblue'
| 'lightcoral'
| 'lightcyan'
| 'lightgoldenrodyellow'
| 'lightgray'
| 'lightgreen'
| 'lightgrey'
| 'lightpink'
| 'lightsalmon'
| 'lightseagreen'
| 'lightskyblue'
| 'lightslategray'
| 'lightslategrey'
| 'lightsteelblue'
| 'lightyellow'
| 'lime'
| 'limegreen'
| 'linen'
| 'magenta'
| 'maroon'
| 'mediumaquamarine'
| 'mediumblue'
| 'mediumorchid'
| 'mediumpurple'
| 'mediumseagreen'
| 'mediumslateblue'
| 'mediumspringgreen'
| 'mediumturquoise'
| 'mediumvioletred'
| 'midnightblue'
| 'mintcream'
| 'mistyrose'
| 'moccasin'
| 'navajowhite'
| 'navy'
| 'oldlace'
| 'olive'
| 'olivedrab'
| 'orange'
| 'orangered'
| 'orchid'
| 'palegoldenrod'
| 'palegreen'
| 'paleturquoise'
| 'palevioletred'
| 'papayawhip'
| 'peachpuff'
| 'peru'
| 'pink'
| 'plum'
| 'powderblue'
| 'purple'
| 'rebeccapurple'
| 'red'
| 'rosybrown'
| 'royalblue'
| 'saddlebrown'
| 'salmon'
| 'sandybrown'
| 'seagreen'
| 'seashell'
| 'sienna'
| 'silver'
| 'skyblue'
| 'slateblue'
| 'slategray'
| 'slategrey'
| 'snow'
| 'springgreen'
| 'steelblue'
| 'tan'
| 'teal'
| 'thistle'
| 'tomato'
| 'turquoise'
| 'violet'
| 'wheat'
| 'white'
| 'whitesmoke'
| 'yellow'
| 'yellowgreen';
declare namespace ansiStyles {
interface ColorConvert {
/**
The RGB color space.
@param red - (`0`-`255`)
@param green - (`0`-`255`)
@param blue - (`0`-`255`)
*/
rgb(red: number, green: number, blue: number): string;
/**
The RGB HEX color space.
@param hex - A hexadecimal string containing RGB data.
*/
hex(hex: string): string;
/**
@param keyword - A CSS color name.
*/
keyword(keyword: CSSColor): string;
/**
The HSL color space.
@param hue - (`0`-`360`)
@param saturation - (`0`-`100`)
@param lightness - (`0`-`100`)
*/
hsl(hue: number, saturation: number, lightness: number): string;
/**
The HSV color space.
@param hue - (`0`-`360`)
@param saturation - (`0`-`100`)
@param value - (`0`-`100`)
*/
hsv(hue: number, saturation: number, value: number): string;
/**
The HWB color space.
@param hue - (`0`-`360`)
@param whiteness - (`0`-`100`)
@param blackness - (`0`-`100`)
*/
hwb(hue: number, whiteness: number, blackness: number): string;
/**
Use a [4-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#3/4-bit) to set text color.
*/
ansi(ansi: number): string;
/**
Use an [8-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit) to set text color.
*/
ansi256(ansi: number): string;
}
interface CSPair {
/**
The ANSI terminal control sequence for starting this style.
*/
readonly open: string;
/**
The ANSI terminal control sequence for ending this style.
*/
readonly close: string;
}
interface ColorBase {
readonly ansi: ColorConvert;
readonly ansi256: ColorConvert;
readonly ansi16m: ColorConvert;
/**
The ANSI terminal control sequence for ending this color.
*/
readonly close: string;
}
interface Modifier {
/**
Resets the current color chain.
*/
readonly reset: CSPair;
/**
Make text bold.
*/
readonly bold: CSPair;
/**
Emitting only a small amount of light.
*/
readonly dim: CSPair;
/**
Make text italic. (Not widely supported)
*/
readonly italic: CSPair;
/**
Make text underline. (Not widely supported)
*/
readonly underline: CSPair;
/**
Inverse background and foreground colors.
*/
readonly inverse: CSPair;
/**
Prints the text, but makes it invisible.
*/
readonly hidden: CSPair;
/**
Puts a horizontal line through the center of the text. (Not widely supported)
*/
readonly strikethrough: CSPair;
}
interface ForegroundColor {
readonly black: CSPair;
readonly red: CSPair;
readonly green: CSPair;
readonly yellow: CSPair;
readonly blue: CSPair;
readonly cyan: CSPair;
readonly magenta: CSPair;
readonly white: CSPair;
/**
Alias for `blackBright`.
*/
readonly gray: CSPair;
/**
Alias for `blackBright`.
*/
readonly grey: CSPair;
readonly blackBright: CSPair;
readonly redBright: CSPair;
readonly greenBright: CSPair;
readonly yellowBright: CSPair;
readonly blueBright: CSPair;
readonly cyanBright: CSPair;
readonly magentaBright: CSPair;
readonly whiteBright: CSPair;
}
interface BackgroundColor {
readonly bgBlack: CSPair;
readonly bgRed: CSPair;
readonly bgGreen: CSPair;
readonly bgYellow: CSPair;
readonly bgBlue: CSPair;
readonly bgCyan: CSPair;
readonly bgMagenta: CSPair;
readonly bgWhite: CSPair;
/**
Alias for `bgBlackBright`.
*/
readonly bgGray: CSPair;
/**
Alias for `bgBlackBright`.
*/
readonly bgGrey: CSPair;
readonly bgBlackBright: CSPair;
readonly bgRedBright: CSPair;
readonly bgGreenBright: CSPair;
readonly bgYellowBright: CSPair;
readonly bgBlueBright: CSPair;
readonly bgCyanBright: CSPair;
readonly bgMagentaBright: CSPair;
readonly bgWhiteBright: CSPair;
}
}
declare const ansiStyles: {
readonly modifier: ansiStyles.Modifier;
readonly color: ansiStyles.ForegroundColor & ansiStyles.ColorBase;
readonly bgColor: ansiStyles.BackgroundColor & ansiStyles.ColorBase;
readonly codes: ReadonlyMap<number, number>;
} & ansiStyles.BackgroundColor & ansiStyles.ForegroundColor & ansiStyles.Modifier;
export = ansiStyles;

View File

@ -1,163 +0,0 @@
'use strict';
const wrapAnsi16 = (fn, offset) => (...args) => {
const code = fn(...args);
return `\u001B[${code + offset}m`;
};
const wrapAnsi256 = (fn, offset) => (...args) => {
const code = fn(...args);
return `\u001B[${38 + offset};5;${code}m`;
};
const wrapAnsi16m = (fn, offset) => (...args) => {
const rgb = fn(...args);
return `\u001B[${38 + offset};2;${rgb[0]};${rgb[1]};${rgb[2]}m`;
};
const ansi2ansi = n => n;
const rgb2rgb = (r, g, b) => [r, g, b];
const setLazyProperty = (object, property, get) => {
Object.defineProperty(object, property, {
get: () => {
const value = get();
Object.defineProperty(object, property, {
value,
enumerable: true,
configurable: true
});
return value;
},
enumerable: true,
configurable: true
});
};
/** @type {typeof import('color-convert')} */
let colorConvert;
const makeDynamicStyles = (wrap, targetSpace, identity, isBackground) => {
if (colorConvert === undefined) {
colorConvert = require('color-convert');
}
const offset = isBackground ? 10 : 0;
const styles = {};
for (const [sourceSpace, suite] of Object.entries(colorConvert)) {
const name = sourceSpace === 'ansi16' ? 'ansi' : sourceSpace;
if (sourceSpace === targetSpace) {
styles[name] = wrap(identity, offset);
} else if (typeof suite === 'object') {
styles[name] = wrap(suite[targetSpace], offset);
}
}
return styles;
};
function assembleStyles() {
const codes = new Map();
const styles = {
modifier: {
reset: [0, 0],
// 21 isn't widely supported and 22 does the same thing
bold: [1, 22],
dim: [2, 22],
italic: [3, 23],
underline: [4, 24],
inverse: [7, 27],
hidden: [8, 28],
strikethrough: [9, 29]
},
color: {
black: [30, 39],
red: [31, 39],
green: [32, 39],
yellow: [33, 39],
blue: [34, 39],
magenta: [35, 39],
cyan: [36, 39],
white: [37, 39],
// Bright color
blackBright: [90, 39],
redBright: [91, 39],
greenBright: [92, 39],
yellowBright: [93, 39],
blueBright: [94, 39],
magentaBright: [95, 39],
cyanBright: [96, 39],
whiteBright: [97, 39]
},
bgColor: {
bgBlack: [40, 49],
bgRed: [41, 49],
bgGreen: [42, 49],
bgYellow: [43, 49],
bgBlue: [44, 49],
bgMagenta: [45, 49],
bgCyan: [46, 49],
bgWhite: [47, 49],
// Bright color
bgBlackBright: [100, 49],
bgRedBright: [101, 49],
bgGreenBright: [102, 49],
bgYellowBright: [103, 49],
bgBlueBright: [104, 49],
bgMagentaBright: [105, 49],
bgCyanBright: [106, 49],
bgWhiteBright: [107, 49]
}
};
// Alias bright black as gray (and grey)
styles.color.gray = styles.color.blackBright;
styles.bgColor.bgGray = styles.bgColor.bgBlackBright;
styles.color.grey = styles.color.blackBright;
styles.bgColor.bgGrey = styles.bgColor.bgBlackBright;
for (const [groupName, group] of Object.entries(styles)) {
for (const [styleName, style] of Object.entries(group)) {
styles[styleName] = {
open: `\u001B[${style[0]}m`,
close: `\u001B[${style[1]}m`
};
group[styleName] = styles[styleName];
codes.set(style[0], style[1]);
}
Object.defineProperty(styles, groupName, {
value: group,
enumerable: false
});
}
Object.defineProperty(styles, 'codes', {
value: codes,
enumerable: false
});
styles.color.close = '\u001B[39m';
styles.bgColor.close = '\u001B[49m';
setLazyProperty(styles.color, 'ansi', () => makeDynamicStyles(wrapAnsi16, 'ansi16', ansi2ansi, false));
setLazyProperty(styles.color, 'ansi256', () => makeDynamicStyles(wrapAnsi256, 'ansi256', ansi2ansi, false));
setLazyProperty(styles.color, 'ansi16m', () => makeDynamicStyles(wrapAnsi16m, 'rgb', rgb2rgb, false));
setLazyProperty(styles.bgColor, 'ansi', () => makeDynamicStyles(wrapAnsi16, 'ansi16', ansi2ansi, true));
setLazyProperty(styles.bgColor, 'ansi256', () => makeDynamicStyles(wrapAnsi256, 'ansi256', ansi2ansi, true));
setLazyProperty(styles.bgColor, 'ansi16m', () => makeDynamicStyles(wrapAnsi16m, 'rgb', rgb2rgb, true));
return styles;
}
// Make the export immutable
Object.defineProperty(module, 'exports', {
enumerable: true,
get: assembleStyles
});
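`assembleStyles()` above exposes `{open, close}` escape-pair groups under `modifier`, `color`, and `bgColor`, plus a `codes` map from each opening SGR code to its closing code. A minimal sketch using only properties visible in that code:
```
// Sketch using the open/close pairs that assembleStyles() exposes above.
const styles = require('ansi-styles');

// Wrap text in green + bold, closing each style explicitly.
const msg = `${styles.color.green.open}${styles.modifier.bold.open}ok${styles.modifier.bold.close}${styles.color.close}`;
console.log(msg);

// `codes` maps opening SGR codes to their closing codes, e.g. 31 (red) -> 39.
console.log(styles.codes.get(31)); // => 39
```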

View File

@ -1,56 +0,0 @@
{
"name": "ansi-styles",
"version": "4.3.0",
"description": "ANSI escape codes for styling strings in the terminal",
"license": "MIT",
"repository": "chalk/ansi-styles",
"funding": "https://github.com/chalk/ansi-styles?sponsor=1",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "sindresorhus.com"
},
"engines": {
"node": ">=8"
},
"scripts": {
"test": "xo && ava && tsd",
"screenshot": "svg-term --command='node screenshot' --out=screenshot.svg --padding=3 --width=55 --height=3 --at=1000 --no-cursor"
},
"files": [
"index.js",
"index.d.ts"
],
"keywords": [
"ansi",
"styles",
"color",
"colour",
"colors",
"terminal",
"console",
"cli",
"string",
"tty",
"escape",
"formatting",
"rgb",
"256",
"shell",
"xterm",
"log",
"logging",
"command-line",
"text"
],
"dependencies": {
"color-convert": "^2.0.1"
},
"devDependencies": {
"@types/color-convert": "^1.9.0",
"ava": "^2.3.0",
"svg-term-cli": "^2.1.1",
"tsd": "^0.11.0",
"xo": "^0.25.3"
}
}

View File

@ -1,39 +0,0 @@
/**
Check if [`argv`](https://nodejs.org/docs/latest/api/process.html#process_process_argv) has a specific flag.
@param flag - CLI flag to look for. The `--` prefix is optional.
@param argv - CLI arguments. Default: `process.argv`.
@returns Whether the flag exists.
@example
```
// $ ts-node foo.ts -f --unicorn --foo=bar -- --rainbow
// foo.ts
import hasFlag = require('has-flag');
hasFlag('unicorn');
//=> true
hasFlag('--unicorn');
//=> true
hasFlag('f');
//=> true
hasFlag('-f');
//=> true
hasFlag('foo=bar');
//=> true
hasFlag('foo');
//=> false
hasFlag('rainbow');
//=> false
```
*/
declare function hasFlag(flag: string, argv?: string[]): boolean;
export = hasFlag;

View File

@ -1,8 +0,0 @@
'use strict';
module.exports = (flag, argv = process.argv) => {
const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--');
const position = argv.indexOf(prefix + flag);
const terminatorPosition = argv.indexOf('--');
return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition);
};
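Beyond the examples in the typings, the implementation above also honors the conventional `--` argument terminator: flags that appear after it are ignored. A tiny sketch with an explicit `argv` array:
```
// Sketch of the `--` terminator handling in the implementation above.
const hasFlag = require('has-flag');

const argv = ['--unicorn', '--', '--rainbow'];
console.log(hasFlag('unicorn', argv)); // => true
console.log(hasFlag('rainbow', argv)); // => false (appears after the `--` terminator)
```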

View File

@ -1,46 +0,0 @@
{
"name": "has-flag",
"version": "4.0.0",
"description": "Check if argv has a specific flag",
"license": "MIT",
"repository": "sindresorhus/has-flag",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "sindresorhus.com"
},
"engines": {
"node": ">=8"
},
"scripts": {
"test": "xo && ava && tsd"
},
"files": [
"index.js",
"index.d.ts"
],
"keywords": [
"has",
"check",
"detect",
"contains",
"find",
"flag",
"cli",
"command-line",
"argv",
"process",
"arg",
"args",
"argument",
"arguments",
"getopt",
"minimist",
"optimist"
],
"devDependencies": {
"ava": "^1.4.1",
"tsd": "^0.7.2",
"xo": "^0.24.0"
}
}

View File

@ -1,5 +0,0 @@
'use strict';
module.exports = {
stdout: false,
stderr: false
};

View File

@ -1,135 +0,0 @@
'use strict';
const os = require('os');
const tty = require('tty');
const hasFlag = require('has-flag');
const {env} = process;
let forceColor;
if (hasFlag('no-color') ||
hasFlag('no-colors') ||
hasFlag('color=false') ||
hasFlag('color=never')) {
forceColor = 0;
} else if (hasFlag('color') ||
hasFlag('colors') ||
hasFlag('color=true') ||
hasFlag('color=always')) {
forceColor = 1;
}
if ('FORCE_COLOR' in env) {
if (env.FORCE_COLOR === 'true') {
forceColor = 1;
} else if (env.FORCE_COLOR === 'false') {
forceColor = 0;
} else {
forceColor = env.FORCE_COLOR.length === 0 ? 1 : Math.min(parseInt(env.FORCE_COLOR, 10), 3);
}
}
function translateLevel(level) {
if (level === 0) {
return false;
}
return {
level,
hasBasic: true,
has256: level >= 2,
has16m: level >= 3
};
}
function supportsColor(haveStream, streamIsTTY) {
if (forceColor === 0) {
return 0;
}
if (hasFlag('color=16m') ||
hasFlag('color=full') ||
hasFlag('color=truecolor')) {
return 3;
}
if (hasFlag('color=256')) {
return 2;
}
if (haveStream && !streamIsTTY && forceColor === undefined) {
return 0;
}
const min = forceColor || 0;
if (env.TERM === 'dumb') {
return min;
}
if (process.platform === 'win32') {
// Windows 10 build 10586 is the first Windows release that supports 256 colors.
// Windows 10 build 14931 is the first release that supports 16m/TrueColor.
const osRelease = os.release().split('.');
if (
Number(osRelease[0]) >= 10 &&
Number(osRelease[2]) >= 10586
) {
return Number(osRelease[2]) >= 14931 ? 3 : 2;
}
return 1;
}
if ('CI' in env) {
if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI', 'GITHUB_ACTIONS', 'BUILDKITE'].some(sign => sign in env) || env.CI_NAME === 'codeship') {
return 1;
}
return min;
}
if ('TEAMCITY_VERSION' in env) {
return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0;
}
if (env.COLORTERM === 'truecolor') {
return 3;
}
if ('TERM_PROGRAM' in env) {
const version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10);
switch (env.TERM_PROGRAM) {
case 'iTerm.app':
return version >= 3 ? 3 : 2;
case 'Apple_Terminal':
return 2;
// No default
}
}
if (/-256(color)?$/i.test(env.TERM)) {
return 2;
}
if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) {
return 1;
}
if ('COLORTERM' in env) {
return 1;
}
return min;
}
function getSupportLevel(stream) {
const level = supportsColor(stream, stream && stream.isTTY);
return translateLevel(level);
}
module.exports = {
supportsColor: getSupportLevel,
stdout: translateLevel(supportsColor(true, tty.isatty(1))),
stderr: translateLevel(supportsColor(true, tty.isatty(2)))
};
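The module above exports `stdout` and `stderr` entries that are either `false` or an object with `level`, `hasBasic`, `has256`, and `has16m`. A minimal consumer sketch:
```
// Sketch: branching on the color support level detected above.
const supportsColor = require('supports-color');

if (supportsColor.stdout && supportsColor.stdout.has256) {
  console.log('stdout supports at least 256 colors (level %d)', supportsColor.stdout.level);
} else if (supportsColor.stdout) {
  console.log('stdout supports basic 16 colors');
} else {
  console.log('no color support detected on stdout');
}
```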

View File

@ -1,53 +0,0 @@
{
"name": "supports-color",
"version": "7.2.0",
"description": "Detect whether a terminal supports color",
"license": "MIT",
"repository": "chalk/supports-color",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "sindresorhus.com"
},
"engines": {
"node": ">=8"
},
"scripts": {
"test": "xo && ava"
},
"files": [
"index.js",
"browser.js"
],
"keywords": [
"color",
"colour",
"colors",
"terminal",
"console",
"cli",
"ansi",
"styles",
"tty",
"rgb",
"256",
"shell",
"xterm",
"command-line",
"support",
"supports",
"capability",
"detect",
"truecolor",
"16m"
],
"dependencies": {
"has-flag": "^4.0.0"
},
"devDependencies": {
"ava": "^1.4.1",
"import-fresh": "^3.0.0",
"xo": "^0.24.0"
},
"browser": "browser.js"
}

Some files were not shown because too many files have changed in this diff.