format: prettify entire project

Rim
2025-04-02 06:50:39 -04:00
parent 86f0782a98
commit 7ccc0be712
1711 changed files with 755867 additions and 235931 deletions

node_modules/body-parser/index.js generated vendored

@@ -4,7 +4,7 @@
* MIT Licensed
*/
'use strict'
'use strict';
/**
* @typedef Parsers
@@ -20,7 +20,7 @@
* @type {Parsers}
*/
exports = module.exports = bodyParser
exports = module.exports = bodyParser;
/**
* JSON parser.
@@ -30,8 +30,8 @@ exports = module.exports = bodyParser
Object.defineProperty(exports, 'json', {
configurable: true,
enumerable: true,
get: () => require('./lib/types/json')
})
get: () => require('./lib/types/json'),
});
/**
* Raw parser.
@@ -41,8 +41,8 @@ Object.defineProperty(exports, 'json', {
Object.defineProperty(exports, 'raw', {
configurable: true,
enumerable: true,
get: () => require('./lib/types/raw')
})
get: () => require('./lib/types/raw'),
});
/**
* Text parser.
@@ -52,8 +52,8 @@ Object.defineProperty(exports, 'raw', {
Object.defineProperty(exports, 'text', {
configurable: true,
enumerable: true,
get: () => require('./lib/types/text')
})
get: () => require('./lib/types/text'),
});
/**
* URL-encoded parser.
@@ -63,8 +63,8 @@ Object.defineProperty(exports, 'text', {
Object.defineProperty(exports, 'urlencoded', {
configurable: true,
enumerable: true,
get: () => require('./lib/types/urlencoded')
})
get: () => require('./lib/types/urlencoded'),
});
/**
* Create a middleware to parse json and urlencoded bodies.
@@ -75,6 +75,8 @@ Object.defineProperty(exports, 'urlencoded', {
* @public
*/
function bodyParser () {
throw new Error('The bodyParser() generic has been split into individual middleware to use instead.')
function bodyParser() {
throw new Error(
'The bodyParser() generic has been split into individual middleware to use instead.'
);
}

node_modules/body-parser/lib/read.js generated vendored

@@ -4,24 +4,24 @@
* MIT Licensed
*/
'use strict'
'use strict';
/**
* Module dependencies.
* @private
*/
var createError = require('http-errors')
var getBody = require('raw-body')
var iconv = require('iconv-lite')
var onFinished = require('on-finished')
var zlib = require('node:zlib')
var createError = require('http-errors');
var getBody = require('raw-body');
var iconv = require('iconv-lite');
var onFinished = require('on-finished');
var zlib = require('node:zlib');
/**
* Module exports.
*/
module.exports = read
module.exports = read;
/**
* Read a request into a buffer and parse.
@@ -35,102 +35,113 @@ module.exports = read
* @private
*/
function read (req, res, next, parse, debug, options) {
var length
var opts = options
var stream
function read(req, res, next, parse, debug, options) {
var length;
var opts = options;
var stream;
// read options
var encoding = opts.encoding !== null
? opts.encoding
: null
var verify = opts.verify
var encoding = opts.encoding !== null ? opts.encoding : null;
var verify = opts.verify;
try {
// get the content stream
stream = contentstream(req, debug, opts.inflate)
length = stream.length
stream.length = undefined
stream = contentstream(req, debug, opts.inflate);
length = stream.length;
stream.length = undefined;
} catch (err) {
return next(err)
return next(err);
}
// set raw-body options
opts.length = length
opts.encoding = verify
? null
: encoding
opts.length = length;
opts.encoding = verify ? null : encoding;
// assert charset is supported
if (opts.encoding === null && encoding !== null && !iconv.encodingExists(encoding)) {
return next(createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', {
charset: encoding.toLowerCase(),
type: 'charset.unsupported'
}))
if (
opts.encoding === null &&
encoding !== null &&
!iconv.encodingExists(encoding)
) {
return next(
createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', {
charset: encoding.toLowerCase(),
type: 'charset.unsupported',
})
);
}
// read body
debug('read body')
debug('read body');
getBody(stream, opts, function (error, body) {
if (error) {
var _error
var _error;
if (error.type === 'encoding.unsupported') {
// echo back charset
_error = createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', {
charset: encoding.toLowerCase(),
type: 'charset.unsupported'
})
_error = createError(
415,
'unsupported charset "' + encoding.toUpperCase() + '"',
{
charset: encoding.toLowerCase(),
type: 'charset.unsupported',
}
);
} else {
// set status code on error
_error = createError(400, error)
_error = createError(400, error);
}
// unpipe from stream and destroy
if (stream !== req) {
req.unpipe()
stream.destroy()
req.unpipe();
stream.destroy();
}
// read off entire request
dump(req, function onfinished () {
next(createError(400, _error))
})
return
dump(req, function onfinished() {
next(createError(400, _error));
});
return;
}
// verify
if (verify) {
try {
debug('verify body')
verify(req, res, body, encoding)
debug('verify body');
verify(req, res, body, encoding);
} catch (err) {
next(createError(403, err, {
body: body,
type: err.type || 'entity.verify.failed'
}))
return
next(
createError(403, err, {
body: body,
type: err.type || 'entity.verify.failed',
})
);
return;
}
}
// parse
var str = body
var str = body;
try {
debug('parse body')
str = typeof body !== 'string' && encoding !== null
? iconv.decode(body, encoding)
: body
req.body = parse(str, encoding)
debug('parse body');
str =
typeof body !== 'string' && encoding !== null ?
iconv.decode(body, encoding)
: body;
req.body = parse(str, encoding);
} catch (err) {
next(createError(400, err, {
body: str,
type: err.type || 'entity.parse.failed'
}))
return
next(
createError(400, err, {
body: str,
type: err.type || 'entity.parse.failed',
})
);
return;
}
next()
})
next();
});
}
/**
@@ -143,27 +154,27 @@ function read (req, res, next, parse, debug, options) {
* @api private
*/
function contentstream (req, debug, inflate) {
var encoding = (req.headers['content-encoding'] || 'identity').toLowerCase()
var length = req.headers['content-length']
function contentstream(req, debug, inflate) {
var encoding = (req.headers['content-encoding'] || 'identity').toLowerCase();
var length = req.headers['content-length'];
debug('content-encoding "%s"', encoding)
debug('content-encoding "%s"', encoding);
if (inflate === false && encoding !== 'identity') {
throw createError(415, 'content encoding unsupported', {
encoding: encoding,
type: 'encoding.unsupported'
})
type: 'encoding.unsupported',
});
}
if (encoding === 'identity') {
req.length = length
return req
req.length = length;
return req;
}
var stream = createDecompressionStream(encoding, debug)
req.pipe(stream)
return stream
var stream = createDecompressionStream(encoding, debug);
req.pipe(stream);
return stream;
}
/**
@@ -173,22 +184,26 @@ function contentstream (req, debug, inflate) {
* @return {object}
* @api private
*/
function createDecompressionStream (encoding, debug) {
function createDecompressionStream(encoding, debug) {
switch (encoding) {
case 'deflate':
debug('inflate body')
return zlib.createInflate()
debug('inflate body');
return zlib.createInflate();
case 'gzip':
debug('gunzip body')
return zlib.createGunzip()
debug('gunzip body');
return zlib.createGunzip();
case 'br':
debug('brotli decompress body')
return zlib.createBrotliDecompress()
debug('brotli decompress body');
return zlib.createBrotliDecompress();
default:
throw createError(415, 'unsupported content encoding "' + encoding + '"', {
encoding: encoding,
type: 'encoding.unsupported'
})
throw createError(
415,
'unsupported content encoding "' + encoding + '"',
{
encoding: encoding,
type: 'encoding.unsupported',
}
);
}
}
@@ -200,11 +215,11 @@ function createDecompressionStream (encoding, debug) {
* @api private
*/
function dump (req, callback) {
function dump(req, callback) {
if (onFinished.isFinished(req)) {
callback(null)
callback(null);
} else {
onFinished(req, callback)
req.resume()
onFinished(req, callback);
req.resume();
}
}

node_modules/body-parser/lib/types/json.js generated vendored

@@ -5,25 +5,25 @@
* MIT Licensed
*/
'use strict'
'use strict';
/**
* Module dependencies.
* @private
*/
var createError = require('http-errors')
var debug = require('debug')('body-parser:json')
var isFinished = require('on-finished').isFinished
var read = require('../read')
var typeis = require('type-is')
var { getCharset, normalizeOptions } = require('../utils')
var createError = require('http-errors');
var debug = require('debug')('body-parser:json');
var isFinished = require('on-finished').isFinished;
var read = require('../read');
var typeis = require('type-is');
var { getCharset, normalizeOptions } = require('../utils');
/**
* Module exports.
*/
module.exports = json
module.exports = json;
/**
* RegExp to match the first non-space in a string.
@@ -37,10 +37,10 @@ module.exports = json
* %x0D ) ; Carriage return
*/
var FIRST_CHAR_REGEXP = /^[\x20\x09\x0a\x0d]*([^\x20\x09\x0a\x0d])/ // eslint-disable-line no-control-regex
var FIRST_CHAR_REGEXP = /^[\x20\x09\x0a\x0d]*([^\x20\x09\x0a\x0d])/; // eslint-disable-line no-control-regex
var JSON_SYNTAX_CHAR = '#'
var JSON_SYNTAX_REGEXP = /#+/g
var JSON_SYNTAX_CHAR = '#';
var JSON_SYNTAX_REGEXP = /#+/g;
/**
* Create a middleware to parse JSON bodies.
@@ -50,75 +50,84 @@ var JSON_SYNTAX_REGEXP = /#+/g
* @public
*/
function json (options) {
var { inflate, limit, verify, shouldParse } = normalizeOptions(options, 'application/json')
function json(options) {
var { inflate, limit, verify, shouldParse } = normalizeOptions(
options,
'application/json'
);
var reviver = options?.reviver
var strict = options?.strict !== false
var reviver = options?.reviver;
var strict = options?.strict !== false;
function parse (body) {
function parse(body) {
if (body.length === 0) {
// special-case empty json body, as it's a common client-side mistake
// TODO: maybe make this configurable or part of "strict" option
return {}
return {};
}
if (strict) {
var first = firstchar(body)
var first = firstchar(body);
if (first !== '{' && first !== '[') {
debug('strict violation')
throw createStrictSyntaxError(body, first)
debug('strict violation');
throw createStrictSyntaxError(body, first);
}
}
try {
debug('parse json')
return JSON.parse(body, reviver)
debug('parse json');
return JSON.parse(body, reviver);
} catch (e) {
throw normalizeJsonSyntaxError(e, {
message: e.message,
stack: e.stack
})
stack: e.stack,
});
}
}
return function jsonParser (req, res, next) {
return function jsonParser(req, res, next) {
if (isFinished(req)) {
debug('body already parsed')
next()
return
debug('body already parsed');
next();
return;
}
if (!('body' in req)) {
req.body = undefined
req.body = undefined;
}
// skip requests without bodies
if (!typeis.hasBody(req)) {
debug('skip empty body')
next()
return
debug('skip empty body');
next();
return;
}
debug('content-type %j', req.headers['content-type'])
debug('content-type %j', req.headers['content-type']);
// determine if request should be parsed
if (!shouldParse(req)) {
debug('skip parsing')
next()
return
debug('skip parsing');
next();
return;
}
// assert charset per RFC 7159 sec 8.1
var charset = getCharset(req) || 'utf-8'
var charset = getCharset(req) || 'utf-8';
if (charset.slice(0, 4) !== 'utf-') {
debug('invalid charset')
next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', {
charset: charset,
type: 'charset.unsupported'
}))
return
debug('invalid charset');
next(
createError(
415,
'unsupported charset "' + charset.toUpperCase() + '"',
{
charset: charset,
type: 'charset.unsupported',
}
)
);
return;
}
// read
@@ -126,9 +135,9 @@ function json (options) {
encoding: charset,
inflate,
limit,
verify
})
}
verify,
});
};
}
/**
@@ -140,27 +149,28 @@ function json (options) {
* @private
*/
function createStrictSyntaxError (str, char) {
var index = str.indexOf(char)
var partial = ''
function createStrictSyntaxError(str, char) {
var index = str.indexOf(char);
var partial = '';
if (index !== -1) {
partial = str.substring(0, index) + JSON_SYNTAX_CHAR
partial = str.substring(0, index) + JSON_SYNTAX_CHAR;
for (var i = index + 1; i < str.length; i++) {
partial += JSON_SYNTAX_CHAR
partial += JSON_SYNTAX_CHAR;
}
}
try {
JSON.parse(partial); /* istanbul ignore next */ throw new SyntaxError('strict violation')
JSON.parse(partial);
/* istanbul ignore next */ throw new SyntaxError('strict violation');
} catch (e) {
return normalizeJsonSyntaxError(e, {
message: e.message.replace(JSON_SYNTAX_REGEXP, function (placeholder) {
return str.substring(index, index + placeholder.length)
return str.substring(index, index + placeholder.length);
}),
stack: e.stack
})
stack: e.stack,
});
}
}
@@ -172,12 +182,10 @@ function createStrictSyntaxError (str, char) {
* @private
*/
function firstchar (str) {
var match = FIRST_CHAR_REGEXP.exec(str)
function firstchar(str) {
var match = FIRST_CHAR_REGEXP.exec(str);
return match
? match[1]
: undefined
return match ? match[1] : undefined;
}
/**
@@ -188,19 +196,19 @@ function firstchar (str) {
* @return {SyntaxError}
*/
function normalizeJsonSyntaxError (error, obj) {
var keys = Object.getOwnPropertyNames(error)
function normalizeJsonSyntaxError(error, obj) {
var keys = Object.getOwnPropertyNames(error);
for (var i = 0; i < keys.length; i++) {
var key = keys[i]
var key = keys[i];
if (key !== 'stack' && key !== 'message') {
delete error[key]
delete error[key];
}
}
// replace stack before message for Node.js 0.10 and below
error.stack = obj.stack.replace(error.message, obj.message)
error.message = obj.message
error.stack = obj.stack.replace(error.message, obj.message);
error.message = obj.message;
return error
return error;
}

node_modules/body-parser/lib/types/raw.js generated vendored

@@ -4,23 +4,23 @@
* MIT Licensed
*/
'use strict'
'use strict';
/**
* Module dependencies.
*/
var debug = require('debug')('body-parser:raw')
var isFinished = require('on-finished').isFinished
var read = require('../read')
var typeis = require('type-is')
var { normalizeOptions } = require('../utils')
var debug = require('debug')('body-parser:raw');
var isFinished = require('on-finished').isFinished;
var read = require('../read');
var typeis = require('type-is');
var { normalizeOptions } = require('../utils');
/**
* Module exports.
*/
module.exports = raw
module.exports = raw;
/**
* Create a middleware to parse raw bodies.
@@ -30,38 +30,41 @@ module.exports = raw
* @api public
*/
function raw (options) {
var { inflate, limit, verify, shouldParse } = normalizeOptions(options, 'application/octet-stream')
function raw(options) {
var { inflate, limit, verify, shouldParse } = normalizeOptions(
options,
'application/octet-stream'
);
function parse (buf) {
return buf
function parse(buf) {
return buf;
}
return function rawParser (req, res, next) {
return function rawParser(req, res, next) {
if (isFinished(req)) {
debug('body already parsed')
next()
return
debug('body already parsed');
next();
return;
}
if (!('body' in req)) {
req.body = undefined
req.body = undefined;
}
// skip requests without bodies
if (!typeis.hasBody(req)) {
debug('skip empty body')
next()
return
debug('skip empty body');
next();
return;
}
debug('content-type %j', req.headers['content-type'])
debug('content-type %j', req.headers['content-type']);
// determine if request should be parsed
if (!shouldParse(req)) {
debug('skip parsing')
next()
return
debug('skip parsing');
next();
return;
}
// read
@@ -69,7 +72,7 @@ function raw (options) {
encoding: null,
inflate,
limit,
verify
})
}
verify,
});
};
}

node_modules/body-parser/lib/types/text.js generated vendored

@@ -4,23 +4,23 @@
* MIT Licensed
*/
'use strict'
'use strict';
/**
* Module dependencies.
*/
var debug = require('debug')('body-parser:text')
var isFinished = require('on-finished').isFinished
var read = require('../read')
var typeis = require('type-is')
var { getCharset, normalizeOptions } = require('../utils')
var debug = require('debug')('body-parser:text');
var isFinished = require('on-finished').isFinished;
var read = require('../read');
var typeis = require('type-is');
var { getCharset, normalizeOptions } = require('../utils');
/**
* Module exports.
*/
module.exports = text
module.exports = text;
/**
* Create a middleware to parse text bodies.
@@ -30,51 +30,54 @@ module.exports = text
* @api public
*/
function text (options) {
var { inflate, limit, verify, shouldParse } = normalizeOptions(options, 'text/plain')
function text(options) {
var { inflate, limit, verify, shouldParse } = normalizeOptions(
options,
'text/plain'
);
var defaultCharset = options?.defaultCharset || 'utf-8'
var defaultCharset = options?.defaultCharset || 'utf-8';
function parse (buf) {
return buf
function parse(buf) {
return buf;
}
return function textParser (req, res, next) {
return function textParser(req, res, next) {
if (isFinished(req)) {
debug('body already parsed')
next()
return
debug('body already parsed');
next();
return;
}
if (!('body' in req)) {
req.body = undefined
req.body = undefined;
}
// skip requests without bodies
if (!typeis.hasBody(req)) {
debug('skip empty body')
next()
return
debug('skip empty body');
next();
return;
}
debug('content-type %j', req.headers['content-type'])
debug('content-type %j', req.headers['content-type']);
// determine if request should be parsed
if (!shouldParse(req)) {
debug('skip parsing')
next()
return
debug('skip parsing');
next();
return;
}
// get charset
var charset = getCharset(req) || defaultCharset
var charset = getCharset(req) || defaultCharset;
// read
read(req, res, next, parse, debug, {
encoding: charset,
inflate,
limit,
verify
})
}
verify,
});
};
}

node_modules/body-parser/lib/types/urlencoded.js generated vendored

@@ -5,26 +5,26 @@
* MIT Licensed
*/
'use strict'
'use strict';
/**
* Module dependencies.
* @private
*/
var createError = require('http-errors')
var debug = require('debug')('body-parser:urlencoded')
var isFinished = require('on-finished').isFinished
var read = require('../read')
var typeis = require('type-is')
var qs = require('qs')
var { getCharset, normalizeOptions } = require('../utils')
var createError = require('http-errors');
var debug = require('debug')('body-parser:urlencoded');
var isFinished = require('on-finished').isFinished;
var read = require('../read');
var typeis = require('type-is');
var qs = require('qs');
var { getCharset, normalizeOptions } = require('../utils');
/**
* Module exports.
*/
module.exports = urlencoded
module.exports = urlencoded;
/**
* Create a middleware to parse urlencoded bodies.
@@ -34,59 +34,68 @@ module.exports = urlencoded
* @public
*/
function urlencoded (options) {
var { inflate, limit, verify, shouldParse } = normalizeOptions(options, 'application/x-www-form-urlencoded')
function urlencoded(options) {
var { inflate, limit, verify, shouldParse } = normalizeOptions(
options,
'application/x-www-form-urlencoded'
);
var defaultCharset = options?.defaultCharset || 'utf-8'
var defaultCharset = options?.defaultCharset || 'utf-8';
if (defaultCharset !== 'utf-8' && defaultCharset !== 'iso-8859-1') {
throw new TypeError('option defaultCharset must be either utf-8 or iso-8859-1')
throw new TypeError(
'option defaultCharset must be either utf-8 or iso-8859-1'
);
}
// create the appropriate query parser
var queryparse = createQueryParser(options)
var queryparse = createQueryParser(options);
function parse (body, encoding) {
return body.length
? queryparse(body, encoding)
: {}
function parse(body, encoding) {
return body.length ? queryparse(body, encoding) : {};
}
return function urlencodedParser (req, res, next) {
return function urlencodedParser(req, res, next) {
if (isFinished(req)) {
debug('body already parsed')
next()
return
debug('body already parsed');
next();
return;
}
if (!('body' in req)) {
req.body = undefined
req.body = undefined;
}
// skip requests without bodies
if (!typeis.hasBody(req)) {
debug('skip empty body')
next()
return
debug('skip empty body');
next();
return;
}
debug('content-type %j', req.headers['content-type'])
debug('content-type %j', req.headers['content-type']);
// determine if request should be parsed
if (!shouldParse(req)) {
debug('skip parsing')
next()
return
debug('skip parsing');
next();
return;
}
// assert charset
var charset = getCharset(req) || defaultCharset
var charset = getCharset(req) || defaultCharset;
if (charset !== 'utf-8' && charset !== 'iso-8859-1') {
debug('invalid charset')
next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', {
charset: charset,
type: 'charset.unsupported'
}))
return
debug('invalid charset');
next(
createError(
415,
'unsupported charset "' + charset.toUpperCase() + '"',
{
charset: charset,
type: 'charset.unsupported',
}
)
);
return;
}
// read
@@ -94,9 +103,9 @@ function urlencoded (options) {
encoding: charset,
inflate,
limit,
verify
})
}
verify,
});
};
}
/**
@@ -105,40 +114,44 @@ function urlencoded (options) {
* @param {object} options
*/
function createQueryParser (options) {
var extended = Boolean(options?.extended)
var parameterLimit = options?.parameterLimit !== undefined
? options?.parameterLimit
: 1000
var charsetSentinel = options?.charsetSentinel
var interpretNumericEntities = options?.interpretNumericEntities
var depth = extended ? (options?.depth !== undefined ? options?.depth : 32) : 0
function createQueryParser(options) {
var extended = Boolean(options?.extended);
var parameterLimit =
options?.parameterLimit !== undefined ? options?.parameterLimit : 1000;
var charsetSentinel = options?.charsetSentinel;
var interpretNumericEntities = options?.interpretNumericEntities;
var depth =
extended ?
options?.depth !== undefined ?
options?.depth
: 32
: 0;
if (isNaN(parameterLimit) || parameterLimit < 1) {
throw new TypeError('option parameterLimit must be a positive number')
throw new TypeError('option parameterLimit must be a positive number');
}
if (isNaN(depth) || depth < 0) {
throw new TypeError('option depth must be a zero or a positive number')
throw new TypeError('option depth must be a zero or a positive number');
}
if (isFinite(parameterLimit)) {
parameterLimit = parameterLimit | 0
parameterLimit = parameterLimit | 0;
}
return function queryparse (body, encoding) {
var paramCount = parameterCount(body, parameterLimit)
return function queryparse(body, encoding) {
var paramCount = parameterCount(body, parameterLimit);
if (paramCount === undefined) {
debug('too many parameters')
debug('too many parameters');
throw createError(413, 'too many parameters', {
type: 'parameters.too.many'
})
type: 'parameters.too.many',
});
}
var arrayLimit = extended ? Math.max(100, paramCount) : 0
var arrayLimit = extended ? Math.max(100, paramCount) : 0;
debug('parse ' + (extended ? 'extended ' : '') + 'urlencoding')
debug('parse ' + (extended ? 'extended ' : '') + 'urlencoding');
try {
return qs.parse(body, {
allowPrototypes: true,
@@ -148,18 +161,18 @@ function createQueryParser (options) {
interpretNumericEntities: interpretNumericEntities,
charset: encoding,
parameterLimit: parameterLimit,
strictDepth: true
})
strictDepth: true,
});
} catch (err) {
if (err instanceof RangeError) {
throw createError(400, 'The input exceeded the depth', {
type: 'querystring.parse.rangeError'
})
type: 'querystring.parse.rangeError',
});
} else {
throw err
throw err;
}
}
}
};
}
/**
@@ -170,8 +183,8 @@ function createQueryParser (options) {
* @api private
*/
function parameterCount (body, limit) {
var len = body.split('&').length
function parameterCount(body, limit) {
var len = body.split('&').length;
return len > limit ? undefined : len - 1
return len > limit ? undefined : len - 1;
}

node_modules/body-parser/lib/utils.js generated vendored

@@ -1,12 +1,12 @@
'use strict'
'use strict';
/**
* Module dependencies.
*/
var bytes = require('bytes')
var contentType = require('content-type')
var typeis = require('type-is')
var bytes = require('bytes');
var contentType = require('content-type');
var typeis = require('type-is');
/**
* Module exports.
@@ -14,8 +14,8 @@ var typeis = require('type-is')
module.exports = {
getCharset,
normalizeOptions
}
normalizeOptions,
};
/**
* Get the charset of a request.
@@ -24,11 +24,11 @@ module.exports = {
* @api private
*/
function getCharset (req) {
function getCharset(req) {
try {
return (contentType.parse(req).parameters.charset || '').toLowerCase()
return (contentType.parse(req).parameters.charset || '').toLowerCase();
} catch {
return undefined
return undefined;
}
}
@@ -39,10 +39,10 @@ function getCharset (req) {
* @return {function}
*/
function typeChecker (type) {
return function checkType (req) {
return Boolean(typeis(req, type))
}
function typeChecker(type) {
return function checkType(req) {
return Boolean(typeis(req, type));
};
}
/**
@@ -52,32 +52,31 @@ function typeChecker (type) {
* @param {string | string[] | function} defaultType default content type(s) or a function to determine it
* @returns {object}
*/
function normalizeOptions (options, defaultType) {
function normalizeOptions(options, defaultType) {
if (!defaultType) {
// Parsers must define a default content type
throw new TypeError('defaultType must be provided')
throw new TypeError('defaultType must be provided');
}
var inflate = options?.inflate !== false
var limit = typeof options?.limit !== 'number'
? bytes.parse(options?.limit || '100kb')
: options?.limit
var type = options?.type || defaultType
var verify = options?.verify || false
var inflate = options?.inflate !== false;
var limit =
typeof options?.limit !== 'number' ?
bytes.parse(options?.limit || '100kb')
: options?.limit;
var type = options?.type || defaultType;
var verify = options?.verify || false;
if (verify !== false && typeof verify !== 'function') {
throw new TypeError('option verify must be function')
throw new TypeError('option verify must be function');
}
// create the appropriate type checking function
var shouldParse = typeof type !== 'function'
? typeChecker(type)
: type
var shouldParse = typeof type !== 'function' ? typeChecker(type) : type;
return {
inflate,
limit,
verify,
shouldParse
}
shouldParse,
};
}