chore: update deps
148  node_modules/undici/lib/agent.js  generated  vendored
@@ -1,148 +0,0 @@
'use strict'

const { InvalidArgumentError } = require('./core/errors')
const { kClients, kRunning, kClose, kDestroy, kDispatch, kInterceptors } = require('./core/symbols')
const DispatcherBase = require('./dispatcher-base')
const Pool = require('./pool')
const Client = require('./client')
const util = require('./core/util')
const createRedirectInterceptor = require('./interceptor/redirectInterceptor')
const { WeakRef, FinalizationRegistry } = require('./compat/dispatcher-weakref')()

const kOnConnect = Symbol('onConnect')
const kOnDisconnect = Symbol('onDisconnect')
const kOnConnectionError = Symbol('onConnectionError')
const kMaxRedirections = Symbol('maxRedirections')
const kOnDrain = Symbol('onDrain')
const kFactory = Symbol('factory')
const kFinalizer = Symbol('finalizer')
const kOptions = Symbol('options')

function defaultFactory (origin, opts) {
  return opts && opts.connections === 1
    ? new Client(origin, opts)
    : new Pool(origin, opts)
}

class Agent extends DispatcherBase {
  constructor ({ factory = defaultFactory, maxRedirections = 0, connect, ...options } = {}) {
    super()

    if (typeof factory !== 'function') {
      throw new InvalidArgumentError('factory must be a function.')
    }

    if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
      throw new InvalidArgumentError('connect must be a function or an object')
    }

    if (!Number.isInteger(maxRedirections) || maxRedirections < 0) {
      throw new InvalidArgumentError('maxRedirections must be a positive number')
    }

    if (connect && typeof connect !== 'function') {
      connect = { ...connect }
    }

    this[kInterceptors] = options.interceptors && options.interceptors.Agent && Array.isArray(options.interceptors.Agent)
      ? options.interceptors.Agent
      : [createRedirectInterceptor({ maxRedirections })]

    this[kOptions] = { ...util.deepClone(options), connect }
    this[kOptions].interceptors = options.interceptors
      ? { ...options.interceptors }
      : undefined
    this[kMaxRedirections] = maxRedirections
    this[kFactory] = factory
    this[kClients] = new Map()
    this[kFinalizer] = new FinalizationRegistry(/* istanbul ignore next: gc is undeterministic */ key => {
      const ref = this[kClients].get(key)
      if (ref !== undefined && ref.deref() === undefined) {
        this[kClients].delete(key)
      }
    })

    const agent = this

    this[kOnDrain] = (origin, targets) => {
      agent.emit('drain', origin, [agent, ...targets])
    }

    this[kOnConnect] = (origin, targets) => {
      agent.emit('connect', origin, [agent, ...targets])
    }

    this[kOnDisconnect] = (origin, targets, err) => {
      agent.emit('disconnect', origin, [agent, ...targets], err)
    }

    this[kOnConnectionError] = (origin, targets, err) => {
      agent.emit('connectionError', origin, [agent, ...targets], err)
    }
  }

  get [kRunning] () {
    let ret = 0
    for (const ref of this[kClients].values()) {
      const client = ref.deref()
      /* istanbul ignore next: gc is undeterministic */
      if (client) {
        ret += client[kRunning]
      }
    }
    return ret
  }

  [kDispatch] (opts, handler) {
    let key
    if (opts.origin && (typeof opts.origin === 'string' || opts.origin instanceof URL)) {
      key = String(opts.origin)
    } else {
      throw new InvalidArgumentError('opts.origin must be a non-empty string or URL.')
    }

    const ref = this[kClients].get(key)

    let dispatcher = ref ? ref.deref() : null
    if (!dispatcher) {
      dispatcher = this[kFactory](opts.origin, this[kOptions])
        .on('drain', this[kOnDrain])
        .on('connect', this[kOnConnect])
        .on('disconnect', this[kOnDisconnect])
        .on('connectionError', this[kOnConnectionError])

      this[kClients].set(key, new WeakRef(dispatcher))
      this[kFinalizer].register(dispatcher, key)
    }

    return dispatcher.dispatch(opts, handler)
  }

  async [kClose] () {
    const closePromises = []
    for (const ref of this[kClients].values()) {
      const client = ref.deref()
      /* istanbul ignore else: gc is undeterministic */
      if (client) {
        closePromises.push(client.close())
      }
    }

    await Promise.all(closePromises)
  }

  async [kDestroy] (err) {
    const destroyPromises = []
    for (const ref of this[kClients].values()) {
      const client = ref.deref()
      /* istanbul ignore else: gc is undeterministic */
      if (client) {
        destroyPromises.push(client.destroy(err))
      }
    }

    await Promise.all(destroyPromises)
  }
}

module.exports = Agent
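For orientation, the deleted lib/agent.js above kept one dispatcher per origin: the default factory builds a Client when `connections === 1` and a Pool otherwise, caches it in a WeakRef-keyed Map, and reuses it for every request to that origin. A minimal usage sketch, assuming only undici's documented top-level API (`Agent`, `request`, and the `dispatcher` option):

```js
'use strict'

// Sketch, assuming undici's documented top-level API.
const { Agent, request } = require('undici')

// connections: 1 makes the default factory pick a Client over a Pool.
const agent = new Agent({ connections: 1 })

async function main () {
  // Repeated requests to the same origin reuse the cached dispatcher.
  const { statusCode, body } = await request('https://example.com', { dispatcher: agent })
  console.log(statusCode)
  await body.text() // consume the body so the connection can be reused
  await agent.close()
}

main().catch(console.error)
```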
9  node_modules/undici/lib/api/abort-signal.js  generated  vendored
@@ -1,3 +1,5 @@
'use strict'

const { addAbortListener } = require('../core/util')
const { RequestAbortedError } = require('../core/errors')

@@ -6,13 +8,16 @@ const kSignal = Symbol('kSignal')

function abort (self) {
  if (self.abort) {
    self.abort()
    self.abort(self[kSignal]?.reason)
  } else {
    self.onError(new RequestAbortedError())
    self.reason = self[kSignal]?.reason ?? new RequestAbortedError()
  }
  removeSignal(self)
}

function addSignal (self, signal) {
  self.reason = null

  self[kSignal] = null
  self[kListener] = null

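The abort-signal change above threads the signal's own `reason` through (`self.abort(self[kSignal]?.reason)` / `self.reason = signal.reason ?? new RequestAbortedError()`), so callers should see their abort reason rather than a generic `RequestAbortedError`. A small sketch of the intended effect, assuming the documented `request` API:

```js
'use strict'

// Sketch of the intended behaviour, assuming undici's documented request API.
const { request } = require('undici')

const controller = new AbortController()
controller.abort(new Error('user cancelled')) // custom abort reason

request('https://example.com', { signal: controller.signal })
  .catch((err) => {
    // With the reason-forwarding change, this should be the abort
    // reason itself rather than a bare RequestAbortedError.
    console.log(err.message)
  })
```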
18  node_modules/undici/lib/api/api-connect.js  generated  vendored
@@ -1,7 +1,8 @@
'use strict'

const { AsyncResource } = require('async_hooks')
const { InvalidArgumentError, RequestAbortedError, SocketError } = require('../core/errors')
const assert = require('node:assert')
const { AsyncResource } = require('node:async_hooks')
const { InvalidArgumentError, SocketError } = require('../core/errors')
const util = require('../core/util')
const { addSignal, removeSignal } = require('./abort-signal')

@@ -32,10 +33,13 @@ class ConnectHandler extends AsyncResource {
  }

  onConnect (abort, context) {
    if (!this.callback) {
      throw new RequestAbortedError()
    if (this.reason) {
      abort(this.reason)
      return
    }

    assert(this.callback)

    this.abort = abort
    this.context = context
  }
@@ -91,12 +95,14 @@ function connect (opts, callback) {

  try {
    const connectHandler = new ConnectHandler(opts, callback)
    this.dispatch({ ...opts, method: 'CONNECT' }, connectHandler)
    const connectOptions = { ...opts, method: 'CONNECT' }

    this.dispatch(connectOptions, connectHandler)
  } catch (err) {
    if (typeof callback !== 'function') {
      throw err
    }
    const opaque = opts && opts.opaque
    const opaque = opts?.opaque
    queueMicrotask(() => callback(err, { opaque }))
  }
}
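For reference, the handler above backs undici's documented `connect(url, opts)` helper, which issues an HTTP CONNECT and hands back the raw socket. A sketch; the proxy URL and target are placeholders:

```js
'use strict'

// Sketch of the documented CONNECT helper; proxy URL and target are placeholders.
const { connect } = require('undici')

async function main () {
  const { statusCode, socket } = await connect('http://proxy.example:3128', {
    path: 'example.com:443' // CONNECT request target
  })
  console.log('tunnel established, status', statusCode)
  socket.end()
}

main().catch(console.error)
```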
25  node_modules/undici/lib/api/api-pipeline.js  generated  vendored
@@ -4,16 +4,18 @@ const {
  Readable,
  Duplex,
  PassThrough
} = require('stream')
} = require('node:stream')
const assert = require('node:assert')
const { AsyncResource } = require('node:async_hooks')
const {
  InvalidArgumentError,
  InvalidReturnValueError,
  RequestAbortedError
} = require('../core/errors')
const util = require('../core/util')
const { AsyncResource } = require('async_hooks')
const { addSignal, removeSignal } = require('./abort-signal')
const assert = require('assert')

function noop () {}

const kResume = Symbol('resume')

@@ -92,7 +94,7 @@ class PipelineHandler extends AsyncResource {
    this.context = null
    this.onInfo = onInfo || null

    this.req = new PipelineRequest().on('error', util.nop)
    this.req = new PipelineRequest().on('error', noop)

    this.ret = new Duplex({
      readableObjectMode: opts.objectMode,
@@ -100,7 +102,7 @@ class PipelineHandler extends AsyncResource {
      read: () => {
        const { body } = this

        if (body && body.resume) {
        if (body?.resume) {
          body.resume()
        }
      },
@@ -145,14 +147,15 @@ class PipelineHandler extends AsyncResource {
  }

  onConnect (abort, context) {
    const { ret, res } = this
    const { res } = this

    if (this.reason) {
      abort(this.reason)
      return
    }

    assert(!res, 'pipeline cannot be retried')

    if (ret.destroyed) {
      throw new RequestAbortedError()
    }

    this.abort = abort
    this.context = context
  }
@@ -182,7 +185,7 @@ class PipelineHandler extends AsyncResource {
      context
    })
  } catch (err) {
    this.res.on('error', util.nop)
    this.res.on('error', noop)
    throw err
  }

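The `pipeline()` API whose handler changed above turns a request into a Duplex that can sit inside a stream pipeline. A sketch modelled on undici's documented usage; the localhost URL is a placeholder:

```js
'use strict'

// Sketch modelled on undici's documented pipeline usage; the URL is a placeholder.
const { Readable, Writable, pipeline: streamPipeline } = require('node:stream')
const { pipeline } = require('undici')

streamPipeline(
  new Readable({
    read () {
      this.push(Buffer.from('ping')) // request body
      this.push(null)
    }
  }),
  pipeline('http://localhost:3000', { method: 'POST' }, ({ statusCode, body }) => {
    console.log('status', statusCode)
    return body // the handler must return something Readable-like
  }),
  new Writable({
    write (chunk, encoding, callback) {
      callback() // discard the response body
    }
  }),
  (err) => {
    if (err) console.error('pipeline failed', err)
  }
)
```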
115  node_modules/undici/lib/api/api-request.js  generated  vendored
@@ -1,14 +1,12 @@
'use strict'

const Readable = require('./readable')
const {
  InvalidArgumentError,
  RequestAbortedError
} = require('../core/errors')
const assert = require('node:assert')
const { AsyncResource } = require('node:async_hooks')
const { Readable } = require('./readable')
const { InvalidArgumentError, RequestAbortedError } = require('../core/errors')
const util = require('../core/util')
const { getResolveErrorBodyCallback } = require('./util')
const { AsyncResource } = require('async_hooks')
const { addSignal, removeSignal } = require('./abort-signal')

function noop () {}

class RequestHandler extends AsyncResource {
  constructor (opts, callback) {
@@ -16,7 +14,7 @@ class RequestHandler extends AsyncResource {
      throw new InvalidArgumentError('invalid opts')
    }

    const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError, highWaterMark } = opts
    const { signal, method, opaque, body, onInfo, responseHeaders, highWaterMark } = opts

    try {
      if (typeof callback !== 'function') {
@@ -42,11 +40,12 @@ class RequestHandler extends AsyncResource {
      super('UNDICI_REQUEST')
    } catch (err) {
      if (util.isStream(body)) {
        util.destroy(body.on('error', util.nop), err)
        util.destroy(body.on('error', noop), err)
      }
      throw err
    }

    this.method = method
    this.responseHeaders = responseHeaders || null
    this.opaque = opaque || null
    this.callback = callback
@@ -56,23 +55,32 @@ class RequestHandler extends AsyncResource {
    this.trailers = {}
    this.context = null
    this.onInfo = onInfo || null
    this.throwOnError = throwOnError
    this.highWaterMark = highWaterMark
    this.reason = null
    this.removeAbortListener = null

    if (util.isStream(body)) {
      body.on('error', (err) => {
        this.onError(err)
    if (signal?.aborted) {
      this.reason = signal.reason ?? new RequestAbortedError()
    } else if (signal) {
      this.removeAbortListener = util.addAbortListener(signal, () => {
        this.reason = signal.reason ?? new RequestAbortedError()
        if (this.res) {
          util.destroy(this.res.on('error', noop), this.reason)
        } else if (this.abort) {
          this.abort(this.reason)
        }
      })
    }

    addSignal(this, signal)
  }

  onConnect (abort, context) {
    if (!this.callback) {
      throw new RequestAbortedError()
    if (this.reason) {
      abort(this.reason)
      return
    }

    assert(this.callback)

    this.abort = abort
    this.context = context
  }
@@ -91,48 +99,48 @@ class RequestHandler extends AsyncResource {

    const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers
    const contentType = parsedHeaders['content-type']
    const body = new Readable({ resume, abort, contentType, highWaterMark })
    const contentLength = parsedHeaders['content-length']
    const res = new Readable({
      resume,
      abort,
      contentType,
      contentLength: this.method !== 'HEAD' && contentLength
        ? Number(contentLength)
        : null,
      highWaterMark
    })

    if (this.removeAbortListener) {
      res.on('close', this.removeAbortListener)
      this.removeAbortListener = null
    }

    this.callback = null
    this.res = body
    this.res = res
    if (callback !== null) {
      if (this.throwOnError && statusCode >= 400) {
        this.runInAsyncScope(getResolveErrorBodyCallback, null,
          { callback, body, contentType, statusCode, statusMessage, headers }
        )
      } else {
        this.runInAsyncScope(callback, null, null, {
          statusCode,
          headers,
          trailers: this.trailers,
          opaque,
          body,
          context
        })
      }
      this.runInAsyncScope(callback, null, null, {
        statusCode,
        headers,
        trailers: this.trailers,
        opaque,
        body: res,
        context
      })
    }
  }

  onData (chunk) {
    const { res } = this
    return res.push(chunk)
    return this.res.push(chunk)
  }

  onComplete (trailers) {
    const { res } = this

    removeSignal(this)

    util.parseHeaders(trailers, this.trailers)

    res.push(null)
    this.res.push(null)
  }

  onError (err) {
    const { res, callback, body, opaque } = this

    removeSignal(this)

    if (callback) {
      // TODO: Does this need queueMicrotask?
      this.callback = null
@@ -145,13 +153,22 @@ class RequestHandler extends AsyncResource {
      this.res = null
      // Ensure all queued handlers are invoked before destroying res.
      queueMicrotask(() => {
        util.destroy(res, err)
        util.destroy(res.on('error', noop), err)
      })
    }

    if (body) {
      this.body = null
      util.destroy(body, err)

      if (util.isStream(body)) {
        body.on('error', noop)
        util.destroy(body, err)
      }
    }

    if (this.removeAbortListener) {
      this.removeAbortListener()
      this.removeAbortListener = null
    }
  }
}
@@ -166,12 +183,14 @@ function request (opts, callback) {
  }

  try {
    this.dispatch(opts, new RequestHandler(opts, callback))
    const handler = new RequestHandler(opts, callback)

    this.dispatch(opts, handler)
  } catch (err) {
    if (typeof callback !== 'function') {
      throw err
    }
    const opaque = opts && opts.opaque
    const opaque = opts?.opaque
    queueMicrotask(() => callback(err, { opaque }))
  }
}
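Note the diff above drops the `throwOnError` path from `RequestHandler`, so error statuses resolve normally and stay the caller's job. A usage sketch, assuming the documented promise form of `request`:

```js
'use strict'

// Sketch, assuming undici's documented promise form of request().
const { request } = require('undici')

async function main () {
  const { statusCode, headers, body } = await request('https://example.com')

  // With throwOnError gone from the handler, a 4xx/5xx still resolves;
  // inspect the status and consume the body yourself.
  if (statusCode >= 400) {
    console.error('request failed:', statusCode, await body.text())
    return
  }

  console.log(headers['content-type'])
  console.log((await body.text()).length)
}

main().catch(console.error)
```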
113  node_modules/undici/lib/api/api-stream.js  generated  vendored
@@ -1,23 +1,21 @@
'use strict'

const { finished, PassThrough } = require('stream')
const {
  InvalidArgumentError,
  InvalidReturnValueError,
  RequestAbortedError
} = require('../core/errors')
const assert = require('node:assert')
const { finished } = require('node:stream')
const { AsyncResource } = require('node:async_hooks')
const { InvalidArgumentError, InvalidReturnValueError } = require('../core/errors')
const util = require('../core/util')
const { getResolveErrorBodyCallback } = require('./util')
const { AsyncResource } = require('async_hooks')
const { addSignal, removeSignal } = require('./abort-signal')

function noop () {}

class StreamHandler extends AsyncResource {
  constructor (opts, factory, callback) {
    if (!opts || typeof opts !== 'object') {
      throw new InvalidArgumentError('invalid opts')
    }

    const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError } = opts
    const { signal, method, opaque, body, onInfo, responseHeaders } = opts

    try {
      if (typeof callback !== 'function') {
@@ -43,7 +41,7 @@ class StreamHandler extends AsyncResource {
      super('UNDICI_STREAM')
    } catch (err) {
      if (util.isStream(body)) {
        util.destroy(body.on('error', util.nop), err)
        util.destroy(body.on('error', noop), err)
      }
      throw err
    }
@@ -58,7 +56,6 @@ class StreamHandler extends AsyncResource {
    this.trailers = null
    this.body = body
    this.onInfo = onInfo || null
    this.throwOnError = throwOnError || false

    if (util.isStream(body)) {
      body.on('error', (err) => {
@@ -70,16 +67,19 @@ class StreamHandler extends AsyncResource {
  }

  onConnect (abort, context) {
    if (!this.callback) {
      throw new RequestAbortedError()
    if (this.reason) {
      abort(this.reason)
      return
    }

    assert(this.callback)

    this.abort = abort
    this.context = context
  }

  onHeaders (statusCode, rawHeaders, resume, statusMessage) {
    const { factory, opaque, context, callback, responseHeaders } = this
    const { factory, opaque, context, responseHeaders } = this

    const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)

@@ -92,55 +92,42 @@ class StreamHandler extends AsyncResource {

    this.factory = null

    let res
    if (factory === null) {
      return
    }

    if (this.throwOnError && statusCode >= 400) {
      const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers
      const contentType = parsedHeaders['content-type']
      res = new PassThrough()
    const res = this.runInAsyncScope(factory, null, {
      statusCode,
      headers,
      opaque,
      context
    })

    if (
      !res ||
      typeof res.write !== 'function' ||
      typeof res.end !== 'function' ||
      typeof res.on !== 'function'
    ) {
      throw new InvalidReturnValueError('expected Writable')
    }

    // TODO: Avoid finished. It registers an unnecessary amount of listeners.
    finished(res, { readable: false }, (err) => {
      const { callback, res, opaque, trailers, abort } = this

      this.res = null
      if (err || !res.readable) {
        util.destroy(res, err)
      }

      this.callback = null
      this.runInAsyncScope(getResolveErrorBodyCallback, null,
        { callback, body: res, contentType, statusCode, statusMessage, headers }
      )
    } else {
      if (factory === null) {
        return
      this.runInAsyncScope(callback, null, err || null, { opaque, trailers })

      if (err) {
        abort()
      }

      res = this.runInAsyncScope(factory, null, {
        statusCode,
        headers,
        opaque,
        context
      })

      if (
        !res ||
        typeof res.write !== 'function' ||
        typeof res.end !== 'function' ||
        typeof res.on !== 'function'
      ) {
        throw new InvalidReturnValueError('expected Writable')
      }

      // TODO: Avoid finished. It registers an unnecessary amount of listeners.
      finished(res, { readable: false }, (err) => {
        const { callback, res, opaque, trailers, abort } = this

        this.res = null
        if (err || !res.readable) {
          util.destroy(res, err)
        }

        this.callback = null
        this.runInAsyncScope(callback, null, err || null, { opaque, trailers })

        if (err) {
          abort()
        }
      })
    }
    })

    res.on('drain', resume)

@@ -148,7 +135,7 @@ class StreamHandler extends AsyncResource {

    const needDrain = res.writableNeedDrain !== undefined
      ? res.writableNeedDrain
      : res._writableState && res._writableState.needDrain
      : res._writableState?.needDrain

    return needDrain !== true
  }
@@ -207,12 +194,14 @@ function stream (opts, factory, callback) {
  }

  try {
    this.dispatch(opts, new StreamHandler(opts, factory, callback))
    const handler = new StreamHandler(opts, factory, callback)

    this.dispatch(opts, handler)
  } catch (err) {
    if (typeof callback !== 'function') {
      throw err
    }
    const opaque = opts && opts.opaque
    const opaque = opts?.opaque
    queueMicrotask(() => callback(err, { opaque }))
  }
}
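`stream()` above expects its factory to return a Writable and resolves once the body has been piped into it. A sketch per the documented API; the URL and file path are placeholders:

```js
'use strict'

// Sketch per undici's documented stream() API; URL and path are placeholders.
const { stream } = require('undici')
const { createWriteStream } = require('node:fs')

stream(
  'https://example.com',
  { opaque: { path: '/tmp/body.html' } }, // opaque is threaded to the factory
  ({ statusCode, opaque }) => {
    console.log('status', statusCode)
    return createWriteStream(opaque.path) // the Writable the handler validates
  }
).then(() => console.log('body written'))
```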
25  node_modules/undici/lib/api/api-upgrade.js  generated  vendored
@@ -1,10 +1,10 @@
'use strict'

const { InvalidArgumentError, RequestAbortedError, SocketError } = require('../core/errors')
const { AsyncResource } = require('async_hooks')
const { InvalidArgumentError, SocketError } = require('../core/errors')
const { AsyncResource } = require('node:async_hooks')
const assert = require('node:assert')
const util = require('../core/util')
const { addSignal, removeSignal } = require('./abort-signal')
const assert = require('assert')

class UpgradeHandler extends AsyncResource {
  constructor (opts, callback) {
@@ -34,10 +34,13 @@ class UpgradeHandler extends AsyncResource {
  }

  onConnect (abort, context) {
    if (!this.callback) {
      throw new RequestAbortedError()
    if (this.reason) {
      abort(this.reason)
      return
    }

    assert(this.callback)

    this.abort = abort
    this.context = null
  }
@@ -47,9 +50,9 @@ class UpgradeHandler extends AsyncResource {
  }

  onUpgrade (statusCode, rawHeaders, socket) {
    const { callback, opaque, context } = this
    assert(statusCode === 101)

    assert.strictEqual(statusCode, 101)
    const { callback, opaque, context } = this

    removeSignal(this)

@@ -88,16 +91,18 @@ function upgrade (opts, callback) {

  try {
    const upgradeHandler = new UpgradeHandler(opts, callback)
    this.dispatch({
    const upgradeOpts = {
      ...opts,
      method: opts.method || 'GET',
      upgrade: opts.protocol || 'Websocket'
    }, upgradeHandler)
    }

    this.dispatch(upgradeOpts, upgradeHandler)
  } catch (err) {
    if (typeof callback !== 'function') {
      throw err
    }
    const opaque = opts && opts.opaque
    const opaque = opts?.opaque
    queueMicrotask(() => callback(err, { opaque }))
  }
}
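The `upgrade()` helper above defaults the method to GET and the `Upgrade` header to 'Websocket'. A sketch per the documented API; the URL is a placeholder:

```js
'use strict'

// Sketch per undici's documented upgrade() API; the URL is a placeholder.
const { upgrade } = require('undici')

async function main () {
  const { headers, socket } = await upgrade('http://localhost:3000', {
    protocol: 'Websocket' // becomes the Upgrade request header
  })
  console.log(headers)
  socket.end()
}

main().catch(console.error)
```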
500  node_modules/undici/lib/api/readable.js  generated  vendored
@@ -2,27 +2,42 @@

'use strict'

const assert = require('assert')
const { Readable } = require('stream')
const { RequestAbortedError, NotSupportedError, InvalidArgumentError } = require('../core/errors')
const assert = require('node:assert')
const { Readable } = require('node:stream')
const { RequestAbortedError, NotSupportedError, InvalidArgumentError, AbortError } = require('../core/errors')
const util = require('../core/util')
const { ReadableStreamFrom, toUSVString } = require('../core/util')

let Blob
const { ReadableStreamFrom } = require('../core/util')

const kConsume = Symbol('kConsume')
const kReading = Symbol('kReading')
const kBody = Symbol('kBody')
const kAbort = Symbol('abort')
const kAbort = Symbol('kAbort')
const kContentType = Symbol('kContentType')
const kContentLength = Symbol('kContentLength')
const kUsed = Symbol('kUsed')
const kBytesRead = Symbol('kBytesRead')

const noop = () => {}

module.exports = class BodyReadable extends Readable {
/**
 * @class
 * @extends {Readable}
 * @see https://fetch.spec.whatwg.org/#body
 */
class BodyReadable extends Readable {
  /**
   * @param {object} opts
   * @param {(this: Readable, size: number) => void} opts.resume
   * @param {() => (void | null)} opts.abort
   * @param {string} [opts.contentType = '']
   * @param {number} [opts.contentLength]
   * @param {number} [opts.highWaterMark = 64 * 1024]
   */
  constructor ({
    resume,
    abort,
    contentType = '',
    contentLength,
    highWaterMark = 64 * 1024 // Same as nodejs fs streams.
  }) {
    super({
@@ -34,9 +49,19 @@ module.exports = class BodyReadable extends Readable {
    this._readableState.dataEmitted = false

    this[kAbort] = abort

    /**
     * @type {Consume | null}
     */
    this[kConsume] = null
    this[kBytesRead] = 0
    /**
     * @type {ReadableStream|null}
     */
    this[kBody] = null
    this[kUsed] = false
    this[kContentType] = contentType
    this[kContentLength] = Number.isFinite(contentLength) ? contentLength : null

    // Is stream being consumed through Readable API?
    // This is an optimization so that we avoid checking
@@ -45,12 +70,12 @@ module.exports = class BodyReadable extends Readable {
    this[kReading] = false
  }

  destroy (err) {
    if (this.destroyed) {
      // Node < 16
      return this
    }

  /**
   * @param {Error|null} err
   * @param {(error:(Error|null)) => void} callback
   * @returns {void}
   */
  _destroy (err, callback) {
    if (!err && !this._readableState.endEmitted) {
      err = new RequestAbortedError()
    }
@@ -59,34 +84,49 @@ module.exports = class BodyReadable extends Readable {
      this[kAbort]()
    }

    return super.destroy(err)
  }

  emit (ev, ...args) {
    if (ev === 'data') {
      // Node < 16.7
      this._readableState.dataEmitted = true
    } else if (ev === 'error') {
      // Node < 16
      this._readableState.errorEmitted = true
    // Workaround for Node "bug". If the stream is destroyed in same
    // tick as it is created, then a user who is waiting for a
    // promise (i.e micro tick) for installing an 'error' listener will
    // never get a chance and will always encounter an unhandled exception.
    if (!this[kUsed]) {
      setImmediate(() => {
        callback(err)
      })
    } else {
      callback(err)
    }
    return super.emit(ev, ...args)
  }

  on (ev, ...args) {
    if (ev === 'data' || ev === 'readable') {
  /**
   * @param {string} event
   * @param {(...args: any[]) => void} listener
   * @returns {this}
   */
  on (event, listener) {
    if (event === 'data' || event === 'readable') {
      this[kReading] = true
      this[kUsed] = true
    }
    return super.on(ev, ...args)
    return super.on(event, listener)
  }

  addListener (ev, ...args) {
    return this.on(ev, ...args)
  /**
   * @param {string} event
   * @param {(...args: any[]) => void} listener
   * @returns {this}
   */
  addListener (event, listener) {
    return this.on(event, listener)
  }

  off (ev, ...args) {
    const ret = super.off(ev, ...args)
    if (ev === 'data' || ev === 'readable') {
  /**
   * @param {string|symbol} event
   * @param {(...args: any[]) => void} listener
   * @returns {this}
   */
  off (event, listener) {
    const ret = super.off(event, listener)
    if (event === 'data' || event === 'readable') {
      this[kReading] = (
        this.listenerCount('data') > 0 ||
        this.listenerCount('readable') > 0
@@ -95,50 +135,107 @@ module.exports = class BodyReadable extends Readable {
    return ret
  }

  removeListener (ev, ...args) {
    return this.off(ev, ...args)
  /**
   * @param {string|symbol} event
   * @param {(...args: any[]) => void} listener
   * @returns {this}
   */
  removeListener (event, listener) {
    return this.off(event, listener)
  }

  /**
   * @param {Buffer|null} chunk
   * @returns {boolean}
   */
  push (chunk) {
    if (this[kConsume] && chunk !== null && this.readableLength === 0) {
    this[kBytesRead] += chunk ? chunk.length : 0

    if (this[kConsume] && chunk !== null) {
      consumePush(this[kConsume], chunk)
      return this[kReading] ? super.push(chunk) : true
    }
    return super.push(chunk)
  }

  // https://fetch.spec.whatwg.org/#dom-body-text
  async text () {
  /**
   * Consumes and returns the body as a string.
   *
   * @see https://fetch.spec.whatwg.org/#dom-body-text
   * @returns {Promise<string>}
   */
  text () {
    return consume(this, 'text')
  }

  // https://fetch.spec.whatwg.org/#dom-body-json
  async json () {
  /**
   * Consumes and returns the body as a JavaScript Object.
   *
   * @see https://fetch.spec.whatwg.org/#dom-body-json
   * @returns {Promise<unknown>}
   */
  json () {
    return consume(this, 'json')
  }

  // https://fetch.spec.whatwg.org/#dom-body-blob
  async blob () {
  /**
   * Consumes and returns the body as a Blob
   *
   * @see https://fetch.spec.whatwg.org/#dom-body-blob
   * @returns {Promise<Blob>}
   */
  blob () {
    return consume(this, 'blob')
  }

  // https://fetch.spec.whatwg.org/#dom-body-arraybuffer
  async arrayBuffer () {
  /**
   * Consumes and returns the body as an Uint8Array.
   *
   * @see https://fetch.spec.whatwg.org/#dom-body-bytes
   * @returns {Promise<Uint8Array>}
   */
  bytes () {
    return consume(this, 'bytes')
  }

  /**
   * Consumes and returns the body as an ArrayBuffer.
   *
   * @see https://fetch.spec.whatwg.org/#dom-body-arraybuffer
   * @returns {Promise<ArrayBuffer>}
   */
  arrayBuffer () {
    return consume(this, 'arrayBuffer')
  }

  // https://fetch.spec.whatwg.org/#dom-body-formdata
  /**
   * Not implemented
   *
   * @see https://fetch.spec.whatwg.org/#dom-body-formdata
   * @throws {NotSupportedError}
   */
  async formData () {
    // TODO: Implement.
    throw new NotSupportedError()
  }

  // https://fetch.spec.whatwg.org/#dom-body-bodyused
  /**
   * Returns true if the body is not null and the body has been consumed.
   * Otherwise, returns false.
   *
   * @see https://fetch.spec.whatwg.org/#dom-body-bodyused
   * @readonly
   * @returns {boolean}
   */
  get bodyUsed () {
    return util.isDisturbed(this)
  }

  // https://fetch.spec.whatwg.org/#dom-body-body
  /**
   * @see https://fetch.spec.whatwg.org/#dom-body-body
   * @readonly
   * @returns {ReadableStream}
   */
  get body () {
    if (!this[kBody]) {
      this[kBody] = ReadableStreamFrom(this)
@@ -151,95 +248,161 @@ module.exports = class BodyReadable extends Readable {
    return this[kBody]
  }

  dump (opts) {
    let limit = opts && Number.isFinite(opts.limit) ? opts.limit : 262144
    const signal = opts && opts.signal
  /**
   * Dumps the response body by reading `limit` number of bytes.
   * @param {object} opts
   * @param {number} [opts.limit = 131072] Number of bytes to read.
   * @param {AbortSignal} [opts.signal] An AbortSignal to cancel the dump.
   * @returns {Promise<null>}
   */
  async dump (opts) {
    const signal = opts?.signal

    if (signal) {
      try {
        if (typeof signal !== 'object' || !('aborted' in signal)) {
          throw new InvalidArgumentError('signal must be an AbortSignal')
        }
        util.throwIfAborted(signal)
      } catch (err) {
        return Promise.reject(err)
    if (signal != null && (typeof signal !== 'object' || !('aborted' in signal))) {
      throw new InvalidArgumentError('signal must be an AbortSignal')
    }

    const limit = opts?.limit && Number.isFinite(opts.limit)
      ? opts.limit
      : 128 * 1024

    signal?.throwIfAborted()

    if (this._readableState.closeEmitted) {
      return null
    }

    return await new Promise((resolve, reject) => {
      if (
        (this[kContentLength] && (this[kContentLength] > limit)) ||
        this[kBytesRead] > limit
      ) {
        this.destroy(new AbortError())
      }
    }

    if (this.closed) {
      return Promise.resolve(null)
    }

    return new Promise((resolve, reject) => {
      const signalListenerCleanup = signal
        ? util.addAbortListener(signal, () => {
          this.destroy()
        })
        : noop
      if (signal) {
        const onAbort = () => {
          this.destroy(signal.reason ?? new AbortError())
        }
        signal.addEventListener('abort', onAbort)
        this
          .on('close', function () {
            signal.removeEventListener('abort', onAbort)
            if (signal.aborted) {
              reject(signal.reason ?? new AbortError())
            } else {
              resolve(null)
            }
          })
      } else {
        this.on('close', resolve)
      }

      this
        .on('close', function () {
          signalListenerCleanup()
          if (signal && signal.aborted) {
            reject(signal.reason || Object.assign(new Error('The operation was aborted'), { name: 'AbortError' }))
          } else {
            resolve(null)
          }
        })
        .on('error', noop)
        .on('data', function (chunk) {
          limit -= chunk.length
          if (limit <= 0) {
        .on('data', () => {
          if (this[kBytesRead] > limit) {
            this.destroy()
          }
        })
        .resume()
    })
  }
}

// https://streams.spec.whatwg.org/#readablestream-locked
function isLocked (self) {
  // Consume is an implicit lock.
  return (self[kBody] && self[kBody].locked === true) || self[kConsume]
}

// https://fetch.spec.whatwg.org/#body-unusable
function isUnusable (self) {
  return util.isDisturbed(self) || isLocked(self)
}

async function consume (stream, type) {
  if (isUnusable(stream)) {
    throw new TypeError('unusable')
  /**
   * @param {BufferEncoding} encoding
   * @returns {this}
   */
  setEncoding (encoding) {
    if (Buffer.isEncoding(encoding)) {
      this._readableState.encoding = encoding
    }
    return this
  }
}

/**
 * @see https://streams.spec.whatwg.org/#readablestream-locked
 * @param {BodyReadable} bodyReadable
 * @returns {boolean}
 */
function isLocked (bodyReadable) {
  // Consume is an implicit lock.
  return bodyReadable[kBody]?.locked === true || bodyReadable[kConsume] !== null
}

/**
 * @see https://fetch.spec.whatwg.org/#body-unusable
 * @param {BodyReadable} bodyReadable
 * @returns {boolean}
 */
function isUnusable (bodyReadable) {
  return util.isDisturbed(bodyReadable) || isLocked(bodyReadable)
}

/**
 * @typedef {object} Consume
 * @property {string} type
 * @property {BodyReadable} stream
 * @property {((value?: any) => void)} resolve
 * @property {((err: Error) => void)} reject
 * @property {number} length
 * @property {Buffer[]} body
 */

/**
 * @param {BodyReadable} stream
 * @param {string} type
 * @returns {Promise<any>}
 */
function consume (stream, type) {
  assert(!stream[kConsume])

  return new Promise((resolve, reject) => {
    stream[kConsume] = {
      type,
      stream,
      resolve,
      reject,
      length: 0,
      body: []
    }

    stream
      .on('error', function (err) {
        consumeFinish(this[kConsume], err)
      })
      .on('close', function () {
        if (this[kConsume].body !== null) {
          consumeFinish(this[kConsume], new RequestAbortedError())
    if (isUnusable(stream)) {
      const rState = stream._readableState
      if (rState.destroyed && rState.closeEmitted === false) {
        stream
          .on('error', err => {
            reject(err)
          })
          .on('close', () => {
            reject(new TypeError('unusable'))
          })
      } else {
        reject(rState.errored ?? new TypeError('unusable'))
      }
    } else {
      queueMicrotask(() => {
        stream[kConsume] = {
          type,
          stream,
          resolve,
          reject,
          length: 0,
          body: []
        }
      })

      process.nextTick(consumeStart, stream[kConsume])
      stream
        .on('error', function (err) {
          consumeFinish(this[kConsume], err)
        })
        .on('close', function () {
          if (this[kConsume].body !== null) {
            consumeFinish(this[kConsume], new RequestAbortedError())
          }
        })

      consumeStart(stream[kConsume])
      })
    }
  })
}

/**
 * @param {Consume} consume
 * @returns {void}
 */
function consumeStart (consume) {
  if (consume.body === null) {
    return
@@ -247,15 +410,23 @@ function consumeStart (consume) {

  const { _readableState: state } = consume.stream

  for (const chunk of state.buffer) {
    consumePush(consume, chunk)
  if (state.bufferIndex) {
    const start = state.bufferIndex
    const end = state.buffer.length
    for (let n = start; n < end; n++) {
      consumePush(consume, state.buffer[n])
    }
  } else {
    for (const chunk of state.buffer) {
      consumePush(consume, chunk)
    }
  }

  if (state.endEmitted) {
    consumeEnd(this[kConsume])
    consumeEnd(this[kConsume], this._readableState.encoding)
  } else {
    consume.stream.on('end', function () {
      consumeEnd(this[kConsume])
      consumeEnd(this[kConsume], this._readableState.encoding)
    })
  }

@@ -266,29 +437,78 @@ function consumeStart (consume) {
  }
}

function consumeEnd (consume) {
/**
 * @param {Buffer[]} chunks
 * @param {number} length
 * @param {BufferEncoding} encoding
 * @returns {string}
 */
function chunksDecode (chunks, length, encoding) {
  if (chunks.length === 0 || length === 0) {
    return ''
  }
  const buffer = chunks.length === 1 ? chunks[0] : Buffer.concat(chunks, length)
  const bufferLength = buffer.length

  // Skip BOM.
  const start =
    bufferLength > 2 &&
    buffer[0] === 0xef &&
    buffer[1] === 0xbb &&
    buffer[2] === 0xbf
      ? 3
      : 0
  if (!encoding || encoding === 'utf8' || encoding === 'utf-8') {
    return buffer.utf8Slice(start, bufferLength)
  } else {
    return buffer.subarray(start, bufferLength).toString(encoding)
  }
}

/**
 * @param {Buffer[]} chunks
 * @param {number} length
 * @returns {Uint8Array}
 */
function chunksConcat (chunks, length) {
  if (chunks.length === 0 || length === 0) {
    return new Uint8Array(0)
  }
  if (chunks.length === 1) {
    // fast-path
    return new Uint8Array(chunks[0])
  }
  const buffer = new Uint8Array(Buffer.allocUnsafeSlow(length).buffer)

  let offset = 0
  for (let i = 0; i < chunks.length; ++i) {
    const chunk = chunks[i]
    buffer.set(chunk, offset)
    offset += chunk.length
  }

  return buffer
}

/**
 * @param {Consume} consume
 * @param {BufferEncoding} encoding
 * @returns {void}
 */
function consumeEnd (consume, encoding) {
  const { type, body, resolve, stream, length } = consume

  try {
    if (type === 'text') {
      resolve(toUSVString(Buffer.concat(body)))
      resolve(chunksDecode(body, length, encoding))
    } else if (type === 'json') {
      resolve(JSON.parse(Buffer.concat(body)))
      resolve(JSON.parse(chunksDecode(body, length, encoding)))
    } else if (type === 'arrayBuffer') {
      const dst = new Uint8Array(length)

      let pos = 0
      for (const buf of body) {
        dst.set(buf, pos)
        pos += buf.byteLength
      }

      resolve(dst.buffer)
      resolve(chunksConcat(body, length).buffer)
    } else if (type === 'blob') {
      if (!Blob) {
        Blob = require('buffer').Blob
      }
      resolve(new Blob(body, { type: stream[kContentType] }))
    } else if (type === 'bytes') {
      resolve(chunksConcat(body, length))
    }

    consumeFinish(consume)
@@ -297,11 +517,21 @@ function consumeEnd (consume) {
  }
}

/**
 * @param {Consume} consume
 * @param {Buffer} chunk
 * @returns {void}
 */
function consumePush (consume, chunk) {
  consume.length += chunk.length
  consume.body.push(chunk)
}

/**
 * @param {Consume} consume
 * @param {Error} [err]
 * @returns {void}
 */
function consumeFinish (consume, err) {
  if (consume.body === null) {
    return
@@ -313,6 +543,7 @@ function consumeFinish (consume, err) {
    consume.resolve()
  }

  // Reset the consume object to allow for garbage collection.
  consume.type = null
  consume.stream = null
  consume.resolve = null
@@ -320,3 +551,8 @@ function consumeFinish (consume, err) {
  consume.length = 0
  consume.body = null
}

module.exports = {
  Readable: BodyReadable,
  chunksDecode
}
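BodyReadable above enforces single consumption: the promise helpers (`text`, `json`, `blob`, `bytes`, `arrayBuffer`) take an implicit lock, and `bodyUsed` reports whether the stream has been disturbed. A consumption sketch, assuming the documented `request` API:

```js
'use strict'

// Sketch, assuming undici's documented request API.
const { request } = require('undici')

async function main () {
  const { body } = await request('https://example.com')

  console.log(body.bodyUsed) // false: nothing consumed yet
  const text = await body.text() // implicit lock; drains the stream
  console.log(body.bodyUsed) // true: the body is now disturbed

  console.log(text.length)
}

main().catch(console.error)
```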
101  node_modules/undici/lib/api/util.js  generated  vendored
@@ -1,46 +1,95 @@
const assert = require('assert')
'use strict'

const assert = require('node:assert')
const {
  ResponseStatusCodeError
} = require('../core/errors')
const { toUSVString } = require('../core/util')

const { chunksDecode } = require('./readable')
const CHUNK_LIMIT = 128 * 1024

async function getResolveErrorBodyCallback ({ callback, body, contentType, statusCode, statusMessage, headers }) {
  assert(body)

  let chunks = []
  let limit = 0
  let length = 0

  for await (const chunk of body) {
    chunks.push(chunk)
    limit += chunk.length
    if (limit > 128 * 1024) {
      chunks = null
      break
  try {
    for await (const chunk of body) {
      chunks.push(chunk)
      length += chunk.length
      if (length > CHUNK_LIMIT) {
        chunks = []
        length = 0
        break
      }
    }
  } catch {
    chunks = []
    length = 0
    // Do nothing....
  }

  if (statusCode === 204 || !contentType || !chunks) {
    process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers))
  const message = `Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`

  if (statusCode === 204 || !contentType || !length) {
    queueMicrotask(() => callback(new ResponseStatusCodeError(message, statusCode, headers)))
    return
  }

  const stackTraceLimit = Error.stackTraceLimit
  Error.stackTraceLimit = 0
  let payload

  try {
    if (contentType.startsWith('application/json')) {
      const payload = JSON.parse(toUSVString(Buffer.concat(chunks)))
      process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))
      return
    if (isContentTypeApplicationJson(contentType)) {
      payload = JSON.parse(chunksDecode(chunks, length))
    } else if (isContentTypeText(contentType)) {
      payload = chunksDecode(chunks, length)
    }

    if (contentType.startsWith('text/')) {
      const payload = toUSVString(Buffer.concat(chunks))
      process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))
      return
    }
  } catch (err) {
    // Process in a fallback if error
  } catch {
    // process in a callback to avoid throwing in the microtask queue
  } finally {
    Error.stackTraceLimit = stackTraceLimit
  }

  process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers))
  queueMicrotask(() => callback(new ResponseStatusCodeError(message, statusCode, headers, payload)))
}

module.exports = { getResolveErrorBodyCallback }
const isContentTypeApplicationJson = (contentType) => {
  return (
    contentType.length > 15 &&
    contentType[11] === '/' &&
    contentType[0] === 'a' &&
    contentType[1] === 'p' &&
    contentType[2] === 'p' &&
    contentType[3] === 'l' &&
    contentType[4] === 'i' &&
    contentType[5] === 'c' &&
    contentType[6] === 'a' &&
    contentType[7] === 't' &&
    contentType[8] === 'i' &&
    contentType[9] === 'o' &&
    contentType[10] === 'n' &&
    contentType[12] === 'j' &&
    contentType[13] === 's' &&
    contentType[14] === 'o' &&
    contentType[15] === 'n'
  )
}

const isContentTypeText = (contentType) => {
  return (
    contentType.length > 4 &&
    contentType[4] === '/' &&
    contentType[0] === 't' &&
    contentType[1] === 'e' &&
    contentType[2] === 'x' &&
    contentType[3] === 't'
  )
}

module.exports = {
  getResolveErrorBodyCallback,
  isContentTypeApplicationJson,
  isContentTypeText
}
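The unrolled matchers above are a fast path for the `startsWith()` checks they replace; only the leading 16 (or 5) characters are inspected, so parameters like `; charset=utf-8` still match. A hypothetical spot-check restating that baseline:

```js
'use strict'

// Hypothetical spot-check: isJson/isText restate the startsWith()
// baseline the unrolled matchers above are equivalent to (they are
// not imported from the vendored file).
const assert = require('node:assert')

const isJson = (ct) => ct.length > 15 && ct.startsWith('application/json')
const isText = (ct) => ct.length > 4 && ct.startsWith('text/')

assert.ok(isJson('application/json'))
assert.ok(isJson('application/json; charset=utf-8')) // parameters still match
assert.ok(!isJson('application/xml'))
assert.ok(isText('text/plain; charset=utf-8'))
assert.ok(!isText('text'))
console.log('content-type baselines hold')
```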
183  node_modules/undici/lib/cache/memory-cache-store.js  generated  vendored  (new file)
@@ -0,0 +1,183 @@
'use strict'

const { Writable } = require('node:stream')
const { assertCacheKey, assertCacheValue } = require('../util/cache.js')

/**
 * @typedef {import('../../types/cache-interceptor.d.ts').default.CacheKey} CacheKey
 * @typedef {import('../../types/cache-interceptor.d.ts').default.CacheValue} CacheValue
 * @typedef {import('../../types/cache-interceptor.d.ts').default.CacheStore} CacheStore
 * @typedef {import('../../types/cache-interceptor.d.ts').default.GetResult} GetResult
 */

/**
 * @implements {CacheStore}
 */
class MemoryCacheStore {
  #maxCount = Infinity
  #maxSize = Infinity
  #maxEntrySize = Infinity

  #size = 0
  #count = 0
  #entries = new Map()

  /**
   * @param {import('../../types/cache-interceptor.d.ts').default.MemoryCacheStoreOpts | undefined} [opts]
   */
  constructor (opts) {
    if (opts) {
      if (typeof opts !== 'object') {
        throw new TypeError('MemoryCacheStore options must be an object')
      }

      if (opts.maxCount !== undefined) {
        if (
          typeof opts.maxCount !== 'number' ||
          !Number.isInteger(opts.maxCount) ||
          opts.maxCount < 0
        ) {
          throw new TypeError('MemoryCacheStore options.maxCount must be a non-negative integer')
        }
        this.#maxCount = opts.maxCount
      }

      if (opts.maxSize !== undefined) {
        if (
          typeof opts.maxSize !== 'number' ||
          !Number.isInteger(opts.maxSize) ||
          opts.maxSize < 0
        ) {
          throw new TypeError('MemoryCacheStore options.maxSize must be a non-negative integer')
        }
        this.#maxSize = opts.maxSize
      }

      if (opts.maxEntrySize !== undefined) {
        if (
          typeof opts.maxEntrySize !== 'number' ||
          !Number.isInteger(opts.maxEntrySize) ||
          opts.maxEntrySize < 0
        ) {
          throw new TypeError('MemoryCacheStore options.maxEntrySize must be a non-negative integer')
        }
        this.#maxEntrySize = opts.maxEntrySize
      }
    }
  }

  /**
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} req
   * @returns {import('../../types/cache-interceptor.d.ts').default.GetResult | undefined}
   */
  get (key) {
    assertCacheKey(key)

    const topLevelKey = `${key.origin}:${key.path}`

    const now = Date.now()
    const entry = this.#entries.get(topLevelKey)?.find((entry) => (
      entry.deleteAt > now &&
      entry.method === key.method &&
      (entry.vary == null || Object.keys(entry.vary).every(headerName => {
        if (entry.vary[headerName] === null) {
          return key.headers[headerName] === undefined
        }

        return entry.vary[headerName] === key.headers[headerName]
      }))
    ))

    return entry == null
      ? undefined
      : {
          statusMessage: entry.statusMessage,
          statusCode: entry.statusCode,
          headers: entry.headers,
          body: entry.body,
          vary: entry.vary ? entry.vary : undefined,
          etag: entry.etag,
          cacheControlDirectives: entry.cacheControlDirectives,
          cachedAt: entry.cachedAt,
          staleAt: entry.staleAt,
          deleteAt: entry.deleteAt
        }
  }

  /**
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheValue} val
   * @returns {Writable | undefined}
   */
  createWriteStream (key, val) {
    assertCacheKey(key)
    assertCacheValue(val)

    const topLevelKey = `${key.origin}:${key.path}`

    const store = this
    const entry = { ...key, ...val, body: [], size: 0 }

    return new Writable({
      write (chunk, encoding, callback) {
        if (typeof chunk === 'string') {
          chunk = Buffer.from(chunk, encoding)
        }

        entry.size += chunk.byteLength

        if (entry.size >= store.#maxEntrySize) {
          this.destroy()
        } else {
          entry.body.push(chunk)
        }

        callback(null)
      },
      final (callback) {
        let entries = store.#entries.get(topLevelKey)
        if (!entries) {
          entries = []
          store.#entries.set(topLevelKey, entries)
        }
        entries.push(entry)

        store.#size += entry.size
        store.#count += 1

        if (store.#size > store.#maxSize || store.#count > store.#maxCount) {
          for (const [key, entries] of store.#entries) {
            for (const entry of entries.splice(0, entries.length / 2)) {
              store.#size -= entry.size
              store.#count -= 1
            }
            if (entries.length === 0) {
              store.#entries.delete(key)
            }
          }
        }

        callback(null)
      }
    })
  }

  /**
   * @param {CacheKey} key
   */
  delete (key) {
    if (typeof key !== 'object') {
      throw new TypeError(`expected key to be object, got ${typeof key}`)
    }

    const topLevelKey = `${key.origin}:${key.path}`

    for (const entry of this.#entries.get(topLevelKey) ?? []) {
      this.#size -= entry.size
      this.#count -= 1
    }
    this.#entries.delete(topLevelKey)
  }
}

module.exports = MemoryCacheStore
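A minimal sketch exercising the MemoryCacheStore contract above. The key/value shapes are inferred from the typedefs it references; `assertCacheKey`/`assertCacheValue` in util/cache.js define the exact requirements, and wiring the store into a dispatcher is out of scope here:

```js
'use strict'

// Sketch; key/value shapes are inferred from the typedefs above. The
// require path mirrors this diff's vendored layout and may not be
// exposed by undici's export map.
const MemoryCacheStore = require('undici/lib/cache/memory-cache-store.js')

const store = new MemoryCacheStore({ maxCount: 100, maxSize: 1024 * 1024 })

const key = { origin: 'https://example.com', path: '/', method: 'GET', headers: {} }
const value = {
  statusCode: 200,
  statusMessage: 'OK',
  headers: {},
  cachedAt: Date.now(),
  staleAt: Date.now() + 60_000, // fresh for a minute
  deleteAt: Date.now() + 120_000 // evicted after two
}

const writable = store.createWriteStream(key, value)
writable.end(Buffer.from('hello'))
writable.on('finish', () => {
  const hit = store.get(key)
  console.log(hit.statusCode, Buffer.concat(hit.body).toString()) // 200 hello
})
```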
457
node_modules/undici/lib/cache/sqlite-cache-store.js
generated
vendored
Normal file
457
node_modules/undici/lib/cache/sqlite-cache-store.js
generated
vendored
Normal file
@@ -0,0 +1,457 @@
'use strict'

const { Writable } = require('stream')
const { assertCacheKey, assertCacheValue } = require('../util/cache.js')

let DatabaseSync

const VERSION = 3

// 2gb
const MAX_ENTRY_SIZE = 2 * 1000 * 1000 * 1000

/**
 * @typedef {import('../../types/cache-interceptor.d.ts').default.CacheStore} CacheStore
 * @implements {CacheStore}
 *
 * @typedef {{
 *  id: Readonly<number>,
 *  body?: Uint8Array
 *  statusCode: number
 *  statusMessage: string
 *  headers?: string
 *  vary?: string
 *  etag?: string
 *  cacheControlDirectives?: string
 *  cachedAt: number
 *  staleAt: number
 *  deleteAt: number
 * }} SqliteStoreValue
 */
module.exports = class SqliteCacheStore {
  #maxEntrySize = MAX_ENTRY_SIZE
  #maxCount = Infinity

  /**
   * @type {import('node:sqlite').DatabaseSync}
   */
  #db

  /**
   * @type {import('node:sqlite').StatementSync}
   */
  #getValuesQuery

  /**
   * @type {import('node:sqlite').StatementSync}
   */
  #updateValueQuery

  /**
   * @type {import('node:sqlite').StatementSync}
   */
  #insertValueQuery

  /**
   * @type {import('node:sqlite').StatementSync}
   */
  #deleteExpiredValuesQuery

  /**
   * @type {import('node:sqlite').StatementSync}
   */
  #deleteByUrlQuery

  /**
   * @type {import('node:sqlite').StatementSync}
   */
  #countEntriesQuery

  /**
   * @type {import('node:sqlite').StatementSync | null}
   */
  #deleteOldValuesQuery

  /**
   * @param {import('../../types/cache-interceptor.d.ts').default.SqliteCacheStoreOpts | undefined} opts
   */
  constructor (opts) {
    if (opts) {
      if (typeof opts !== 'object') {
        throw new TypeError('SqliteCacheStore options must be an object')
      }

      if (opts.maxEntrySize !== undefined) {
        if (
          typeof opts.maxEntrySize !== 'number' ||
          !Number.isInteger(opts.maxEntrySize) ||
          opts.maxEntrySize < 0
        ) {
          throw new TypeError('SqliteCacheStore options.maxEntrySize must be a non-negative integer')
        }

        if (opts.maxEntrySize > MAX_ENTRY_SIZE) {
          throw new TypeError('SqliteCacheStore options.maxEntrySize must be less than 2gb')
        }

        this.#maxEntrySize = opts.maxEntrySize
      }

      if (opts.maxCount !== undefined) {
        if (
          typeof opts.maxCount !== 'number' ||
          !Number.isInteger(opts.maxCount) ||
          opts.maxCount < 0
        ) {
          throw new TypeError('SqliteCacheStore options.maxCount must be a non-negative integer')
        }
        this.#maxCount = opts.maxCount
      }
    }

    if (!DatabaseSync) {
      DatabaseSync = require('node:sqlite').DatabaseSync
    }
    this.#db = new DatabaseSync(opts?.location ?? ':memory:')

    this.#db.exec(`
      CREATE TABLE IF NOT EXISTS cacheInterceptorV${VERSION} (
        -- Data specific to us
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        url TEXT NOT NULL,
        method TEXT NOT NULL,

        -- Data returned to the interceptor
        body BUF NULL,
        deleteAt INTEGER NOT NULL,
        statusCode INTEGER NOT NULL,
        statusMessage TEXT NOT NULL,
        headers TEXT NULL,
        cacheControlDirectives TEXT NULL,
        etag TEXT NULL,
        vary TEXT NULL,
        cachedAt INTEGER NOT NULL,
        staleAt INTEGER NOT NULL
      );

      CREATE INDEX IF NOT EXISTS idx_cacheInterceptorV${VERSION}_url ON cacheInterceptorV${VERSION}(url);
      CREATE INDEX IF NOT EXISTS idx_cacheInterceptorV${VERSION}_method ON cacheInterceptorV${VERSION}(method);
      CREATE INDEX IF NOT EXISTS idx_cacheInterceptorV${VERSION}_deleteAt ON cacheInterceptorV${VERSION}(deleteAt);
    `)

    this.#getValuesQuery = this.#db.prepare(`
      SELECT
        id,
        body,
        deleteAt,
        statusCode,
        statusMessage,
        headers,
        etag,
        cacheControlDirectives,
        vary,
        cachedAt,
        staleAt
      FROM cacheInterceptorV${VERSION}
      WHERE
        url = ?
        AND method = ?
      ORDER BY
        deleteAt ASC
    `)

    this.#updateValueQuery = this.#db.prepare(`
      UPDATE cacheInterceptorV${VERSION} SET
        body = ?,
        deleteAt = ?,
        statusCode = ?,
        statusMessage = ?,
        headers = ?,
        etag = ?,
        cacheControlDirectives = ?,
        cachedAt = ?,
        staleAt = ?
      WHERE
        id = ?
    `)

    this.#insertValueQuery = this.#db.prepare(`
      INSERT INTO cacheInterceptorV${VERSION} (
        url,
        method,
        body,
        deleteAt,
        statusCode,
        statusMessage,
        headers,
        etag,
        cacheControlDirectives,
        vary,
        cachedAt,
        staleAt
      ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    `)

    this.#deleteByUrlQuery = this.#db.prepare(
      `DELETE FROM cacheInterceptorV${VERSION} WHERE url = ?`
    )

    this.#countEntriesQuery = this.#db.prepare(
      `SELECT COUNT(*) AS total FROM cacheInterceptorV${VERSION}`
    )

    this.#deleteExpiredValuesQuery = this.#db.prepare(
      `DELETE FROM cacheInterceptorV${VERSION} WHERE deleteAt <= ?`
    )

    this.#deleteOldValuesQuery = this.#maxCount === Infinity
      ? null
      : this.#db.prepare(`
        DELETE FROM cacheInterceptorV${VERSION}
        WHERE id IN (
          SELECT
            id
          FROM cacheInterceptorV${VERSION}
          ORDER BY cachedAt DESC
          LIMIT ?
        )
      `)
  }

  close () {
    this.#db.close()
  }

  /**
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
   * @returns {(import('../../types/cache-interceptor.d.ts').default.GetResult & { body?: Buffer }) | undefined}
   */
  get (key) {
    assertCacheKey(key)

    const value = this.#findValue(key)
    return value
      ? {
          body: value.body ? Buffer.from(value.body.buffer, value.body.byteOffset, value.body.byteLength) : undefined,
          statusCode: value.statusCode,
          statusMessage: value.statusMessage,
          headers: value.headers ? JSON.parse(value.headers) : undefined,
          etag: value.etag ? value.etag : undefined,
          vary: value.vary ? JSON.parse(value.vary) : undefined,
          cacheControlDirectives: value.cacheControlDirectives
            ? JSON.parse(value.cacheControlDirectives)
            : undefined,
          cachedAt: value.cachedAt,
          staleAt: value.staleAt,
          deleteAt: value.deleteAt
        }
      : undefined
  }

  /**
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheValue & { body: null | Buffer | Array<Buffer>}} value
   */
  set (key, value) {
    assertCacheKey(key)

    const url = this.#makeValueUrl(key)
    const body = Array.isArray(value.body) ? Buffer.concat(value.body) : value.body
    const size = body?.byteLength

    if (size && size > this.#maxEntrySize) {
      return
    }

    const existingValue = this.#findValue(key, true)
    if (existingValue) {
      // Updating an existing response, let's overwrite it
      this.#updateValueQuery.run(
        body,
        value.deleteAt,
        value.statusCode,
        value.statusMessage,
        value.headers ? JSON.stringify(value.headers) : null,
        value.etag ? value.etag : null,
        value.cacheControlDirectives ? JSON.stringify(value.cacheControlDirectives) : null,
        value.cachedAt,
        value.staleAt,
        existingValue.id
      )
    } else {
      this.#prune()
      // New response, let's insert it
      this.#insertValueQuery.run(
        url,
        key.method,
        body,
        value.deleteAt,
        value.statusCode,
        value.statusMessage,
        value.headers ? JSON.stringify(value.headers) : null,
        value.etag ? value.etag : null,
        value.cacheControlDirectives ? JSON.stringify(value.cacheControlDirectives) : null,
        value.vary ? JSON.stringify(value.vary) : null,
        value.cachedAt,
        value.staleAt
      )
    }
  }

  /**
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheValue} value
   * @returns {Writable | undefined}
   */
  createWriteStream (key, value) {
    assertCacheKey(key)
    assertCacheValue(value)

    let size = 0
    /**
     * @type {Buffer[] | null}
     */
    const body = []
    const store = this

    return new Writable({
      decodeStrings: true,
      write (chunk, encoding, callback) {
        size += chunk.byteLength

        if (size < store.#maxEntrySize) {
          body.push(chunk)
        } else {
          this.destroy()
        }

        callback()
      },
      final (callback) {
        store.set(key, { ...value, body })
        callback()
      }
    })
  }

  /**
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
   */
  delete (key) {
    if (typeof key !== 'object') {
      throw new TypeError(`expected key to be object, got ${typeof key}`)
    }

    this.#deleteByUrlQuery.run(this.#makeValueUrl(key))
  }

  #prune () {
    if (this.size <= this.#maxCount) {
      return 0
    }

    {
      const removed = this.#deleteExpiredValuesQuery.run(Date.now()).changes
      if (removed) {
        return removed
      }
    }

    {
      const removed = this.#deleteOldValuesQuery?.run(Math.max(Math.floor(this.#maxCount * 0.1), 1)).changes
      if (removed) {
        return removed
      }
    }

    return 0
  }

  /**
   * Counts the number of rows in the cache
   * @returns {Number}
   */
  get size () {
    const { total } = this.#countEntriesQuery.get()
    return total
  }

  /**
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
   * @returns {string}
   */
  #makeValueUrl (key) {
    return `${key.origin}/${key.path}`
  }

  /**
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
   * @param {boolean} [canBeExpired=false]
   * @returns {SqliteStoreValue | undefined}
   */
  #findValue (key, canBeExpired = false) {
    const url = this.#makeValueUrl(key)
    const { headers, method } = key

    /**
     * @type {SqliteStoreValue[]}
     */
    const values = this.#getValuesQuery.all(url, method)

    if (values.length === 0) {
      return undefined
    }

    const now = Date.now()
    for (const value of values) {
      if (now >= value.deleteAt && !canBeExpired) {
        return undefined
      }

      let matches = true

      if (value.vary) {
        const vary = JSON.parse(value.vary)

        for (const header in vary) {
          if (!headerValueEquals(headers[header], vary[header])) {
            matches = false
            break
          }
        }
      }

      if (matches) {
        return value
      }
    }

    return undefined
  }
}

/**
 * @param {string|string[]|null|undefined} lhs
 * @param {string|string[]|null|undefined} rhs
 * @returns {boolean}
 */
function headerValueEquals (lhs, rhs) {
  if (lhs == null && rhs == null) {
    return true
  }

  if ((lhs == null && rhs != null) ||
    (lhs != null && rhs == null)) {
    return false
  }

  if (Array.isArray(lhs) && Array.isArray(rhs)) {
    if (lhs.length !== rhs.length) {
      return false
    }

    return lhs.every((x, i) => x === rhs[i])
  }

  return lhs === rhs
}
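Usage sketch (editorial addition, not part of the vendored diff): wiring the new SqliteCacheStore into undici's cache interceptor. Names below are taken from undici v7's public API (`interceptors.cache`, `cacheStores`, `Dispatcher#compose`) as an assumption, and a Node.js build that ships `node:sqlite` is required.

'use strict'
// Sketch only; treat the exported names as assumptions about undici's public API.
const { Agent, interceptors, cacheStores } = require('undici')

const store = new cacheStores.SqliteCacheStore({
  location: './http-cache.sqlite', // persisted on disk; defaults to ':memory:'
  maxCount: 10_000 // #prune() starts evicting once the row count exceeds this
})

// Responses flow through createWriteStream()/set() on the way in,
// and get()/#findValue() (including vary matching) on the way out.
const agent = new Agent().compose(interceptors.cache({ store }))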
5
node_modules/undici/lib/cache/symbols.js
generated
vendored
@@ -1,5 +0,0 @@
'use strict'

module.exports = {
  kConstruct: require('../core/symbols').kConstruct
}
2283
node_modules/undici/lib/client.js
generated
vendored
File diff suppressed because it is too large
125
node_modules/undici/lib/core/connect.js
generated
vendored
@@ -1,9 +1,12 @@
 'use strict'
 
-const net = require('net')
-const assert = require('assert')
+const net = require('node:net')
+const assert = require('node:assert')
 const util = require('./util')
 const { InvalidArgumentError, ConnectTimeoutError } = require('./errors')
+const timers = require('../util/timers')
+
+function noop () {}
 
 let tls // include tls conditionally since it is not always available
 
@@ -15,7 +18,7 @@ let tls // include tls conditionally since it is not always available
 let SessionCache
 // FIXME: remove workaround when the Node bug is fixed
 // https://github.com/nodejs/node/issues/49344#issuecomment-1741776308
-if (global.FinalizationRegistry && !process.env.NODE_V8_COVERAGE) {
+if (global.FinalizationRegistry && !(process.env.NODE_V8_COVERAGE || process.env.UNDICI_NO_FG)) {
   SessionCache = class WeakSessionCache {
     constructor (maxCachedSessions) {
       this._maxCachedSessions = maxCachedSessions
@@ -73,7 +76,7 @@ if (global.FinalizationRegistry && !process.env.NODE_V8_COVERAGE) {
   }
 }
 
-function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...opts }) {
+function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, session: customSession, ...opts }) {
   if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) {
     throw new InvalidArgumentError('maxCachedSessions must be a positive integer or zero')
   }
@@ -86,15 +89,17 @@ function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...o
     let socket
     if (protocol === 'https:') {
       if (!tls) {
-        tls = require('tls')
+        tls = require('node:tls')
       }
       servername = servername || options.servername || util.getServerName(host) || null
 
       const sessionKey = servername || hostname
-      const session = sessionCache.get(sessionKey) || null
+
+      assert(sessionKey)
+
+      const session = customSession || sessionCache.get(sessionKey) || null
+
+      port = port || 443
 
       socket = tls.connect({
         highWaterMark: 16384, // TLS in node can't have bigger HWM anyway...
         ...options,
@@ -104,7 +109,7 @@ function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...o
         // TODO(HTTP/2): Add support for h2c
         ALPNProtocols: allowH2 ? ['http/1.1', 'h2'] : ['http/1.1'],
         socket: httpSocket, // upgrade socket connection
-        port: port || 443,
+        port,
         host: hostname
       })
 
@@ -115,11 +120,14 @@ function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...o
       })
     } else {
       assert(!httpSocket, 'httpSocket can only be sent on TLS update')
 
+      port = port || 80
+
       socket = net.connect({
         highWaterMark: 64 * 1024, // Same as nodejs fs streams.
         ...options,
         localAddress,
-        port: port || 80,
+        port,
         host: hostname
       })
     }
@@ -130,12 +138,12 @@ function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...o
       socket.setKeepAlive(true, keepAliveInitialDelay)
     }
 
-    const cancelTimeout = setupTimeout(() => onConnectTimeout(socket), timeout)
+    const clearConnectTimeout = setupConnectTimeout(new WeakRef(socket), { timeout, hostname, port })
 
     socket
       .setNoDelay(true)
      .once(protocol === 'https:' ? 'secureConnect' : 'connect', function () {
-        cancelTimeout()
+        queueMicrotask(clearConnectTimeout)
 
        if (callback) {
          const cb = callback
@@ -144,7 +152,7 @@ function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...o
         }
       })
       .on('error', function (err) {
-        cancelTimeout()
+        queueMicrotask(clearConnectTimeout)
 
         if (callback) {
           const cb = callback
@@ -157,33 +165,76 @@ function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...o
   }
 }
 
-function setupTimeout (onConnectTimeout, timeout) {
-  if (!timeout) {
-    return () => {}
-  }
-
-  let s1 = null
-  let s2 = null
-  const timeoutId = setTimeout(() => {
-    // setImmediate is added to make sure that we priotorise socket error events over timeouts
-    s1 = setImmediate(() => {
-      if (process.platform === 'win32') {
-        // Windows needs an extra setImmediate probably due to implementation differences in the socket logic
-        s2 = setImmediate(() => onConnectTimeout())
-      } else {
-        onConnectTimeout()
-      }
-    })
-  }, timeout)
-  return () => {
-    clearTimeout(timeoutId)
-    clearImmediate(s1)
-    clearImmediate(s2)
-  }
-}
-
-function onConnectTimeout (socket) {
-  util.destroy(socket, new ConnectTimeoutError())
-}
+/**
+ * @param {WeakRef<net.Socket>} socketWeakRef
+ * @param {object} opts
+ * @param {number} opts.timeout
+ * @param {string} opts.hostname
+ * @param {number} opts.port
+ * @returns {() => void}
+ */
+const setupConnectTimeout = process.platform === 'win32'
+  ? (socketWeakRef, opts) => {
+    if (!opts.timeout) {
+      return noop
+    }
+
+    let s1 = null
+    let s2 = null
+    const fastTimer = timers.setFastTimeout(() => {
+      // setImmediate is added to make sure that we prioritize socket error events over timeouts
+      s1 = setImmediate(() => {
+        // Windows needs an extra setImmediate probably due to implementation differences in the socket logic
+        s2 = setImmediate(() => onConnectTimeout(socketWeakRef.deref(), opts))
+      })
+    }, opts.timeout)
+    return () => {
+      timers.clearFastTimeout(fastTimer)
+      clearImmediate(s1)
+      clearImmediate(s2)
+    }
+  }
+  : (socketWeakRef, opts) => {
+    if (!opts.timeout) {
+      return noop
+    }
+
+    let s1 = null
+    const fastTimer = timers.setFastTimeout(() => {
+      // setImmediate is added to make sure that we prioritize socket error events over timeouts
+      s1 = setImmediate(() => {
+        onConnectTimeout(socketWeakRef.deref(), opts)
+      })
+    }, opts.timeout)
+    return () => {
+      timers.clearFastTimeout(fastTimer)
+      clearImmediate(s1)
+    }
+  }
+
+/**
+ * @param {net.Socket} socket
+ * @param {object} opts
+ * @param {number} opts.timeout
+ * @param {string} opts.hostname
+ * @param {number} opts.port
+ */
+function onConnectTimeout (socket, opts) {
+  // The socket could be already garbage collected
+  if (socket == null) {
+    return
+  }
+
+  let message = 'Connect Timeout Error'
+  if (Array.isArray(socket.autoSelectFamilyAttemptedAddresses)) {
+    message += ` (attempted addresses: ${socket.autoSelectFamilyAttemptedAddresses.join(', ')},`
+  } else {
+    message += ` (attempted address: ${opts.hostname}:${opts.port},`
+  }
+
+  message += ` timeout: ${opts.timeout}ms)`
+
+  util.destroy(socket, new ConnectTimeoutError(message))
+}
 
 module.exports = buildConnector
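Usage sketch (editorial addition, not part of the vendored diff): the reworked connector accepts a pre-established TLS `session` and reports the attempted address and timeout in ConnectTimeoutError. `buildConnector` being exported from undici's root is an assumption based on its public API.

'use strict'
const { Client, buildConnector } = require('undici')

// The connect timeout is now scheduled via undici's fast timers and holds only a
// WeakRef to the socket, so a garbage-collected socket is never destroyed twice.
const connect = buildConnector({
  timeout: 10e3,
  maxCachedSessions: 100 // TLS sessions are cached per servername/hostname
})

const client = new Client('https://example.com', { connect })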
45
node_modules/undici/lib/core/constants.js
generated
vendored
@@ -1,10 +1,9 @@
 'use strict'
 
-/** @type {Record<string, string | undefined>} */
-const headerNameLowerCasedRecord = {}
-
-// https://developer.mozilla.org/docs/Web/HTTP/Headers
-const wellknownHeaderNames = [
+/**
+ * @see https://developer.mozilla.org/docs/Web/HTTP/Headers
+ */
+const wellknownHeaderNames = /** @type {const} */ ([
   'Accept',
   'Accept-Encoding',
   'Accept-Language',
@@ -100,7 +99,35 @@ const wellknownHeaderNames = [
   'X-Powered-By',
   'X-Requested-With',
   'X-XSS-Protection'
-]
+])
+
+/** @type {Record<typeof wellknownHeaderNames[number]|Lowercase<typeof wellknownHeaderNames[number]>, string>} */
+const headerNameLowerCasedRecord = {}
+
+// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.
+Object.setPrototypeOf(headerNameLowerCasedRecord, null)
+
+/**
+ * @type {Record<Lowercase<typeof wellknownHeaderNames[number]>, Buffer>}
+ */
+const wellknownHeaderNameBuffers = {}
+
+// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.
+Object.setPrototypeOf(wellknownHeaderNameBuffers, null)
+
+/**
+ * @param {string} header Lowercased header
+ * @returns {Buffer}
+ */
+function getHeaderNameAsBuffer (header) {
+  let buffer = wellknownHeaderNameBuffers[header]
+
+  if (buffer === undefined) {
+    buffer = Buffer.from(header)
+  }
+
+  return buffer
+}
 
 for (let i = 0; i < wellknownHeaderNames.length; ++i) {
   const key = wellknownHeaderNames[i]
@@ -109,10 +136,8 @@ for (let i = 0; i < wellknownHeaderNames.length; ++i) {
     lowerCasedKey
 }
 
-// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.
-Object.setPrototypeOf(headerNameLowerCasedRecord, null)
-
 module.exports = {
   wellknownHeaderNames,
-  headerNameLowerCasedRecord
+  headerNameLowerCasedRecord,
+  getHeaderNameAsBuffer
 }
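Illustrative sketch (editorial addition, not part of the vendored diff): the record maps both the canonical and the lowercase spelling of each well-known name to the lowercase form, so hot paths can skip `toLowerCase()`, and `getHeaderNameAsBuffer` memoizes Buffers for well-known names. Deep-requiring the vendored file as below is an assumption made for illustration only.

'use strict'
const {
  headerNameLowerCasedRecord,
  getHeaderNameAsBuffer
} = require('undici/lib/core/constants') // assumed deep require into the vendored lib

console.log(headerNameLowerCasedRecord['Content-Type']) // 'content-type'
console.log(headerNameLowerCasedRecord['content-type']) // 'content-type'
console.log(getHeaderNameAsBuffer('content-type'))      // cached Buffer for a well-known name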
196
node_modules/undici/lib/core/diagnostics.js
generated
vendored
Normal file
@@ -0,0 +1,196 @@
'use strict'

const diagnosticsChannel = require('node:diagnostics_channel')
const util = require('node:util')

const undiciDebugLog = util.debuglog('undici')
const fetchDebuglog = util.debuglog('fetch')
const websocketDebuglog = util.debuglog('websocket')

const channels = {
  // Client
  beforeConnect: diagnosticsChannel.channel('undici:client:beforeConnect'),
  connected: diagnosticsChannel.channel('undici:client:connected'),
  connectError: diagnosticsChannel.channel('undici:client:connectError'),
  sendHeaders: diagnosticsChannel.channel('undici:client:sendHeaders'),
  // Request
  create: diagnosticsChannel.channel('undici:request:create'),
  bodySent: diagnosticsChannel.channel('undici:request:bodySent'),
  headers: diagnosticsChannel.channel('undici:request:headers'),
  trailers: diagnosticsChannel.channel('undici:request:trailers'),
  error: diagnosticsChannel.channel('undici:request:error'),
  // WebSocket
  open: diagnosticsChannel.channel('undici:websocket:open'),
  close: diagnosticsChannel.channel('undici:websocket:close'),
  socketError: diagnosticsChannel.channel('undici:websocket:socket_error'),
  ping: diagnosticsChannel.channel('undici:websocket:ping'),
  pong: diagnosticsChannel.channel('undici:websocket:pong')
}

let isTrackingClientEvents = false

function trackClientEvents (debugLog = undiciDebugLog) {
  if (isTrackingClientEvents) {
    return
  }

  isTrackingClientEvents = true

  diagnosticsChannel.subscribe('undici:client:beforeConnect',
    evt => {
      const {
        connectParams: { version, protocol, port, host }
      } = evt
      debugLog(
        'connecting to %s%s using %s%s',
        host,
        port ? `:${port}` : '',
        protocol,
        version
      )
    })

  diagnosticsChannel.subscribe('undici:client:connected',
    evt => {
      const {
        connectParams: { version, protocol, port, host }
      } = evt
      debugLog(
        'connected to %s%s using %s%s',
        host,
        port ? `:${port}` : '',
        protocol,
        version
      )
    })

  diagnosticsChannel.subscribe('undici:client:connectError',
    evt => {
      const {
        connectParams: { version, protocol, port, host },
        error
      } = evt
      debugLog(
        'connection to %s%s using %s%s errored - %s',
        host,
        port ? `:${port}` : '',
        protocol,
        version,
        error.message
      )
    })

  diagnosticsChannel.subscribe('undici:client:sendHeaders',
    evt => {
      const {
        request: { method, path, origin }
      } = evt
      debugLog('sending request to %s %s/%s', method, origin, path)
    })
}

let isTrackingRequestEvents = false

function trackRequestEvents (debugLog = undiciDebugLog) {
  if (isTrackingRequestEvents) {
    return
  }

  isTrackingRequestEvents = true

  diagnosticsChannel.subscribe('undici:request:headers',
    evt => {
      const {
        request: { method, path, origin },
        response: { statusCode }
      } = evt
      debugLog(
        'received response to %s %s/%s - HTTP %d',
        method,
        origin,
        path,
        statusCode
      )
    })

  diagnosticsChannel.subscribe('undici:request:trailers',
    evt => {
      const {
        request: { method, path, origin }
      } = evt
      debugLog('trailers received from %s %s/%s', method, origin, path)
    })

  diagnosticsChannel.subscribe('undici:request:error',
    evt => {
      const {
        request: { method, path, origin },
        error
      } = evt
      debugLog(
        'request to %s %s/%s errored - %s',
        method,
        origin,
        path,
        error.message
      )
    })
}

let isTrackingWebSocketEvents = false

function trackWebSocketEvents (debugLog = websocketDebuglog) {
  if (isTrackingWebSocketEvents) {
    return
  }

  isTrackingWebSocketEvents = true

  diagnosticsChannel.subscribe('undici:websocket:open',
    evt => {
      const {
        address: { address, port }
      } = evt
      debugLog('connection opened %s%s', address, port ? `:${port}` : '')
    })

  diagnosticsChannel.subscribe('undici:websocket:close',
    evt => {
      const { websocket, code, reason } = evt
      debugLog(
        'closed connection to %s - %s %s',
        websocket.url,
        code,
        reason
      )
    })

  diagnosticsChannel.subscribe('undici:websocket:socket_error',
    err => {
      debugLog('connection errored - %s', err.message)
    })

  diagnosticsChannel.subscribe('undici:websocket:ping',
    evt => {
      debugLog('ping received')
    })

  diagnosticsChannel.subscribe('undici:websocket:pong',
    evt => {
      debugLog('pong received')
    })
}

if (undiciDebugLog.enabled || fetchDebuglog.enabled) {
  trackClientEvents(fetchDebuglog.enabled ? fetchDebuglog : undiciDebugLog)
  trackRequestEvents(fetchDebuglog.enabled ? fetchDebuglog : undiciDebugLog)
}

if (websocketDebuglog.enabled) {
  trackClientEvents(undiciDebugLog.enabled ? undiciDebugLog : websocketDebuglog)
  trackWebSocketEvents(websocketDebuglog)
}

module.exports = {
  channels
}
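Usage sketch (editorial addition, not part of the vendored diff): the file above only wires undici's diagnostics channels to `util.debuglog`, so setting NODE_DEBUG=undici (or fetch/websocket) enables the built-in logging. Applications can also subscribe to the same channels directly with node:diagnostics_channel.

'use strict'
const diagnosticsChannel = require('node:diagnostics_channel')

// Channel name taken verbatim from the file above.
diagnosticsChannel.subscribe('undici:request:headers', ({ request, response }) => {
  console.log(`${request.method} ${request.origin}${request.path} -> HTTP ${response.statusCode}`)
})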
60
node_modules/undici/lib/core/errors.js
generated
vendored
@@ -1,8 +1,8 @@
 'use strict'
 
 class UndiciError extends Error {
-  constructor (message) {
-    super(message)
+  constructor (message, options) {
+    super(message, options)
     this.name = 'UndiciError'
     this.code = 'UND_ERR'
   }
@@ -11,7 +11,6 @@ class UndiciError extends Error {
 class ConnectTimeoutError extends UndiciError {
   constructor (message) {
     super(message)
-    Error.captureStackTrace(this, ConnectTimeoutError)
     this.name = 'ConnectTimeoutError'
     this.message = message || 'Connect Timeout Error'
     this.code = 'UND_ERR_CONNECT_TIMEOUT'
@@ -21,7 +20,6 @@ class ConnectTimeoutError extends UndiciError {
 class HeadersTimeoutError extends UndiciError {
   constructor (message) {
     super(message)
-    Error.captureStackTrace(this, HeadersTimeoutError)
     this.name = 'HeadersTimeoutError'
     this.message = message || 'Headers Timeout Error'
     this.code = 'UND_ERR_HEADERS_TIMEOUT'
@@ -31,7 +29,6 @@ class HeadersTimeoutError extends UndiciError {
 class HeadersOverflowError extends UndiciError {
   constructor (message) {
     super(message)
-    Error.captureStackTrace(this, HeadersOverflowError)
     this.name = 'HeadersOverflowError'
     this.message = message || 'Headers Overflow Error'
     this.code = 'UND_ERR_HEADERS_OVERFLOW'
@@ -41,7 +38,6 @@ class HeadersOverflowError extends UndiciError {
 class BodyTimeoutError extends UndiciError {
   constructor (message) {
     super(message)
-    Error.captureStackTrace(this, BodyTimeoutError)
     this.name = 'BodyTimeoutError'
     this.message = message || 'Body Timeout Error'
     this.code = 'UND_ERR_BODY_TIMEOUT'
@@ -51,7 +47,6 @@ class BodyTimeoutError extends UndiciError {
 class ResponseStatusCodeError extends UndiciError {
   constructor (message, statusCode, headers, body) {
     super(message)
-    Error.captureStackTrace(this, ResponseStatusCodeError)
     this.name = 'ResponseStatusCodeError'
     this.message = message || 'Response Status Code Error'
     this.code = 'UND_ERR_RESPONSE_STATUS_CODE'
@@ -65,7 +60,6 @@ class ResponseStatusCodeError extends UndiciError {
 class InvalidArgumentError extends UndiciError {
   constructor (message) {
     super(message)
-    Error.captureStackTrace(this, InvalidArgumentError)
     this.name = 'InvalidArgumentError'
     this.message = message || 'Invalid Argument Error'
     this.code = 'UND_ERR_INVALID_ARG'
@@ -75,17 +69,23 @@ class InvalidArgumentError extends UndiciError {
 class InvalidReturnValueError extends UndiciError {
   constructor (message) {
     super(message)
-    Error.captureStackTrace(this, InvalidReturnValueError)
     this.name = 'InvalidReturnValueError'
     this.message = message || 'Invalid Return Value Error'
     this.code = 'UND_ERR_INVALID_RETURN_VALUE'
   }
 }
 
-class RequestAbortedError extends UndiciError {
+class AbortError extends UndiciError {
+  constructor (message) {
+    super(message)
+    this.name = 'AbortError'
+    this.message = message || 'The operation was aborted'
+  }
+}
+
+class RequestAbortedError extends AbortError {
   constructor (message) {
     super(message)
-    Error.captureStackTrace(this, RequestAbortedError)
     this.name = 'AbortError'
     this.message = message || 'Request aborted'
     this.code = 'UND_ERR_ABORTED'
@@ -95,7 +95,6 @@ class RequestAbortedError extends UndiciError {
 class InformationalError extends UndiciError {
   constructor (message) {
     super(message)
-    Error.captureStackTrace(this, InformationalError)
     this.name = 'InformationalError'
     this.message = message || 'Request information'
     this.code = 'UND_ERR_INFO'
@@ -105,7 +104,6 @@ class InformationalError extends UndiciError {
 class RequestContentLengthMismatchError extends UndiciError {
   constructor (message) {
     super(message)
-    Error.captureStackTrace(this, RequestContentLengthMismatchError)
     this.name = 'RequestContentLengthMismatchError'
     this.message = message || 'Request body length does not match content-length header'
     this.code = 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH'
@@ -115,7 +113,6 @@ class RequestContentLengthMismatchError extends UndiciError {
 class ResponseContentLengthMismatchError extends UndiciError {
   constructor (message) {
     super(message)
-    Error.captureStackTrace(this, ResponseContentLengthMismatchError)
     this.name = 'ResponseContentLengthMismatchError'
     this.message = message || 'Response body length does not match content-length header'
     this.code = 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH'
@@ -125,7 +122,6 @@ class ResponseContentLengthMismatchError extends UndiciError {
 class ClientDestroyedError extends UndiciError {
   constructor (message) {
     super(message)
-    Error.captureStackTrace(this, ClientDestroyedError)
     this.name = 'ClientDestroyedError'
     this.message = message || 'The client is destroyed'
     this.code = 'UND_ERR_DESTROYED'
@@ -135,7 +131,6 @@ class ClientDestroyedError extends UndiciError {
 class ClientClosedError extends UndiciError {
   constructor (message) {
     super(message)
-    Error.captureStackTrace(this, ClientClosedError)
     this.name = 'ClientClosedError'
     this.message = message || 'The client is closed'
     this.code = 'UND_ERR_CLOSED'
@@ -145,7 +140,6 @@ class ClientClosedError extends UndiciError {
 class SocketError extends UndiciError {
   constructor (message, socket) {
     super(message)
-    Error.captureStackTrace(this, SocketError)
     this.name = 'SocketError'
     this.message = message || 'Socket error'
     this.code = 'UND_ERR_SOCKET'
@@ -156,7 +150,6 @@ class SocketError extends UndiciError {
 class NotSupportedError extends UndiciError {
   constructor (message) {
     super(message)
-    Error.captureStackTrace(this, NotSupportedError)
     this.name = 'NotSupportedError'
     this.message = message || 'Not supported error'
     this.code = 'UND_ERR_NOT_SUPPORTED'
@@ -166,7 +159,6 @@ class NotSupportedError extends UndiciError {
 class BalancedPoolMissingUpstreamError extends UndiciError {
   constructor (message) {
     super(message)
-    Error.captureStackTrace(this, NotSupportedError)
     this.name = 'MissingUpstreamError'
     this.message = message || 'No upstream has been added to the BalancedPool'
     this.code = 'UND_ERR_BPL_MISSING_UPSTREAM'
@@ -176,7 +168,6 @@ class BalancedPoolMissingUpstreamError extends UndiciError {
 class HTTPParserError extends Error {
   constructor (message, code, data) {
     super(message)
-    Error.captureStackTrace(this, HTTPParserError)
     this.name = 'HTTPParserError'
     this.code = code ? `HPE_${code}` : undefined
     this.data = data ? data.toString() : undefined
@@ -186,7 +177,6 @@ class HTTPParserError extends Error {
 class ResponseExceededMaxSizeError extends UndiciError {
   constructor (message) {
     super(message)
-    Error.captureStackTrace(this, ResponseExceededMaxSizeError)
     this.name = 'ResponseExceededMaxSizeError'
     this.message = message || 'Response content exceeded max size'
     this.code = 'UND_ERR_RES_EXCEEDED_MAX_SIZE'
@@ -196,7 +186,6 @@ class ResponseExceededMaxSizeError extends UndiciError {
 class RequestRetryError extends UndiciError {
   constructor (message, code, { headers, data }) {
     super(message)
-    Error.captureStackTrace(this, RequestRetryError)
     this.name = 'RequestRetryError'
     this.message = message || 'Request retry error'
     this.code = 'UND_ERR_REQ_RETRY'
@@ -206,7 +195,30 @@ class RequestRetryError extends UndiciError {
   }
 }
 
+class ResponseError extends UndiciError {
+  constructor (message, code, { headers, body }) {
+    super(message)
+    this.name = 'ResponseError'
+    this.message = message || 'Response error'
+    this.code = 'UND_ERR_RESPONSE'
+    this.statusCode = code
+    this.body = body
+    this.headers = headers
+  }
+}
+
+class SecureProxyConnectionError extends UndiciError {
+  constructor (cause, message, options = {}) {
+    super(message, { cause, ...options })
+    this.name = 'SecureProxyConnectionError'
+    this.message = message || 'Secure Proxy Connection failed'
+    this.code = 'UND_ERR_PRX_TLS'
+    this.cause = cause
+  }
+}
+
 module.exports = {
+  AbortError,
   HTTPParserError,
   UndiciError,
   HeadersTimeoutError,
@@ -226,5 +238,7 @@ module.exports = {
   ResponseContentLengthMismatchError,
   BalancedPoolMissingUpstreamError,
   ResponseExceededMaxSizeError,
-  RequestRetryError
+  RequestRetryError,
+  ResponseError,
+  SecureProxyConnectionError
 }
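Usage sketch (editorial addition, not part of the vendored diff): the new AbortError base class means RequestAbortedError instances are `instanceof` both classes, and every undici error still carries a stable `code` for programmatic handling.

'use strict'
const { request, errors } = require('undici')

async function fetchOrReport (url, signal) {
  try {
    return await request(url, { signal })
  } catch (err) {
    if (err instanceof errors.RequestAbortedError) {
      console.error('aborted:', err.code) // 'UND_ERR_ABORTED'
    } else if (err instanceof errors.UndiciError) {
      // e.g. ConnectTimeoutError now includes the attempted address in its message
      console.error(err.name, err.code)
    } else {
      throw err
    }
  }
}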
306
node_modules/undici/lib/core/request.js
generated
vendored
@@ -4,52 +4,29 @@ const {
   InvalidArgumentError,
   NotSupportedError
 } = require('./errors')
-const assert = require('assert')
-const { kHTTP2BuildRequest, kHTTP2CopyHeaders, kHTTP1BuildRequest } = require('./symbols')
-const util = require('./util')
-
-// tokenRegExp and headerCharRegex have been lifted from
-// https://github.com/nodejs/node/blob/main/lib/_http_common.js
-
-/**
- * Verifies that the given val is a valid HTTP token
- * per the rules defined in RFC 7230
- * See https://tools.ietf.org/html/rfc7230#section-3.2.6
- */
-const tokenRegExp = /^[\^_`a-zA-Z\-0-9!#$%&'*+.|~]+$/
-
-/**
- * Matches if val contains an invalid field-vchar
- *  field-value    = *( field-content / obs-fold )
- *  field-content  = field-vchar [ 1*( SP / HTAB ) field-vchar ]
- *  field-vchar    = VCHAR / obs-text
- */
-const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/
+const assert = require('node:assert')
+const {
+  isValidHTTPToken,
+  isValidHeaderValue,
+  isStream,
+  destroy,
+  isBuffer,
+  isFormDataLike,
+  isIterable,
+  isBlobLike,
+  serializePathWithQuery,
+  assertRequestHandler,
+  getServerName,
+  normalizedMethodRecords
+} = require('./util')
+const { channels } = require('./diagnostics.js')
+const { headerNameLowerCasedRecord } = require('./constants')
 
 // Verifies that a given path is valid does not contain control chars \x00 to \x20
 const invalidPathRegex = /[^\u0021-\u00ff]/
 
 const kHandler = Symbol('handler')
 
-const channels = {}
-
-let extractBody
-
-try {
-  const diagnosticsChannel = require('diagnostics_channel')
-  channels.create = diagnosticsChannel.channel('undici:request:create')
-  channels.bodySent = diagnosticsChannel.channel('undici:request:bodySent')
-  channels.headers = diagnosticsChannel.channel('undici:request:headers')
-  channels.trailers = diagnosticsChannel.channel('undici:request:trailers')
-  channels.error = diagnosticsChannel.channel('undici:request:error')
-} catch {
-  channels.create = { hasSubscribers: false }
-  channels.bodySent = { hasSubscribers: false }
-  channels.headers = { hasSubscribers: false }
-  channels.trailers = { hasSubscribers: false }
-  channels.error = { hasSubscribers: false }
-}
-
 class Request {
   constructor (origin, {
     path,
@@ -63,8 +40,9 @@ class Request {
     headersTimeout,
     bodyTimeout,
     reset,
-    throwOnError,
-    expectContinue
+    expectContinue,
+    servername,
+    throwOnError
   }, handler) {
     if (typeof path !== 'string') {
       throw new InvalidArgumentError('path must be a string')
@@ -74,13 +52,13 @@ class Request {
       method !== 'CONNECT'
     ) {
       throw new InvalidArgumentError('path must be an absolute URL or start with a slash')
-    } else if (invalidPathRegex.exec(path) !== null) {
+    } else if (invalidPathRegex.test(path)) {
       throw new InvalidArgumentError('invalid request path')
     }
 
     if (typeof method !== 'string') {
       throw new InvalidArgumentError('method must be a string')
-    } else if (tokenRegExp.exec(method) === null) {
+    } else if (normalizedMethodRecords[method] === undefined && !isValidHTTPToken(method)) {
       throw new InvalidArgumentError('invalid request method')
     }
 
@@ -104,25 +82,27 @@ class Request {
       throw new InvalidArgumentError('invalid expectContinue')
     }
 
+    if (throwOnError != null) {
+      throw new InvalidArgumentError('invalid throwOnError')
+    }
+
     this.headersTimeout = headersTimeout
 
     this.bodyTimeout = bodyTimeout
 
-    this.throwOnError = throwOnError === true
-
     this.method = method
 
     this.abort = null
 
     if (body == null) {
       this.body = null
-    } else if (util.isStream(body)) {
+    } else if (isStream(body)) {
       this.body = body
 
       const rState = this.body._readableState
       if (!rState || !rState.autoDestroy) {
         this.endHandler = function autoDestroy () {
-          util.destroy(this)
+          destroy(this)
         }
         this.body.on('end', this.endHandler)
       }
@@ -135,7 +115,7 @@ class Request {
         }
       }
       this.body.on('error', this.errorHandler)
-    } else if (util.isBuffer(body)) {
+    } else if (isBuffer(body)) {
      this.body = body.byteLength ? body : null
    } else if (ArrayBuffer.isView(body)) {
      this.body = body.buffer.byteLength ? Buffer.from(body.buffer, body.byteOffset, body.byteLength) : null
@@ -143,19 +123,18 @@ class Request {
       this.body = body.byteLength ? Buffer.from(body) : null
     } else if (typeof body === 'string') {
       this.body = body.length ? Buffer.from(body) : null
-    } else if (util.isFormDataLike(body) || util.isIterable(body) || util.isBlobLike(body)) {
+    } else if (isFormDataLike(body) || isIterable(body) || isBlobLike(body)) {
       this.body = body
     } else {
       throw new InvalidArgumentError('body must be a string, a Buffer, a Readable stream, an iterable, or an async iterable')
     }
 
     this.completed = false
 
     this.aborted = false
 
     this.upgrade = upgrade || null
 
-    this.path = query ? util.buildURL(path, query) : path
+    this.path = query ? serializePathWithQuery(path, query) : path
 
     this.origin = origin
 
@@ -163,7 +142,7 @@ class Request {
       ? method === 'HEAD' || method === 'GET'
       : idempotent
 
-    this.blocking = blocking == null ? false : blocking
+    this.blocking = blocking ?? this.method !== 'HEAD'
 
     this.reset = reset == null ? null : reset
 
@@ -173,7 +152,7 @@ class Request {
 
     this.contentType = null
 
-    this.headers = ''
+    this.headers = []
 
     // Only for H2
     this.expectContinue = expectContinue != null ? expectContinue : false
@@ -186,39 +165,26 @@ class Request {
         processHeader(this, headers[i], headers[i + 1])
       }
     } else if (headers && typeof headers === 'object') {
-      const keys = Object.keys(headers)
-      for (let i = 0; i < keys.length; i++) {
-        const key = keys[i]
-        processHeader(this, key, headers[key])
+      if (headers[Symbol.iterator]) {
+        for (const header of headers) {
+          if (!Array.isArray(header) || header.length !== 2) {
+            throw new InvalidArgumentError('headers must be in key-value pair format')
+          }
+          processHeader(this, header[0], header[1])
+        }
+      } else {
+        const keys = Object.keys(headers)
+        for (let i = 0; i < keys.length; ++i) {
+          processHeader(this, keys[i], headers[keys[i]])
+        }
       }
     } else if (headers != null) {
       throw new InvalidArgumentError('headers must be an object or an array')
     }
 
-    if (util.isFormDataLike(this.body)) {
-      if (util.nodeMajor < 16 || (util.nodeMajor === 16 && util.nodeMinor < 8)) {
-        throw new InvalidArgumentError('Form-Data bodies are only supported in node v16.8 and newer.')
-      }
-
-      if (!extractBody) {
-        extractBody = require('../fetch/body.js').extractBody
-      }
-
-      const [bodyStream, contentType] = extractBody(body)
-      if (this.contentType == null) {
-        this.contentType = contentType
-        this.headers += `content-type: ${contentType}\r\n`
-      }
-      this.body = bodyStream.stream
-      this.contentLength = bodyStream.length
-    } else if (util.isBlobLike(body) && this.contentType == null && body.type) {
-      this.contentType = body.type
-      this.headers += `content-type: ${body.type}\r\n`
-    }
-
-    util.validateHandler(handler, method, upgrade)
+    assertRequestHandler(handler, method, upgrade)
 
-    this.servername = util.getServerName(this.host)
+    this.servername = servername || getServerName(this.host) || null
 
     this[kHandler] = handler
 
@@ -263,6 +229,10 @@ class Request {
     }
   }
 
+  onResponseStarted () {
+    return this[kHandler].onResponseStarted?.()
+  }
+
   onHeaders (statusCode, headers, resume, statusText) {
     assert(!this.aborted)
     assert(!this.completed)
@@ -301,6 +271,7 @@ class Request {
     this.onFinally()
 
     assert(!this.aborted)
+    assert(!this.completed)
 
     this.completed = true
     if (channels.trailers.hasSubscribers) {
@@ -342,157 +313,84 @@ class Request {
     }
   }
 
-  // TODO: adjust to support H2
   addHeader (key, value) {
     processHeader(this, key, value)
     return this
   }
-
-  static [kHTTP1BuildRequest] (origin, opts, handler) {
-    // TODO: Migrate header parsing here, to make Requests
-    // HTTP agnostic
-    return new Request(origin, opts, handler)
-  }
-
-  static [kHTTP2BuildRequest] (origin, opts, handler) {
-    const headers = opts.headers
-    opts = { ...opts, headers: null }
-
-    const request = new Request(origin, opts, handler)
-
-    request.headers = {}
-
-    if (Array.isArray(headers)) {
-      if (headers.length % 2 !== 0) {
-        throw new InvalidArgumentError('headers array must be even')
-      }
-      for (let i = 0; i < headers.length; i += 2) {
-        processHeader(request, headers[i], headers[i + 1], true)
-      }
-    } else if (headers && typeof headers === 'object') {
-      const keys = Object.keys(headers)
-      for (let i = 0; i < keys.length; i++) {
-        const key = keys[i]
-        processHeader(request, key, headers[key], true)
-      }
-    } else if (headers != null) {
-      throw new InvalidArgumentError('headers must be an object or an array')
-    }
-
-    return request
-  }
-
-  static [kHTTP2CopyHeaders] (raw) {
-    const rawHeaders = raw.split('\r\n')
-    const headers = {}
-
-    for (const header of rawHeaders) {
-      const [key, value] = header.split(': ')
-
-      if (value == null || value.length === 0) continue
-
-      if (headers[key]) headers[key] += `,${value}`
-      else headers[key] = value
-    }
-
-    return headers
-  }
 }
 
-function processHeaderValue (key, val, skipAppend) {
-  if (val && typeof val === 'object') {
-    throw new InvalidArgumentError(`invalid ${key} header`)
-  }
-
-  val = val != null ? `${val}` : ''
-
-  if (headerCharRegex.exec(val) !== null) {
-    throw new InvalidArgumentError(`invalid ${key} header`)
-  }
-
-  return skipAppend ? val : `${key}: ${val}\r\n`
-}
-
-function processHeader (request, key, val, skipAppend = false) {
+function processHeader (request, key, val) {
   if (val && (typeof val === 'object' && !Array.isArray(val))) {
     throw new InvalidArgumentError(`invalid ${key} header`)
   } else if (val === undefined) {
     return
   }
 
-  if (
-    request.host === null &&
-    key.length === 4 &&
-    key.toLowerCase() === 'host'
-  ) {
-    if (headerCharRegex.exec(val) !== null) {
+  let headerName = headerNameLowerCasedRecord[key]
+
+  if (headerName === undefined) {
+    headerName = key.toLowerCase()
+    if (headerNameLowerCasedRecord[headerName] === undefined && !isValidHTTPToken(headerName)) {
+      throw new InvalidArgumentError('invalid header key')
+    }
+  }
+
+  if (Array.isArray(val)) {
+    const arr = []
+    for (let i = 0; i < val.length; i++) {
+      if (typeof val[i] === 'string') {
+        if (!isValidHeaderValue(val[i])) {
+          throw new InvalidArgumentError(`invalid ${key} header`)
+        }
+        arr.push(val[i])
+      } else if (val[i] === null) {
+        arr.push('')
+      } else if (typeof val[i] === 'object') {
+        throw new InvalidArgumentError(`invalid ${key} header`)
+      } else {
+        arr.push(`${val[i]}`)
+      }
+    }
+    val = arr
+  } else if (typeof val === 'string') {
+    if (!isValidHeaderValue(val)) {
+      throw new InvalidArgumentError(`invalid ${key} header`)
+    }
+  } else if (val === null) {
+    val = ''
+  } else {
+    val = `${val}`
+  }
+
+  if (request.host === null && headerName === 'host') {
+    if (typeof val !== 'string') {
       throw new InvalidArgumentError('invalid host header')
     }
     // Consumed by Client
     request.host = val
-  } else if (
-    request.contentLength === null &&
-    key.length === 14 &&
-    key.toLowerCase() === 'content-length'
-  ) {
+  } else if (request.contentLength === null && headerName === 'content-length') {
     request.contentLength = parseInt(val, 10)
     if (!Number.isFinite(request.contentLength)) {
       throw new InvalidArgumentError('invalid content-length header')
     }
-  } else if (
-    request.contentType === null &&
-    key.length === 12 &&
-    key.toLowerCase() === 'content-type'
-  ) {
+  } else if (request.contentType === null && headerName === 'content-type') {
     request.contentType = val
-    if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend)
-    else request.headers += processHeaderValue(key, val)
-  } else if (
-    key.length === 17 &&
-    key.toLowerCase() === 'transfer-encoding'
-  ) {
-    throw new InvalidArgumentError('invalid transfer-encoding header')
-  } else if (
-    key.length === 10 &&
-    key.toLowerCase() === 'connection'
-  ) {
+    request.headers.push(key, val)
+  } else if (headerName === 'transfer-encoding' || headerName === 'keep-alive' || headerName === 'upgrade') {
+    throw new InvalidArgumentError(`invalid ${headerName} header`)
+  } else if (headerName === 'connection') {
     const value = typeof val === 'string' ? val.toLowerCase() : null
     if (value !== 'close' && value !== 'keep-alive') {
       throw new InvalidArgumentError('invalid connection header')
-    } else if (value === 'close') {
+    }
+
+    if (value === 'close') {
       request.reset = true
     }
-  } else if (
-    key.length === 10 &&
-    key.toLowerCase() === 'keep-alive'
-  ) {
-    throw new InvalidArgumentError('invalid keep-alive header')
-  } else if (
-    key.length === 7 &&
-    key.toLowerCase() === 'upgrade'
-  ) {
-    throw new InvalidArgumentError('invalid upgrade header')
-  } else if (
-    key.length === 6 &&
-    key.toLowerCase() === 'expect'
-  ) {
+  } else if (headerName === 'expect') {
     throw new NotSupportedError('expect header not supported')
-  } else if (tokenRegExp.exec(key) === null) {
-    throw new InvalidArgumentError('invalid header key')
   } else {
-    if (Array.isArray(val)) {
-      for (let i = 0; i < val.length; i++) {
-        if (skipAppend) {
-          if (request.headers[key]) request.headers[key] += `,${processHeaderValue(key, val[i], skipAppend)}`
-          else request.headers[key] = processHeaderValue(key, val[i], skipAppend)
-        } else {
-          request.headers += processHeaderValue(key, val[i])
-        }
-      }
-    } else {
-      if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend)
-      else request.headers += processHeaderValue(key, val)
-    }
+    request.headers.push(key, val)
   }
 }
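Usage sketch (editorial addition, not part of the vendored diff): with the rework above, request headers may be a plain object, a flat key/value array, or any iterable of [key, value] pairs such as a Map, and they are stored internally as a flat array instead of a raw header string. The URL and header name below are placeholders for illustration.

'use strict'
const { request } = require('undici')

async function main () {
  const headers = new Map([
    ['accept', 'application/json'],
    ['x-request-id', 'abc123'] // hypothetical header, for illustration only
  ])

  const { statusCode, body } = await request('https://example.com', { headers })
  console.log(statusCode, await body.text())
}

main()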
19
node_modules/undici/lib/core/symbols.js
generated
vendored
@@ -1,3 +1,5 @@
+'use strict'
+
 module.exports = {
   kClose: Symbol('close'),
   kDestroy: Symbol('destroy'),
@@ -8,7 +10,6 @@ module.exports = {
   kQueue: Symbol('queue'),
   kConnect: Symbol('connect'),
   kConnecting: Symbol('connecting'),
-  kHeadersList: Symbol('headers list'),
   kKeepAliveDefaultTimeout: Symbol('default keep alive timeout'),
   kKeepAliveMaxTimeout: Symbol('max keep alive timeout'),
   kKeepAliveTimeoutThreshold: Symbol('keep alive timeout threshold'),
@@ -21,6 +22,7 @@ module.exports = {
   kHost: Symbol('host'),
   kNoRef: Symbol('no ref'),
   kBodyUsed: Symbol('used'),
+  kBody: Symbol('abstracted request body'),
   kRunning: Symbol('running'),
   kBlocking: Symbol('blocking'),
   kPending: Symbol('pending'),
@@ -33,6 +35,8 @@ module.exports = {
   kNeedDrain: Symbol('need drain'),
   kReset: Symbol('reset'),
   kDestroyed: Symbol.for('nodejs.stream.destroyed'),
+  kResume: Symbol('resume'),
+  kOnError: Symbol('on error'),
   kMaxHeadersSize: Symbol('max headers size'),
   kRunningIdx: Symbol('running index'),
   kPendingIdx: Symbol('pending index'),
@@ -50,14 +54,15 @@ module.exports = {
   kMaxRequests: Symbol('maxRequestsPerClient'),
   kProxy: Symbol('proxy agent options'),
   kCounter: Symbol('socket request counter'),
-  kInterceptors: Symbol('dispatch interceptors'),
   kMaxResponseSize: Symbol('max response size'),
   kHTTP2Session: Symbol('http2Session'),
   kHTTP2SessionState: Symbol('http2Session state'),
-  kHTTP2BuildRequest: Symbol('http2 build request'),
-  kHTTP1BuildRequest: Symbol('http1 build request'),
-  kHTTP2CopyHeaders: Symbol('http2 copy headers'),
-  kHTTPConnVersion: Symbol('http connection version'),
   kRetryHandlerDefaultRetry: Symbol('retry agent default retry'),
-  kConstruct: Symbol('constructable')
+  kConstruct: Symbol('constructable'),
+  kListeners: Symbol('listeners'),
+  kHTTPContext: Symbol('http context'),
+  kMaxConcurrentStreams: Symbol('max concurrent streams'),
+  kNoProxyAgent: Symbol('no proxy agent'),
+  kHttpProxyAgent: Symbol('http proxy agent'),
+  kHttpsProxyAgent: Symbol('https proxy agent')
 }
160
node_modules/undici/lib/core/tree.js
generated
vendored
Normal file
@ -0,0 +1,160 @@
'use strict'

const {
  wellknownHeaderNames,
  headerNameLowerCasedRecord
} = require('./constants')

class TstNode {
  /** @type {any} */
  value = null
  /** @type {null | TstNode} */
  left = null
  /** @type {null | TstNode} */
  middle = null
  /** @type {null | TstNode} */
  right = null
  /** @type {number} */
  code
  /**
   * @param {string} key
   * @param {any} value
   * @param {number} index
   */
  constructor (key, value, index) {
    if (index === undefined || index >= key.length) {
      throw new TypeError('Unreachable')
    }
    const code = this.code = key.charCodeAt(index)
    // check code is ascii string
    if (code > 0x7F) {
      throw new TypeError('key must be ascii string')
    }
    if (key.length !== ++index) {
      this.middle = new TstNode(key, value, index)
    } else {
      this.value = value
    }
  }

  /**
   * @param {string} key
   * @param {any} value
   * @returns {void}
   */
  add (key, value) {
    const length = key.length
    if (length === 0) {
      throw new TypeError('Unreachable')
    }
    let index = 0
    /**
     * @type {TstNode}
     */
    let node = this
    while (true) {
      const code = key.charCodeAt(index)
      // check code is ascii string
      if (code > 0x7F) {
        throw new TypeError('key must be ascii string')
      }
      if (node.code === code) {
        if (length === ++index) {
          node.value = value
          break
        } else if (node.middle !== null) {
          node = node.middle
        } else {
          node.middle = new TstNode(key, value, index)
          break
        }
      } else if (node.code < code) {
        if (node.left !== null) {
          node = node.left
        } else {
          node.left = new TstNode(key, value, index)
          break
        }
      } else if (node.right !== null) {
        node = node.right
      } else {
        node.right = new TstNode(key, value, index)
        break
      }
    }
  }

  /**
   * @param {Uint8Array} key
   * @return {TstNode | null}
   */
  search (key) {
    const keylength = key.length
    let index = 0
    /**
     * @type {TstNode|null}
     */
    let node = this
    while (node !== null && index < keylength) {
      let code = key[index]
      // A-Z
      // First check if it is bigger than 0x5a.
      // Lowercase letters have higher char codes than uppercase ones.
      // Also we assume that headers will mostly contain lowercase characters.
      if (code <= 0x5a && code >= 0x41) {
        // Lowercase for uppercase.
        code |= 32
      }
      while (node !== null) {
        if (code === node.code) {
          if (keylength === ++index) {
            // Returns Node since it is the last key.
            return node
          }
          node = node.middle
          break
        }
        node = node.code < code ? node.left : node.right
      }
    }
    return null
  }
}

class TernarySearchTree {
  /** @type {TstNode | null} */
  node = null

  /**
   * @param {string} key
   * @param {any} value
   * @returns {void}
   * */
  insert (key, value) {
    if (this.node === null) {
      this.node = new TstNode(key, value, 0)
    } else {
      this.node.add(key, value)
    }
  }

  /**
   * @param {Uint8Array} key
   * @returns {any}
   */
  lookup (key) {
    return this.node?.search(key)?.value ?? null
  }
}

const tree = new TernarySearchTree()

for (let i = 0; i < wellknownHeaderNames.length; ++i) {
  const key = headerNameLowerCasedRecord[wellknownHeaderNames[i]]
  tree.insert(key, key)
}

module.exports = {
  TernarySearchTree,
  tree
}
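The tree added above is keyed by lowercased ASCII header names and queried with raw header-name buffers; search() folds the bytes A-Z to lowercase on the fly. A minimal usage sketch, not part of the vendored diff (the require path and Buffer inputs are illustrative, assuming tree.js sits next to a constants.js as in undici/lib/core):

// tree-example.js — illustrative only.
const { TernarySearchTree } = require('./tree')

const t = new TernarySearchTree()
t.insert('content-type', 'content-type')

// lookup() takes a Uint8Array; uppercase A-Z bytes are folded to lowercase,
// so a raw wire-format header name matches the lowercased key.
console.log(t.lookup(Buffer.from('Content-Type'))) // 'content-type'
console.log(t.lookup(Buffer.from('X-Unknown')))    // null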
704
node_modules/undici/lib/core/util.js
generated
vendored
@@ -1,36 +1,112 @@
'use strict'

const assert = require('assert')
const { kDestroyed, kBodyUsed } = require('./symbols')
const { IncomingMessage } = require('http')
const stream = require('stream')
const net = require('net')
const assert = require('node:assert')
const { kDestroyed, kBodyUsed, kListeners, kBody } = require('./symbols')
const { IncomingMessage } = require('node:http')
const stream = require('node:stream')
const net = require('node:net')
const { Blob } = require('node:buffer')
const nodeUtil = require('node:util')
const { stringify } = require('node:querystring')
const { EventEmitter: EE } = require('node:events')
const { InvalidArgumentError } = require('./errors')
const { Blob } = require('buffer')
const nodeUtil = require('util')
const { stringify } = require('querystring')
const { headerNameLowerCasedRecord } = require('./constants')
const { tree } = require('./tree')

const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v))
const [nodeMajor, nodeMinor] = process.versions.node.split('.', 2).map(v => Number(v))

function nop () {}
class BodyAsyncIterable {
  constructor (body) {
    this[kBody] = body
    this[kBodyUsed] = false
  }

  async * [Symbol.asyncIterator] () {
    assert(!this[kBodyUsed], 'disturbed')
    this[kBodyUsed] = true
    yield * this[kBody]
  }
}

/**
 * @param {*} body
 * @returns {*}
 */
function wrapRequestBody (body) {
  if (isStream(body)) {
    // TODO (fix): Provide some way for the user to cache the file to e.g. /tmp
    // so that it can be dispatched again?
    // TODO (fix): Do we need 100-expect support to provide a way to do this properly?
    if (bodyLength(body) === 0) {
      body
        .on('data', function () {
          assert(false)
        })
    }

    if (typeof body.readableDidRead !== 'boolean') {
      body[kBodyUsed] = false
      EE.prototype.on.call(body, 'data', function () {
        this[kBodyUsed] = true
      })
    }

    return body
  } else if (body && typeof body.pipeTo === 'function') {
    // TODO (fix): We can't access ReadableStream internal state
    // to determine whether or not it has been disturbed. This is just
    // a workaround.
    return new BodyAsyncIterable(body)
  } else if (
    body &&
    typeof body !== 'string' &&
    !ArrayBuffer.isView(body) &&
    isIterable(body)
  ) {
    // TODO: Should we allow re-using iterable if !this.opts.idempotent
    // or through some other flag?
    return new BodyAsyncIterable(body)
  } else {
    return body
  }
}

/**
 * @param {*} obj
 * @returns {obj is import('node:stream').Stream}
 */
function isStream (obj) {
  return obj && typeof obj === 'object' && typeof obj.pipe === 'function' && typeof obj.on === 'function'
}

// based on https://github.com/node-fetch/fetch-blob/blob/8ab587d34080de94140b54f07168451e7d0b655e/index.js#L229-L241 (MIT License)
/**
 * @param {*} object
 * @returns {object is Blob}
 * based on https://github.com/node-fetch/fetch-blob/blob/8ab587d34080de94140b54f07168451e7d0b655e/index.js#L229-L241 (MIT License)
 */
function isBlobLike (object) {
  return (Blob && object instanceof Blob) || (
    object &&
    typeof object === 'object' &&
    (typeof object.stream === 'function' ||
      typeof object.arrayBuffer === 'function') &&
    /^(Blob|File)$/.test(object[Symbol.toStringTag])
  )
  if (object === null) {
    return false
  } else if (object instanceof Blob) {
    return true
  } else if (typeof object !== 'object') {
    return false
  } else {
    const sTag = object[Symbol.toStringTag]

    return (sTag === 'Blob' || sTag === 'File') && (
      ('stream' in object && typeof object.stream === 'function') ||
      ('arrayBuffer' in object && typeof object.arrayBuffer === 'function')
    )
  }
}

function buildURL (url, queryParams) {
/**
 * @param {string} url The URL to add the query params to
 * @param {import('node:querystring').ParsedUrlQueryInput} queryParams The object to serialize into a URL query string
 * @returns {string} The URL with the query params added
 */
function serializePathWithQuery (url, queryParams) {
  if (url.includes('?') || url.includes('#')) {
    throw new Error('Query params cannot be passed when url already contains "?" or "#".')
  }
@@ -44,11 +120,54 @@ function buildURL (url, queryParams) {
  return url
}

/**
 * @param {number|string|undefined} port
 * @returns {boolean}
 */
function isValidPort (port) {
  const value = parseInt(port, 10)
  return (
    value === Number(port) &&
    value >= 0 &&
    value <= 65535
  )
}

/**
 * Check if the value is a valid http or https prefixed string.
 *
 * @param {string} value
 * @returns {boolean}
 */
function isHttpOrHttpsPrefixed (value) {
  return (
    value != null &&
    value[0] === 'h' &&
    value[1] === 't' &&
    value[2] === 't' &&
    value[3] === 'p' &&
    (
      value[4] === ':' ||
      (
        value[4] === 's' &&
        value[5] === ':'
      )
    )
  )
}

/**
 * @param {string|URL|Record<string,string>} url
 * @returns {URL}
 */
function parseURL (url) {
  if (typeof url === 'string') {
    /**
     * @type {URL}
     */
    url = new URL(url)

    if (!/^https?:/.test(url.origin || url.protocol)) {
    if (!isHttpOrHttpsPrefixed(url.origin || url.protocol)) {
      throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
    }

@@ -59,12 +178,8 @@ function parseURL (url) {
    throw new InvalidArgumentError('Invalid URL: The URL argument must be a non-null object.')
  }

  if (!/^https?:/.test(url.origin || url.protocol)) {
    throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
  }

  if (!(url instanceof URL)) {
    if (url.port != null && url.port !== '' && !Number.isFinite(parseInt(url.port))) {
    if (url.port != null && url.port !== '' && isValidPort(url.port) === false) {
      throw new InvalidArgumentError('Invalid URL: port must be a valid integer or a string representation of an integer.')
    }

@@ -84,33 +199,45 @@ function parseURL (url) {
      throw new InvalidArgumentError('Invalid URL origin: the origin must be a string or null/undefined.')
    }

    if (!isHttpOrHttpsPrefixed(url.origin || url.protocol)) {
      throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
    }

    const port = url.port != null
      ? url.port
      : (url.protocol === 'https:' ? 443 : 80)
    let origin = url.origin != null
      ? url.origin
      : `${url.protocol}//${url.hostname}:${port}`
      : `${url.protocol || ''}//${url.hostname || ''}:${port}`
    let path = url.path != null
      ? url.path
      : `${url.pathname || ''}${url.search || ''}`

    if (origin.endsWith('/')) {
      origin = origin.substring(0, origin.length - 1)
    if (origin[origin.length - 1] === '/') {
      origin = origin.slice(0, origin.length - 1)
    }

    if (path && !path.startsWith('/')) {
    if (path && path[0] !== '/') {
      path = `/${path}`
    }
    // new URL(path, origin) is unsafe when `path` contains an absolute URL
    // From https://developer.mozilla.org/en-US/docs/Web/API/URL/URL:
    // If first parameter is a relative URL, second param is required, and will be used as the base URL.
    // If first parameter is an absolute URL, a given second param will be ignored.
    url = new URL(origin + path)
    return new URL(`${origin}${path}`)
  }

  if (!isHttpOrHttpsPrefixed(url.origin || url.protocol)) {
    throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
  }

  return url
}

/**
 * @param {string|URL|Record<string, string>} url
 * @returns {URL}
 */
function parseOrigin (url) {
  url = parseURL(url)

@@ -121,6 +248,10 @@ function parseOrigin (url) {
  return url
}

/**
 * @param {string} host
 * @returns {string}
 */
function getHostname (host) {
  if (host[0] === '[') {
    const idx = host.indexOf(']')
@@ -135,14 +266,18 @@ function getHostname (host) {
  return host.substring(0, idx)
}

// IP addresses are not valid server names per RFC6066
// > Currently, the only server names supported are DNS hostnames
/**
 * IP addresses are not valid server names per RFC6066
 * Currently, the only server names supported are DNS hostnames
 * @param {string|null} host
 * @returns {string|null}
 */
function getServerName (host) {
  if (!host) {
    return null
  }

  assert.strictEqual(typeof host, 'string')
  assert(typeof host === 'string')

  const servername = getHostname(host)
  if (net.isIP(servername)) {
@@ -152,18 +287,36 @@ function getServerName (host) {
  return servername
}

/**
 * @function
 * @template T
 * @param {T} obj
 * @returns {T}
 */
function deepClone (obj) {
  return JSON.parse(JSON.stringify(obj))
}

/**
 * @param {*} obj
 * @returns {obj is AsyncIterable}
 */
function isAsyncIterable (obj) {
  return !!(obj != null && typeof obj[Symbol.asyncIterator] === 'function')
}

/**
 * @param {*} obj
 * @returns {obj is Iterable}
 */
function isIterable (obj) {
  return !!(obj != null && (typeof obj[Symbol.iterator] === 'function' || typeof obj[Symbol.asyncIterator] === 'function'))
}

/**
 * @param {Blob|Buffer|import ('stream').Stream} body
 * @returns {number|null}
 */
function bodyLength (body) {
  if (body == null) {
    return 0
@@ -181,15 +334,19 @@ function bodyLength (body) {
  return null
}

function isDestroyed (stream) {
  return !stream || !!(stream.destroyed || stream[kDestroyed])
}

function isReadableAborted (stream) {
  const state = stream && stream._readableState
  return isDestroyed(stream) && state && !state.endEmitted
/**
 * @param {import ('stream').Stream} body
 * @returns {boolean}
 */
function isDestroyed (body) {
  return body && !!(body.destroyed || body[kDestroyed] || (stream.isDestroyed?.(body)))
}

/**
 * @param {import ('stream').Stream} stream
 * @param {Error} [err]
 * @returns {void}
 */
function destroy (stream, err) {
  if (stream == null || !isStream(stream) || isDestroyed(stream)) {
    return
@@ -203,9 +360,9 @@ function destroy (stream, err) {

    stream.destroy(err)
  } else if (err) {
    process.nextTick((stream, err) => {
    queueMicrotask(() => {
      stream.emit('error', err)
    }, stream, err)
    })
  }

  if (stream.destroyed !== true) {
@@ -214,8 +371,12 @@ function destroy (stream, err) {
}

const KEEPALIVE_TIMEOUT_EXPR = /timeout=(\d+)/
/**
 * @param {string} val
 * @returns {number | null}
 */
function parseKeepAliveTimeout (val) {
  const m = val.toString().match(KEEPALIVE_TIMEOUT_EXPR)
  const m = val.match(KEEPALIVE_TIMEOUT_EXPR)
  return m ? parseInt(m[1], 10) * 1000 : null
}

@@ -225,29 +386,45 @@ function parseKeepAliveTimeout (val) {
 * @returns {string}
 */
function headerNameToString (value) {
  return headerNameLowerCasedRecord[value] || value.toLowerCase()
  return typeof value === 'string'
    ? headerNameLowerCasedRecord[value] ?? value.toLowerCase()
    : tree.lookup(value) ?? value.toString('latin1').toLowerCase()
}

function parseHeaders (headers, obj = {}) {
  // For H2 support
  if (!Array.isArray(headers)) return headers
/**
 * Receive the buffer as a string and return its lowercase value.
 * @param {Buffer} value Header name
 * @returns {string}
 */
function bufferToLowerCasedHeaderName (value) {
  return tree.lookup(value) ?? value.toString('latin1').toLowerCase()
}

/**
 * @param {(Buffer | string)[]} headers
 * @param {Record<string, string | string[]>} [obj]
 * @returns {Record<string, string | string[]>}
 */
function parseHeaders (headers, obj) {
  if (obj === undefined) obj = {}

  for (let i = 0; i < headers.length; i += 2) {
    const key = headers[i].toString().toLowerCase()
    const key = headerNameToString(headers[i])
    let val = obj[key]

    if (!val) {
      if (Array.isArray(headers[i + 1])) {
        obj[key] = headers[i + 1].map(x => x.toString('utf8'))
      } else {
        obj[key] = headers[i + 1].toString('utf8')
      }
    } else {
      if (!Array.isArray(val)) {
    if (val) {
      if (typeof val === 'string') {
        val = [val]
        obj[key] = val
      }
      val.push(headers[i + 1].toString('utf8'))
    } else {
      const headersValue = headers[i + 1]
      if (typeof headersValue === 'string') {
        obj[key] = headersValue
      } else {
        obj[key] = Array.isArray(headersValue) ? headersValue.map(x => x.toString('utf8')) : headersValue.toString('utf8')
      }
    }
  }

@@ -259,23 +436,38 @@ function parseHeaders (headers, obj = {}) {
  return obj
}

/**
 * @param {Buffer[]} headers
 * @returns {string[]}
 */
function parseRawHeaders (headers) {
  const ret = []
  const headersLength = headers.length
  /**
   * @type {string[]}
   */
  const ret = new Array(headersLength)

  let hasContentLength = false
  let contentDispositionIdx = -1
  let key
  let val
  let kLen = 0

  for (let n = 0; n < headers.length; n += 2) {
    const key = headers[n + 0].toString()
    const val = headers[n + 1].toString('utf8')
  for (let n = 0; n < headersLength; n += 2) {
    key = headers[n]
    val = headers[n + 1]

    if (key.length === 14 && (key === 'content-length' || key.toLowerCase() === 'content-length')) {
      ret.push(key, val)
    typeof key !== 'string' && (key = key.toString())
    typeof val !== 'string' && (val = val.toString('utf8'))

    kLen = key.length
    if (kLen === 14 && key[7] === '-' && (key === 'content-length' || key.toLowerCase() === 'content-length')) {
      hasContentLength = true
    } else if (key.length === 19 && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) {
      contentDispositionIdx = ret.push(key, val) - 1
    } else {
      ret.push(key, val)
    } else if (kLen === 19 && key[7] === '-' && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) {
      contentDispositionIdx = n + 1
    }
    ret[n] = key
    ret[n + 1] = val
  }

  // See https://github.com/nodejs/node/pull/46528
@@ -286,16 +478,44 @@ function parseRawHeaders (headers) {
  return ret
}

/**
 * @param {string[]} headers
 * @param {Buffer[]} headers
 */
function encodeRawHeaders (headers) {
  if (!Array.isArray(headers)) {
    throw new TypeError('expected headers to be an array')
  }
  return headers.map(x => Buffer.from(x))
}

/**
 * @param {*} buffer
 * @returns {buffer is Buffer}
 */
function isBuffer (buffer) {
  // See, https://github.com/mcollina/undici/pull/319
  return buffer instanceof Uint8Array || Buffer.isBuffer(buffer)
}

function validateHandler (handler, method, upgrade) {
/**
 * Asserts that the handler object is a request handler.
 *
 * @param {object} handler
 * @param {string} method
 * @param {string} [upgrade]
 * @returns {asserts handler is import('../api/api-request').RequestHandler}
 */
function assertRequestHandler (handler, method, upgrade) {
  if (!handler || typeof handler !== 'object') {
    throw new InvalidArgumentError('handler must be an object')
  }

  if (typeof handler.onRequestStart === 'function') {
    // TODO (fix): More checks...
    return
  }

  if (typeof handler.onConnect !== 'function') {
    throw new InvalidArgumentError('invalid onConnect method')
  }
@@ -327,35 +547,33 @@ function validateHandler (handler, method, upgrade) {
  }
}

// A body is disturbed if it has been read from and it cannot
// be re-used without losing state or data.
/**
 * A body is disturbed if it has been read from and it cannot be re-used without
 * losing state or data.
 * @param {import('node:stream').Readable} body
 * @returns {boolean}
 */
function isDisturbed (body) {
  return !!(body && (
    stream.isDisturbed
      ? stream.isDisturbed(body) || body[kBodyUsed] // TODO (fix): Why is body[kBodyUsed] needed?
      : body[kBodyUsed] ||
        body.readableDidRead ||
        (body._readableState && body._readableState.dataEmitted) ||
        isReadableAborted(body)
  ))
  // TODO (fix): Why is body[kBodyUsed] needed?
  return !!(body && (stream.isDisturbed(body) || body[kBodyUsed]))
}

function isErrored (body) {
  return !!(body && (
    stream.isErrored
      ? stream.isErrored(body)
      : /state: 'errored'/.test(nodeUtil.inspect(body)
  )))
}

function isReadable (body) {
  return !!(body && (
    stream.isReadable
      ? stream.isReadable(body)
      : /state: 'readable'/.test(nodeUtil.inspect(body)
  )))
}
/**
 * @typedef {object} SocketInfo
 * @property {string} [localAddress]
 * @property {number} [localPort]
 * @property {string} [remoteAddress]
 * @property {number} [remotePort]
 * @property {string} [remoteFamily]
 * @property {number} [timeout]
 * @property {number} bytesWritten
 * @property {number} bytesRead
 */

/**
 * @param {import('net').Socket} socket
 * @returns {SocketInfo}
 */
function getSocketInfo (socket) {
  return {
    localAddress: socket.localAddress,
@@ -369,21 +587,12 @@ function getSocketInfo (socket) {
  }
}

async function * convertIterableToBuffer (iterable) {
  for await (const chunk of iterable) {
    yield Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)
  }
}

let ReadableStream
/**
 * @param {Iterable} iterable
 * @returns {ReadableStream}
 */
function ReadableStreamFrom (iterable) {
  if (!ReadableStream) {
    ReadableStream = require('stream/web').ReadableStream
  }

  if (ReadableStream.from) {
    return ReadableStream.from(convertIterableToBuffer(iterable))
  }
  // We cannot use ReadableStream.from here because it does not return a byte stream.

  let iterator
  return new ReadableStream(
@@ -391,28 +600,40 @@ function ReadableStreamFrom (iterable) {
      async start () {
        iterator = iterable[Symbol.asyncIterator]()
      },
      async pull (controller) {
        const { done, value } = await iterator.next()
        if (done) {
          queueMicrotask(() => {
            controller.close()
          })
        } else {
          const buf = Buffer.isBuffer(value) ? value : Buffer.from(value)
          controller.enqueue(new Uint8Array(buf))
      pull (controller) {
        async function pull () {
          const { done, value } = await iterator.next()
          if (done) {
            queueMicrotask(() => {
              controller.close()
              controller.byobRequest?.respond(0)
            })
          } else {
            const buf = Buffer.isBuffer(value) ? value : Buffer.from(value)
            if (buf.byteLength) {
              controller.enqueue(new Uint8Array(buf))
            } else {
              return await pull()
            }
          }
        }
        return controller.desiredSize > 0

        return pull()
      },
      async cancel (reason) {
      async cancel () {
        await iterator.return()
      }
    },
      0
    },
      type: 'bytes'
    }
  )
}

// The chunk should be a FormData instance and contains
// all the required methods.
/**
 * The object should be a FormData instance and contains all the required
 * methods.
 * @param {*} object
 * @returns {object is FormData}
 */
function isFormDataLike (object) {
  return (
    object &&
@@ -427,50 +648,143 @@ function isFormDataLike (object) {
  )
}

function throwIfAborted (signal) {
  if (!signal) { return }
  if (typeof signal.throwIfAborted === 'function') {
    signal.throwIfAborted()
  } else {
    if (signal.aborted) {
      // DOMException not available < v17.0.0
      const err = new Error('The operation was aborted')
      err.name = 'AbortError'
      throw err
    }
  }
}

function addAbortListener (signal, listener) {
  if ('addEventListener' in signal) {
    signal.addEventListener('abort', listener, { once: true })
    return () => signal.removeEventListener('abort', listener)
  }
  signal.addListener('abort', listener)
  signal.once('abort', listener)
  return () => signal.removeListener('abort', listener)
}

const hasToWellFormed = !!String.prototype.toWellFormed
/**
 * @function
 * @param {string} value
 * @returns {string}
 */
const toUSVString = (() => {
  if (typeof String.prototype.toWellFormed === 'function') {
    /**
     * @param {string} value
     * @returns {string}
     */
    return (value) => `${value}`.toWellFormed()
  } else {
    /**
     * @param {string} value
     * @returns {string}
     */
    return nodeUtil.toUSVString
  }
})()

/**
 * @param {string} val
 * @param {*} value
 * @returns {boolean}
 */
function toUSVString (val) {
  if (hasToWellFormed) {
    return `${val}`.toWellFormed()
  } else if (nodeUtil.toUSVString) {
    return nodeUtil.toUSVString(val)
// TODO: move this to webidl
const isUSVString = (() => {
  if (typeof String.prototype.isWellFormed === 'function') {
    /**
     * @param {*} value
     * @returns {boolean}
     */
    return (value) => `${value}`.isWellFormed()
  } else {
    /**
     * @param {*} value
     * @returns {boolean}
     */
    return (value) => toUSVString(value) === `${value}`
  }
})()

  return `${val}`
/**
 * @see https://tools.ietf.org/html/rfc7230#section-3.2.6
 * @param {number} c
 * @returns {boolean}
 */
function isTokenCharCode (c) {
  switch (c) {
    case 0x22:
    case 0x28:
    case 0x29:
    case 0x2c:
    case 0x2f:
    case 0x3a:
    case 0x3b:
    case 0x3c:
    case 0x3d:
    case 0x3e:
    case 0x3f:
    case 0x40:
    case 0x5b:
    case 0x5c:
    case 0x5d:
    case 0x7b:
    case 0x7d:
      // DQUOTE and "(),/:;<=>?@[\]{}"
      return false
    default:
      // VCHAR %x21-7E
      return c >= 0x21 && c <= 0x7e
  }
}

// Parsed accordingly to RFC 9110
// https://www.rfc-editor.org/rfc/rfc9110#field.content-range
/**
 * @param {string} characters
 * @returns {boolean}
 */
function isValidHTTPToken (characters) {
  if (characters.length === 0) {
    return false
  }
  for (let i = 0; i < characters.length; ++i) {
    if (!isTokenCharCode(characters.charCodeAt(i))) {
      return false
    }
  }
  return true
}

// headerCharRegex have been lifted from
// https://github.com/nodejs/node/blob/main/lib/_http_common.js

/**
 * Matches if val contains an invalid field-vchar
 * field-value = *( field-content / obs-fold )
 * field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ]
 * field-vchar = VCHAR / obs-text
 */
const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/

/**
 * @param {string} characters
 * @returns {boolean}
 */
function isValidHeaderValue (characters) {
  return !headerCharRegex.test(characters)
}

const rangeHeaderRegex = /^bytes (\d+)-(\d+)\/(\d+)?$/

/**
 * @typedef {object} RangeHeader
 * @property {number} start
 * @property {number | null} end
 * @property {number | null} size
 */

/**
 * Parse accordingly to RFC 9110
 * @see https://www.rfc-editor.org/rfc/rfc9110#field.content-range
 * @param {string} [range]
 * @returns {RangeHeader|null}
 */
function parseRangeHeader (range) {
  if (range == null || range === '') return { start: 0, end: null, size: null }

  const m = range ? range.match(/^bytes (\d+)-(\d+)\/(\d+)?$/) : null
  const m = range ? range.match(rangeHeaderRegex) : null
  return m
    ? {
        start: parseInt(m[1]),
@@ -480,17 +794,82 @@ function parseRangeHeader (range) {
    : null
}

/**
 * @template {import("events").EventEmitter} T
 * @param {T} obj
 * @param {string} name
 * @param {(...args: any[]) => void} listener
 * @returns {T}
 */
function addListener (obj, name, listener) {
  const listeners = (obj[kListeners] ??= [])
  listeners.push([name, listener])
  obj.on(name, listener)
  return obj
}

/**
 * @template {import("events").EventEmitter} T
 * @param {T} obj
 * @returns {T}
 */
function removeAllListeners (obj) {
  if (obj[kListeners] != null) {
    for (const [name, listener] of obj[kListeners]) {
      obj.removeListener(name, listener)
    }
    obj[kListeners] = null
  }
  return obj
}

/**
 * @param {import ('../dispatcher/client')} client
 * @param {import ('../core/request')} request
 * @param {Error} err
 */
function errorRequest (client, request, err) {
  try {
    request.onError(err)
    assert(request.aborted)
  } catch (err) {
    client.emit('error', err)
  }
}

const kEnumerableProperty = Object.create(null)
kEnumerableProperty.enumerable = true

const normalizedMethodRecordsBase = {
  delete: 'DELETE',
  DELETE: 'DELETE',
  get: 'GET',
  GET: 'GET',
  head: 'HEAD',
  HEAD: 'HEAD',
  options: 'OPTIONS',
  OPTIONS: 'OPTIONS',
  post: 'POST',
  POST: 'POST',
  put: 'PUT',
  PUT: 'PUT'
}

const normalizedMethodRecords = {
  ...normalizedMethodRecordsBase,
  patch: 'patch',
  PATCH: 'PATCH'
}

// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.
Object.setPrototypeOf(normalizedMethodRecordsBase, null)
Object.setPrototypeOf(normalizedMethodRecords, null)

module.exports = {
  kEnumerableProperty,
  nop,
  isDisturbed,
  isErrored,
  isReadable,
  toUSVString,
  isReadableAborted,
  isUSVString,
  isBlobLike,
  parseOrigin,
  parseURL,
@@ -500,7 +879,12 @@ module.exports = {
  isAsyncIterable,
  isDestroyed,
  headerNameToString,
  bufferToLowerCasedHeaderName,
  addListener,
  removeAllListeners,
  errorRequest,
  parseRawHeaders,
  encodeRawHeaders,
  parseHeaders,
  parseKeepAliveTimeout,
  destroy,
@@ -508,15 +892,21 @@ module.exports = {
  deepClone,
  ReadableStreamFrom,
  isBuffer,
  validateHandler,
  assertRequestHandler,
  getSocketInfo,
  isFormDataLike,
  buildURL,
  throwIfAborted,
  serializePathWithQuery,
  addAbortListener,
  isValidHTTPToken,
  isValidHeaderValue,
  isTokenCharCode,
  parseRangeHeader,
  normalizedMethodRecordsBase,
  normalizedMethodRecords,
  isValidPort,
  isHttpOrHttpsPrefixed,
  nodeMajor,
  nodeMinor,
  nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13),
  safeHTTPMethods: ['GET', 'HEAD', 'OPTIONS', 'TRACE']
  safeHTTPMethods: Object.freeze(['GET', 'HEAD', 'OPTIONS', 'TRACE']),
  wrapRequestBody
}
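The new header-name helpers above route string keys through headerNameLowerCasedRecord and Buffer keys through the ternary search tree, falling back to latin1 decoding plus toLowerCase. A rough sketch of the observable behavior, not part of the diff (inputs are illustrative):

// util-example.js — illustrative only.
const util = require('./util')

// String names hit the lowercased record (or String#toLowerCase):
console.log(util.headerNameToString('Content-Length')) // 'content-length'

// Buffer names hit the tree, with a latin1 + toLowerCase fallback:
console.log(util.bufferToLowerCasedHeaderName(Buffer.from('X-Foo'))) // 'x-foo'

// parseRangeHeader follows RFC 9110 content-range syntax:
console.log(util.parseRangeHeader('bytes 0-99/1000')) // { start: 0, end: 99, size: 1000 }
console.log(util.parseRangeHeader(''))                // { start: 0, end: null, size: null }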
19
node_modules/undici/lib/dispatcher.js
generated
vendored
@@ -1,19 +0,0 @@
'use strict'

const EventEmitter = require('events')

class Dispatcher extends EventEmitter {
  dispatch () {
    throw new Error('not implemented')
  }

  close () {
    throw new Error('not implemented')
  }

  destroy () {
    throw new Error('not implemented')
  }
}

module.exports = Dispatcher
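The deleted file above was the old abstract dispatcher base (the tree now lives under lib/dispatcher/). For orientation, a minimal sketch of the pattern it encoded; LoggingDispatcher is hypothetical and not part of undici:

// dispatcher-example.js — illustrative only.
const EventEmitter = require('events')

class Dispatcher extends EventEmitter {
  dispatch () { throw new Error('not implemented') }
}

class LoggingDispatcher extends Dispatcher {
  dispatch (opts, handler) {
    console.log('dispatching', opts.method, opts.origin, opts.path)
    return false // false signals "busy"; callers wait for a 'drain' event
  }
}

new LoggingDispatcher().dispatch({ method: 'GET', origin: 'http://x', path: '/' }, {})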
115
node_modules/undici/lib/dispatcher/agent.js
generated
vendored
Normal file
@@ -0,0 +1,115 @@
'use strict'

const { InvalidArgumentError } = require('../core/errors')
const { kClients, kRunning, kClose, kDestroy, kDispatch } = require('../core/symbols')
const DispatcherBase = require('./dispatcher-base')
const Pool = require('./pool')
const Client = require('./client')
const util = require('../core/util')

const kOnConnect = Symbol('onConnect')
const kOnDisconnect = Symbol('onDisconnect')
const kOnConnectionError = Symbol('onConnectionError')
const kOnDrain = Symbol('onDrain')
const kFactory = Symbol('factory')
const kOptions = Symbol('options')

function defaultFactory (origin, opts) {
  return opts && opts.connections === 1
    ? new Client(origin, opts)
    : new Pool(origin, opts)
}

class Agent extends DispatcherBase {
  constructor ({ factory = defaultFactory, connect, ...options } = {}) {
    if (typeof factory !== 'function') {
      throw new InvalidArgumentError('factory must be a function.')
    }

    if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
      throw new InvalidArgumentError('connect must be a function or an object')
    }

    super()

    if (connect && typeof connect !== 'function') {
      connect = { ...connect }
    }

    this[kOptions] = { ...util.deepClone(options), connect }
    this[kFactory] = factory
    this[kClients] = new Map()

    this[kOnDrain] = (origin, targets) => {
      this.emit('drain', origin, [this, ...targets])
    }

    this[kOnConnect] = (origin, targets) => {
      this.emit('connect', origin, [this, ...targets])
    }

    this[kOnDisconnect] = (origin, targets, err) => {
      this.emit('disconnect', origin, [this, ...targets], err)
    }

    this[kOnConnectionError] = (origin, targets, err) => {
      this.emit('connectionError', origin, [this, ...targets], err)
    }
  }

  get [kRunning] () {
    let ret = 0
    for (const client of this[kClients].values()) {
      ret += client[kRunning]
    }
    return ret
  }

  [kDispatch] (opts, handler) {
    let key
    if (opts.origin && (typeof opts.origin === 'string' || opts.origin instanceof URL)) {
      key = String(opts.origin)
    } else {
      throw new InvalidArgumentError('opts.origin must be a non-empty string or URL.')
    }

    let dispatcher = this[kClients].get(key)

    if (!dispatcher) {
      dispatcher = this[kFactory](opts.origin, this[kOptions])
        .on('drain', this[kOnDrain])
        .on('connect', this[kOnConnect])
        .on('disconnect', this[kOnDisconnect])
        .on('connectionError', this[kOnConnectionError])

      // This introduces a tiny memory leak, as dispatchers are never removed from the map.
      // TODO(mcollina): remove the timer when the client/pool do not have any more
      // active connections.
      this[kClients].set(key, dispatcher)
    }

    return dispatcher.dispatch(opts, handler)
  }

  async [kClose] () {
    const closePromises = []
    for (const client of this[kClients].values()) {
      closePromises.push(client.close())
    }
    this[kClients].clear()

    await Promise.all(closePromises)
  }

  async [kDestroy] (err) {
    const destroyPromises = []
    for (const client of this[kClients].values()) {
      destroyPromises.push(client.destroy(err))
    }
    this[kClients].clear()

    await Promise.all(destroyPromises)
  }
}

module.exports = Agent
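The new Agent keeps one Client or Pool per origin in a Map and re-emits their events with itself prepended to the target list. A minimal usage sketch against undici's public API, not part of the diff (the URL is illustrative):

// agent-example.js — illustrative only.
const { Agent, request } = require('undici')

// connections: 1 makes the default factory pick Client over Pool per origin.
const agent = new Agent({ connections: 1 })

async function main () {
  const { statusCode, body } = await request('http://example.com/', { dispatcher: agent })
  console.log(statusCode)
  await body.text()
  await agent.close() // closes every per-origin dispatcher in the map
}

main()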
node_modules/undici/lib/dispatcher/balanced-pool.js
@@ -3,7 +3,7 @@
const {
  BalancedPoolMissingUpstreamError,
  InvalidArgumentError
} = require('./core/errors')
} = require('../core/errors')
const {
  PoolBase,
  kClients,
@@ -13,8 +13,8 @@ const {
  kGetDispatcher
} = require('./pool-base')
const Pool = require('./pool')
const { kUrl, kInterceptors } = require('./core/symbols')
const { parseOrigin } = require('./core/util')
const { kUrl } = require('../core/symbols')
const { parseOrigin } = require('../core/util')
const kFactory = Symbol('factory')

const kOptions = Symbol('options')
@@ -25,9 +25,23 @@ const kWeight = Symbol('kWeight')
const kMaxWeightPerServer = Symbol('kMaxWeightPerServer')
const kErrorPenalty = Symbol('kErrorPenalty')

/**
 * Calculate the greatest common divisor of two numbers by
 * using the Euclidean algorithm.
 *
 * @param {number} a
 * @param {number} b
 * @returns {number}
 */
function getGreatestCommonDivisor (a, b) {
  if (b === 0) return a
  return getGreatestCommonDivisor(b, a % b)
  if (a === 0) return b

  while (b !== 0) {
    const t = b
    b = a % b
    a = t
  }
  return a
}

function defaultFactory (origin, opts) {
@@ -36,6 +50,10 @@ function defaultFactory (origin, opts) {

class BalancedPool extends PoolBase {
  constructor (upstreams = [], { factory = defaultFactory, ...opts } = {}) {
    if (typeof factory !== 'function') {
      throw new InvalidArgumentError('factory must be a function.')
    }

    super()

    this[kOptions] = opts
@@ -49,13 +67,6 @@ class BalancedPool extends PoolBase {
      upstreams = [upstreams]
    }

    if (typeof factory !== 'function') {
      throw new InvalidArgumentError('factory must be a function.')
    }

    this[kInterceptors] = opts.interceptors && opts.interceptors.BalancedPool && Array.isArray(opts.interceptors.BalancedPool)
      ? opts.interceptors.BalancedPool
      : []
    this[kFactory] = factory

    for (const upstream of upstreams) {
@@ -105,7 +116,12 @@ class BalancedPool extends PoolBase {
  }

  _updateBalancedPoolStats () {
    this[kGreatestCommonDivisor] = this[kClients].map(p => p[kWeight]).reduce(getGreatestCommonDivisor, 0)
    let result = 0
    for (let i = 0; i < this[kClients].length; i++) {
      result = getGreatestCommonDivisor(this[kClients][i][kWeight], result)
    }

    this[kGreatestCommonDivisor] = result
  }

  removeUpstream (upstream) {
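The hunk above swaps the recursive Euclidean GCD for an iterative one and folds the per-client weights in a plain loop instead of map().reduce(). The arithmetic is unchanged; a quick standalone check (the weights are illustrative):

// gcd-example.js — illustrative only.
function getGreatestCommonDivisor (a, b) {
  if (a === 0) return b
  while (b !== 0) {
    const t = b
    b = a % b
    a = t
  }
  return a
}

// Folding weights [100, 40, 30] exactly as _updateBalancedPoolStats does:
let result = 0
for (const weight of [100, 40, 30]) {
  result = getGreatestCommonDivisor(weight, result)
}
console.log(result) // 10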
1615
node_modules/undici/lib/dispatcher/client-h1.js
generated
vendored
Normal file
File diff suppressed because it is too large
795
node_modules/undici/lib/dispatcher/client-h2.js
generated
vendored
Normal file
@@ -0,0 +1,795 @@
'use strict'

const assert = require('node:assert')
const { pipeline } = require('node:stream')
const util = require('../core/util.js')
const {
  RequestContentLengthMismatchError,
  RequestAbortedError,
  SocketError,
  InformationalError
} = require('../core/errors.js')
const {
  kUrl,
  kReset,
  kClient,
  kRunning,
  kPending,
  kQueue,
  kPendingIdx,
  kRunningIdx,
  kError,
  kSocket,
  kStrictContentLength,
  kOnError,
  kMaxConcurrentStreams,
  kHTTP2Session,
  kResume,
  kSize,
  kHTTPContext,
  kClosed,
  kBodyTimeout
} = require('../core/symbols.js')
const { channels } = require('../core/diagnostics.js')

const kOpenStreams = Symbol('open streams')

let extractBody

/** @type {import('http2')} */
let http2
try {
  http2 = require('node:http2')
} catch {
  // @ts-ignore
  http2 = { constants: {} }
}

const {
  constants: {
    HTTP2_HEADER_AUTHORITY,
    HTTP2_HEADER_METHOD,
    HTTP2_HEADER_PATH,
    HTTP2_HEADER_SCHEME,
    HTTP2_HEADER_CONTENT_LENGTH,
    HTTP2_HEADER_EXPECT,
    HTTP2_HEADER_STATUS
  }
} = http2

function parseH2Headers (headers) {
  const result = []

  for (const [name, value] of Object.entries(headers)) {
    // h2 may concat the header value by array
    // e.g. Set-Cookie
    if (Array.isArray(value)) {
      for (const subvalue of value) {
        // we need to provide each header value of header name
        // because the headers handler expect name-value pair
        result.push(Buffer.from(name), Buffer.from(subvalue))
      }
    } else {
      result.push(Buffer.from(name), Buffer.from(value))
    }
  }

  return result
}

async function connectH2 (client, socket) {
  client[kSocket] = socket

  const session = http2.connect(client[kUrl], {
    createConnection: () => socket,
    peerMaxConcurrentStreams: client[kMaxConcurrentStreams],
    settings: {
      // TODO(metcoder95): add support for PUSH
      enablePush: false
    }
  })

  session[kOpenStreams] = 0
  session[kClient] = client
  session[kSocket] = socket
  session[kHTTP2Session] = null

  util.addListener(session, 'error', onHttp2SessionError)
  util.addListener(session, 'frameError', onHttp2FrameError)
  util.addListener(session, 'end', onHttp2SessionEnd)
  util.addListener(session, 'goaway', onHttp2SessionGoAway)
  util.addListener(session, 'close', onHttp2SessionClose)

  session.unref()

  client[kHTTP2Session] = session
  socket[kHTTP2Session] = session

  util.addListener(socket, 'error', onHttp2SocketError)
  util.addListener(socket, 'end', onHttp2SocketEnd)
  util.addListener(socket, 'close', onHttp2SocketClose)

  socket[kClosed] = false
  socket.on('close', onSocketClose)

  return {
    version: 'h2',
    defaultPipelining: Infinity,
    write (request) {
      return writeH2(client, request)
    },
    resume () {
      resumeH2(client)
    },
    destroy (err, callback) {
      if (socket[kClosed]) {
        queueMicrotask(callback)
      } else {
        socket.destroy(err).on('close', callback)
      }
    },
    get destroyed () {
      return socket.destroyed
    },
    busy () {
      return false
    }
  }
}

function resumeH2 (client) {
  const socket = client[kSocket]

  if (socket?.destroyed === false) {
    if (client[kSize] === 0 || client[kMaxConcurrentStreams] === 0) {
      socket.unref()
      client[kHTTP2Session].unref()
    } else {
      socket.ref()
      client[kHTTP2Session].ref()
    }
  }
}

function onHttp2SessionError (err) {
  assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID')

  this[kSocket][kError] = err
  this[kClient][kOnError](err)
}

function onHttp2FrameError (type, code, id) {
  if (id === 0) {
    const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`)
    this[kSocket][kError] = err
    this[kClient][kOnError](err)
  }
}

function onHttp2SessionEnd () {
  const err = new SocketError('other side closed', util.getSocketInfo(this[kSocket]))
  this.destroy(err)
  util.destroy(this[kSocket], err)
}

/**
 * This is the root cause of #3011
 * We need to handle GOAWAY frames properly, and trigger the session close
 * along with the socket right away
 *
 * @this {import('http2').ClientHttp2Session}
 * @param {number} errorCode
 */
function onHttp2SessionGoAway (errorCode) {
  // TODO(mcollina): Verify if GOAWAY implements the spec correctly:
  // https://datatracker.ietf.org/doc/html/rfc7540#section-6.8
  // Specifically, we do not verify the "valid" stream id.

  const err = this[kError] || new SocketError(`HTTP/2: "GOAWAY" frame received with code ${errorCode}`, util.getSocketInfo(this[kSocket]))
  const client = this[kClient]

  client[kSocket] = null
  client[kHTTPContext] = null

  // this is an HTTP2 session
  this.close()
  this[kHTTP2Session] = null

  util.destroy(this[kSocket], err)

  // Fail head of pipeline.
  if (client[kRunningIdx] < client[kQueue].length) {
    const request = client[kQueue][client[kRunningIdx]]
    client[kQueue][client[kRunningIdx]++] = null
    util.errorRequest(client, request, err)
    client[kPendingIdx] = client[kRunningIdx]
  }

  assert(client[kRunning] === 0)

  client.emit('disconnect', client[kUrl], [client], err)

  client[kResume]()
}

function onHttp2SessionClose () {
  const { [kClient]: client } = this
  const { [kSocket]: socket } = client

  const err = this[kSocket][kError] || this[kError] || new SocketError('closed', util.getSocketInfo(socket))

  client[kSocket] = null
  client[kHTTPContext] = null

  if (client.destroyed) {
    assert(client[kPending] === 0)

    // Fail entire queue.
    const requests = client[kQueue].splice(client[kRunningIdx])
    for (let i = 0; i < requests.length; i++) {
      const request = requests[i]
      util.errorRequest(client, request, err)
    }
  }
}

function onHttp2SocketClose () {
  const err = this[kError] || new SocketError('closed', util.getSocketInfo(this))

  const client = this[kHTTP2Session][kClient]

  client[kSocket] = null
  client[kHTTPContext] = null

  if (this[kHTTP2Session] !== null) {
    this[kHTTP2Session].destroy(err)
  }

  client[kPendingIdx] = client[kRunningIdx]

  assert(client[kRunning] === 0)

  client.emit('disconnect', client[kUrl], [client], err)

  client[kResume]()
}

function onHttp2SocketError (err) {
  assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID')

  this[kError] = err

  this[kClient][kOnError](err)
}

function onHttp2SocketEnd () {
  util.destroy(this, new SocketError('other side closed', util.getSocketInfo(this)))
}

function onSocketClose () {
  this[kClosed] = true
}

// https://www.rfc-editor.org/rfc/rfc7230#section-3.3.2
function shouldSendContentLength (method) {
  return method !== 'GET' && method !== 'HEAD' && method !== 'OPTIONS' && method !== 'TRACE' && method !== 'CONNECT'
}

function writeH2 (client, request) {
  const requestTimeout = request.bodyTimeout ?? client[kBodyTimeout]
  const session = client[kHTTP2Session]
  const { method, path, host, upgrade, expectContinue, signal, headers: reqHeaders } = request
  let { body } = request

  if (upgrade) {
    util.errorRequest(client, request, new Error('Upgrade not supported for H2'))
    return false
  }

  const headers = {}
  for (let n = 0; n < reqHeaders.length; n += 2) {
    const key = reqHeaders[n + 0]
    const val = reqHeaders[n + 1]

    if (Array.isArray(val)) {
      for (let i = 0; i < val.length; i++) {
        if (headers[key]) {
          headers[key] += `,${val[i]}`
        } else {
          headers[key] = val[i]
        }
      }
    } else {
      headers[key] = val
    }
  }

  /** @type {import('node:http2').ClientHttp2Stream} */
  let stream = null

  const { hostname, port } = client[kUrl]

  headers[HTTP2_HEADER_AUTHORITY] = host || `${hostname}${port ? `:${port}` : ''}`
  headers[HTTP2_HEADER_METHOD] = method

  const abort = (err) => {
    if (request.aborted || request.completed) {
      return
    }

    err = err || new RequestAbortedError()

    util.errorRequest(client, request, err)

    if (stream != null) {
      // Some chunks might still come after abort,
      // let's ignore them
      stream.removeAllListeners('data')

      // On Abort, we close the stream to send RST_STREAM frame
      stream.close()

      // We move the running index to the next request
      client[kOnError](err)
      client[kResume]()
    }

    // We do not destroy the socket as we can continue using the session
    // the stream gets destroyed and the session remains to create new streams
    util.destroy(body, err)
  }

  try {
    // We are already connected, streams are pending.
    // We can call on connect, and wait for abort
    request.onConnect(abort)
  } catch (err) {
    util.errorRequest(client, request, err)
  }

  if (request.aborted) {
    return false
  }

  if (method === 'CONNECT') {
    session.ref()
    // We are already connected, streams are pending, first request
    // will create a new stream. We trigger a request to create the stream and wait until
    // `ready` event is triggered
    // We disabled endStream to allow the user to write to the stream
    stream = session.request(headers, { endStream: false, signal })

    if (!stream.pending) {
      request.onUpgrade(null, null, stream)
      ++session[kOpenStreams]
      client[kQueue][client[kRunningIdx]++] = null
    } else {
      stream.once('ready', () => {
        request.onUpgrade(null, null, stream)
        ++session[kOpenStreams]
        client[kQueue][client[kRunningIdx]++] = null
      })
    }

    stream.once('close', () => {
      session[kOpenStreams] -= 1
      if (session[kOpenStreams] === 0) session.unref()
    })
    stream.setTimeout(requestTimeout)

    return true
  }

  // https://tools.ietf.org/html/rfc7540#section-8.3
  // :path and :scheme headers must be omitted when sending CONNECT

  headers[HTTP2_HEADER_PATH] = path
  headers[HTTP2_HEADER_SCHEME] = 'https'

  // https://tools.ietf.org/html/rfc7231#section-4.3.1
  // https://tools.ietf.org/html/rfc7231#section-4.3.2
  // https://tools.ietf.org/html/rfc7231#section-4.3.5

  // Sending a payload body on a request that does not
  // expect it can cause undefined behavior on some
  // servers and corrupt connection state. Do not
  // re-use the connection for further requests.

  const expectsPayload = (
    method === 'PUT' ||
    method === 'POST' ||
    method === 'PATCH'
  )

  if (body && typeof body.read === 'function') {
    // Try to read EOF in order to get length.
    body.read(0)
  }

  let contentLength = util.bodyLength(body)

  if (util.isFormDataLike(body)) {
    extractBody ??= require('../web/fetch/body.js').extractBody

    const [bodyStream, contentType] = extractBody(body)
    headers['content-type'] = contentType

    body = bodyStream.stream
    contentLength = bodyStream.length
  }

  if (contentLength == null) {
    contentLength = request.contentLength
  }

  if (contentLength === 0 || !expectsPayload) {
    // https://tools.ietf.org/html/rfc7230#section-3.3.2
    // A user agent SHOULD NOT send a Content-Length header field when
    // the request message does not contain a payload body and the method
    // semantics do not anticipate such a body.

    contentLength = null
  }

  // https://github.com/nodejs/undici/issues/2046
  // A user agent may send a Content-Length header with 0 value, this should be allowed.
  if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength != null && request.contentLength !== contentLength) {
    if (client[kStrictContentLength]) {
      util.errorRequest(client, request, new RequestContentLengthMismatchError())
      return false
    }

    process.emitWarning(new RequestContentLengthMismatchError())
  }

  if (contentLength != null) {
    assert(body, 'no body must not have content length')
    headers[HTTP2_HEADER_CONTENT_LENGTH] = `${contentLength}`
  }

  session.ref()

  if (channels.sendHeaders.hasSubscribers) {
    let header = ''
    for (const key in headers) {
      header += `${key}: ${headers[key]}\r\n`
    }
    channels.sendHeaders.publish({ request, headers: header, socket: session[kSocket] })
  }

  // TODO(metcoder95): add support for sending trailers
  const shouldEndStream = method === 'GET' || method === 'HEAD' || body === null
  if (expectContinue) {
    headers[HTTP2_HEADER_EXPECT] = '100-continue'
    stream = session.request(headers, { endStream: shouldEndStream, signal })

    stream.once('continue', writeBodyH2)
  } else {
    stream = session.request(headers, {
      endStream: shouldEndStream,
      signal
    })

    writeBodyH2()
  }

  // Increment counter as we have new streams open
  ++session[kOpenStreams]
  stream.setTimeout(requestTimeout)

  stream.once('response', headers => {
    const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers
    request.onResponseStarted()

    // Due to the stream nature, it is possible we face a race condition
    // where the stream has been assigned, but the request has been aborted
    // the request remains in-flight and headers hasn't been received yet
    // for those scenarios, best effort is to destroy the stream immediately
    // as there's no value to keep it open.
    if (request.aborted) {
      stream.removeAllListeners('data')
      return
    }

    if (request.onHeaders(Number(statusCode), parseH2Headers(realHeaders), stream.resume.bind(stream), '') === false) {
      stream.pause()
    }
  })

  stream.on('data', (chunk) => {
    if (request.onData(chunk) === false) {
      stream.pause()
    }
  })

  stream.once('end', (err) => {
    stream.removeAllListeners('data')
    // When state is null, it means we haven't consumed body and the stream still do not have
    // a state.
    // Present specially when using pipeline or stream
    if (stream.state?.state == null || stream.state.state < 6) {
      // Do not complete the request if it was aborted
      // Not prone to happen for as safety net to avoid race conditions with 'trailers'
      if (!request.aborted && !request.completed) {
        request.onComplete({})
      }

      client[kQueue][client[kRunningIdx]++] = null
      client[kResume]()
    } else {
      // Stream is closed or half-closed-remote (6), decrement counter and cleanup
      // It does not have sense to continue working with the stream as we do not
      // have yet RST_STREAM support on client-side
      --session[kOpenStreams]
      if (session[kOpenStreams] === 0) {
        session.unref()
      }

      abort(err ?? new InformationalError('HTTP/2: stream half-closed (remote)'))
      client[kQueue][client[kRunningIdx]++] = null
      client[kPendingIdx] = client[kRunningIdx]
      client[kResume]()
    }
  })

  stream.once('close', () => {
    stream.removeAllListeners('data')
    session[kOpenStreams] -= 1
    if (session[kOpenStreams] === 0) {
      session.unref()
    }
  })

  stream.once('error', function (err) {
    stream.removeAllListeners('data')
    abort(err)
  })

  stream.once('frameError', (type, code) => {
    stream.removeAllListeners('data')
    abort(new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`))
  })

  stream.on('aborted', () => {
    stream.removeAllListeners('data')
  })

  stream.on('timeout', () => {
    const err = new InformationalError(`HTTP/2: "stream timeout after ${requestTimeout}"`)
    stream.removeAllListeners('data')
    session[kOpenStreams] -= 1

    if (session[kOpenStreams] === 0) {
      session.unref()
    }

    abort(err)
  })

  stream.once('trailers', trailers => {
    if (request.aborted || request.completed) {
      return
    }

    request.onComplete(trailers)
  })

  return true

  function writeBodyH2 () {
    /* istanbul ignore else: assertion */
    if (!body || contentLength === 0) {
      writeBuffer(
        abort,
        stream,
        null,
        client,
        request,
        client[kSocket],
        contentLength,
        expectsPayload
      )
    } else if (util.isBuffer(body)) {
      writeBuffer(
        abort,
        stream,
        body,
        client,
        request,
        client[kSocket],
        contentLength,
        expectsPayload
      )
    } else if (util.isBlobLike(body)) {
      if (typeof body.stream === 'function') {
        writeIterable(
          abort,
          stream,
          body.stream(),
          client,
          request,
          client[kSocket],
          contentLength,
          expectsPayload
        )
      } else {
        writeBlob(
          abort,
          stream,
          body,
          client,
          request,
          client[kSocket],
          contentLength,
          expectsPayload
        )
      }
    } else if (util.isStream(body)) {
      writeStream(
        abort,
        client[kSocket],
        expectsPayload,
        stream,
        body,
        client,
        request,
        contentLength
      )
    } else if (util.isIterable(body)) {
      writeIterable(
        abort,
        stream,
        body,
        client,
        request,
        client[kSocket],
        contentLength,
        expectsPayload
      )
    } else {
      assert(false)
    }
  }
}

function writeBuffer (abort, h2stream, body, client, request, socket, contentLength, expectsPayload) {
  try {
    if (body != null && util.isBuffer(body)) {
      assert(contentLength === body.byteLength, 'buffer body must have content length')
      h2stream.cork()
      h2stream.write(body)
      h2stream.uncork()
      h2stream.end()

      request.onBodySent(body)
    }

    if (!expectsPayload) {
      socket[kReset] = true
    }

    request.onRequestSent()
    client[kResume]()
  } catch (error) {
    abort(error)
  }
}

function writeStream (abort, socket, expectsPayload, h2stream, body, client, request, contentLength) {
  assert(contentLength !== 0 || client[kRunning] === 0, 'stream body cannot be pipelined')

  // For HTTP/2, is enough to pipe the stream
  const pipe = pipeline(
    body,
    h2stream,
    (err) => {
      if (err) {
        util.destroy(pipe, err)
        abort(err)
      } else {
        util.removeAllListeners(pipe)
        request.onRequestSent()

        if (!expectsPayload) {
          socket[kReset] = true
        }

        client[kResume]()
      }
    }
  )

  util.addListener(pipe, 'data', onPipeData)

  function onPipeData (chunk) {
    request.onBodySent(chunk)
  }
}

async function writeBlob (abort, h2stream, body, client, request, socket, contentLength, expectsPayload) {
  assert(contentLength === body.size, 'blob body must have content length')

  try {
    if (contentLength != null && contentLength !== body.size) {
      throw new RequestContentLengthMismatchError()
    }

    const buffer = Buffer.from(await body.arrayBuffer())

    h2stream.cork()
    h2stream.write(buffer)
    h2stream.uncork()
    h2stream.end()

    request.onBodySent(buffer)
    request.onRequestSent()

    if (!expectsPayload) {
      socket[kReset] = true
    }

    client[kResume]()
  } catch (err) {
    abort(err)
  }
}

async function writeIterable (abort, h2stream, body, client, request, socket, contentLength, expectsPayload) {
  assert(contentLength !== 0 || client[kRunning] === 0, 'iterator body cannot be pipelined')
|
||||
|
||||
let callback = null
|
||||
function onDrain () {
|
||||
if (callback) {
|
||||
const cb = callback
|
||||
callback = null
|
||||
cb()
|
||||
}
|
||||
}
|
||||
|
||||
const waitForDrain = () => new Promise((resolve, reject) => {
|
||||
assert(callback === null)
|
||||
|
||||
if (socket[kError]) {
|
||||
reject(socket[kError])
|
||||
} else {
|
||||
callback = resolve
|
||||
}
|
||||
})
|
||||
|
||||
h2stream
|
||||
.on('close', onDrain)
|
||||
.on('drain', onDrain)
|
||||
|
||||
try {
|
||||
// It's up to the user to somehow abort the async iterable.
|
||||
for await (const chunk of body) {
|
||||
if (socket[kError]) {
|
||||
throw socket[kError]
|
||||
}
|
||||
|
||||
const res = h2stream.write(chunk)
|
||||
request.onBodySent(chunk)
|
||||
if (!res) {
|
||||
await waitForDrain()
|
||||
}
|
||||
}
|
||||
|
||||
h2stream.end()
|
||||
|
||||
request.onRequestSent()
|
||||
|
||||
if (!expectsPayload) {
|
||||
socket[kReset] = true
|
||||
}
|
||||
|
||||
client[kResume]()
|
||||
} catch (err) {
|
||||
abort(err)
|
||||
} finally {
|
||||
h2stream
|
||||
.off('close', onDrain)
|
||||
.off('drain', onDrain)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = connectH2
|
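The `writeIterable` helper above hand-rolls backpressure: each chunk is written, and when `write()` returns `false` a `resolve` callback is parked until the shared `onDrain` listener releases it. A minimal, self-contained sketch of the same pattern; the tiny `Writable` sink is a stand-in for the HTTP/2 stream, not undici's actual API:

'use strict'

const { Writable } = require('node:stream')

// Toy sink with a small highWaterMark so write() returns false quickly.
const sink = new Writable({
  highWaterMark: 8,
  write (chunk, encoding, callback) { setImmediate(callback) }
})

let pending = null
function onDrain () {
  if (pending) {
    const resolve = pending
    pending = null
    resolve()
  }
}
sink.on('drain', onDrain).on('close', onDrain)

const waitForDrain = () => new Promise(resolve => { pending = resolve })

async function writeAll (iterable) {
  for await (const chunk of iterable) {
    if (!sink.write(chunk)) {
      await waitForDrain() // park until 'drain' (or 'close') fires
    }
  }
  sink.end()
}

writeAll(['a'.repeat(16), 'b'.repeat(16)]).then(() => console.log('body sent'))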
609 node_modules/undici/lib/dispatcher/client.js generated vendored Normal file
@@ -0,0 +1,609 @@
'use strict'

const assert = require('node:assert')
const net = require('node:net')
const http = require('node:http')
const util = require('../core/util.js')
const { channels } = require('../core/diagnostics.js')
const Request = require('../core/request.js')
const DispatcherBase = require('./dispatcher-base')
const {
  InvalidArgumentError,
  InformationalError,
  ClientDestroyedError
} = require('../core/errors.js')
const buildConnector = require('../core/connect.js')
const {
  kUrl,
  kServerName,
  kClient,
  kBusy,
  kConnect,
  kResuming,
  kRunning,
  kPending,
  kSize,
  kQueue,
  kConnected,
  kConnecting,
  kNeedDrain,
  kKeepAliveDefaultTimeout,
  kHostHeader,
  kPendingIdx,
  kRunningIdx,
  kError,
  kPipelining,
  kKeepAliveTimeoutValue,
  kMaxHeadersSize,
  kKeepAliveMaxTimeout,
  kKeepAliveTimeoutThreshold,
  kHeadersTimeout,
  kBodyTimeout,
  kStrictContentLength,
  kConnector,
  kMaxRequests,
  kCounter,
  kClose,
  kDestroy,
  kDispatch,
  kLocalAddress,
  kMaxResponseSize,
  kOnError,
  kHTTPContext,
  kMaxConcurrentStreams,
  kResume
} = require('../core/symbols.js')
const connectH1 = require('./client-h1.js')
const connectH2 = require('./client-h2.js')

const kClosedResolve = Symbol('kClosedResolve')

const getDefaultNodeMaxHeaderSize = http &&
  http.maxHeaderSize &&
  Number.isInteger(http.maxHeaderSize) &&
  http.maxHeaderSize > 0
  ? () => http.maxHeaderSize
  : () => { throw new InvalidArgumentError('http module not available or http.maxHeaderSize invalid') }

const noop = () => {}

function getPipelining (client) {
  return client[kPipelining] ?? client[kHTTPContext]?.defaultPipelining ?? 1
}

/**
 * @type {import('../../types/client.js').default}
 */
class Client extends DispatcherBase {
  /**
   *
   * @param {string|URL} url
   * @param {import('../../types/client.js').Client.Options} options
   */
  constructor (url, {
    maxHeaderSize,
    headersTimeout,
    socketTimeout,
    requestTimeout,
    connectTimeout,
    bodyTimeout,
    idleTimeout,
    keepAlive,
    keepAliveTimeout,
    maxKeepAliveTimeout,
    keepAliveMaxTimeout,
    keepAliveTimeoutThreshold,
    socketPath,
    pipelining,
    tls,
    strictContentLength,
    maxCachedSessions,
    connect,
    maxRequestsPerClient,
    localAddress,
    maxResponseSize,
    autoSelectFamily,
    autoSelectFamilyAttemptTimeout,
    // h2
    maxConcurrentStreams,
    allowH2
  } = {}) {
    if (keepAlive !== undefined) {
      throw new InvalidArgumentError('unsupported keepAlive, use pipelining=0 instead')
    }

    if (socketTimeout !== undefined) {
      throw new InvalidArgumentError('unsupported socketTimeout, use headersTimeout & bodyTimeout instead')
    }

    if (requestTimeout !== undefined) {
      throw new InvalidArgumentError('unsupported requestTimeout, use headersTimeout & bodyTimeout instead')
    }

    if (idleTimeout !== undefined) {
      throw new InvalidArgumentError('unsupported idleTimeout, use keepAliveTimeout instead')
    }

    if (maxKeepAliveTimeout !== undefined) {
      throw new InvalidArgumentError('unsupported maxKeepAliveTimeout, use keepAliveMaxTimeout instead')
    }

    if (maxHeaderSize != null) {
      if (!Number.isInteger(maxHeaderSize) || maxHeaderSize < 1) {
        throw new InvalidArgumentError('invalid maxHeaderSize')
      }
    } else {
      // If maxHeaderSize is not provided, use the default value from the
      // http module or, if that is not available, throw an error.
      maxHeaderSize = getDefaultNodeMaxHeaderSize()
    }

    if (socketPath != null && typeof socketPath !== 'string') {
      throw new InvalidArgumentError('invalid socketPath')
    }

    if (connectTimeout != null && (!Number.isFinite(connectTimeout) || connectTimeout < 0)) {
      throw new InvalidArgumentError('invalid connectTimeout')
    }

    if (keepAliveTimeout != null && (!Number.isFinite(keepAliveTimeout) || keepAliveTimeout <= 0)) {
      throw new InvalidArgumentError('invalid keepAliveTimeout')
    }

    if (keepAliveMaxTimeout != null && (!Number.isFinite(keepAliveMaxTimeout) || keepAliveMaxTimeout <= 0)) {
      throw new InvalidArgumentError('invalid keepAliveMaxTimeout')
    }

    if (keepAliveTimeoutThreshold != null && !Number.isFinite(keepAliveTimeoutThreshold)) {
      throw new InvalidArgumentError('invalid keepAliveTimeoutThreshold')
    }

    if (headersTimeout != null && (!Number.isInteger(headersTimeout) || headersTimeout < 0)) {
      throw new InvalidArgumentError('headersTimeout must be a positive integer or zero')
    }

    if (bodyTimeout != null && (!Number.isInteger(bodyTimeout) || bodyTimeout < 0)) {
      throw new InvalidArgumentError('bodyTimeout must be a positive integer or zero')
    }

    if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
      throw new InvalidArgumentError('connect must be a function or an object')
    }

    if (maxRequestsPerClient != null && (!Number.isInteger(maxRequestsPerClient) || maxRequestsPerClient < 0)) {
      throw new InvalidArgumentError('maxRequestsPerClient must be a positive number')
    }

    if (localAddress != null && (typeof localAddress !== 'string' || net.isIP(localAddress) === 0)) {
      throw new InvalidArgumentError('localAddress must be valid string IP address')
    }

    if (maxResponseSize != null && (!Number.isInteger(maxResponseSize) || maxResponseSize < -1)) {
      throw new InvalidArgumentError('maxResponseSize must be a positive number')
    }

    if (
      autoSelectFamilyAttemptTimeout != null &&
      (!Number.isInteger(autoSelectFamilyAttemptTimeout) || autoSelectFamilyAttemptTimeout < -1)
    ) {
      throw new InvalidArgumentError('autoSelectFamilyAttemptTimeout must be a positive number')
    }

    // h2
    if (allowH2 != null && typeof allowH2 !== 'boolean') {
      throw new InvalidArgumentError('allowH2 must be a valid boolean value')
    }

    if (maxConcurrentStreams != null && (typeof maxConcurrentStreams !== 'number' || maxConcurrentStreams < 1)) {
      throw new InvalidArgumentError('maxConcurrentStreams must be a positive integer, greater than 0')
    }

    super()

    if (typeof connect !== 'function') {
      connect = buildConnector({
        ...tls,
        maxCachedSessions,
        allowH2,
        socketPath,
        timeout: connectTimeout,
        ...(typeof autoSelectFamily === 'boolean' ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
        ...connect
      })
    }

    this[kUrl] = util.parseOrigin(url)
    this[kConnector] = connect
    this[kPipelining] = pipelining != null ? pipelining : 1
    this[kMaxHeadersSize] = maxHeaderSize
    this[kKeepAliveDefaultTimeout] = keepAliveTimeout == null ? 4e3 : keepAliveTimeout
    this[kKeepAliveMaxTimeout] = keepAliveMaxTimeout == null ? 600e3 : keepAliveMaxTimeout
    this[kKeepAliveTimeoutThreshold] = keepAliveTimeoutThreshold == null ? 2e3 : keepAliveTimeoutThreshold
    this[kKeepAliveTimeoutValue] = this[kKeepAliveDefaultTimeout]
    this[kServerName] = null
    this[kLocalAddress] = localAddress != null ? localAddress : null
    this[kResuming] = 0 // 0 - idle, 1 - scheduled, 2 - resuming
    this[kNeedDrain] = 0 // 0 - idle, 1 - scheduled, 2 - resuming
    this[kHostHeader] = `host: ${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}\r\n`
    this[kBodyTimeout] = bodyTimeout != null ? bodyTimeout : 300e3
    this[kHeadersTimeout] = headersTimeout != null ? headersTimeout : 300e3
    this[kStrictContentLength] = strictContentLength == null ? true : strictContentLength
    this[kMaxRequests] = maxRequestsPerClient
    this[kClosedResolve] = null
    this[kMaxResponseSize] = maxResponseSize > -1 ? maxResponseSize : -1
    this[kMaxConcurrentStreams] = maxConcurrentStreams != null ? maxConcurrentStreams : 100 // Max peerConcurrentStreams for a Node h2 server
    this[kHTTPContext] = null

    // kQueue is built up of 3 sections separated by
    // the kRunningIdx and kPendingIdx indices.
    // |   complete   |   running   |   pending   |
    //                ^ kRunningIdx ^ kPendingIdx ^ kQueue.length
    // kRunningIdx points to the first running element.
    // kPendingIdx points to the first pending element.
    // This implements a fast queue with an amortized
    // time of O(1).

    this[kQueue] = []
    this[kRunningIdx] = 0
    this[kPendingIdx] = 0

    this[kResume] = (sync) => resume(this, sync)
    this[kOnError] = (err) => onError(this, err)
  }

  get pipelining () {
    return this[kPipelining]
  }

  set pipelining (value) {
    this[kPipelining] = value
    this[kResume](true)
  }

  get [kPending] () {
    return this[kQueue].length - this[kPendingIdx]
  }

  get [kRunning] () {
    return this[kPendingIdx] - this[kRunningIdx]
  }

  get [kSize] () {
    return this[kQueue].length - this[kRunningIdx]
  }

  get [kConnected] () {
    return !!this[kHTTPContext] && !this[kConnecting] && !this[kHTTPContext].destroyed
  }

  get [kBusy] () {
    return Boolean(
      this[kHTTPContext]?.busy(null) ||
      (this[kSize] >= (getPipelining(this) || 1)) ||
      this[kPending] > 0
    )
  }

  /* istanbul ignore: only used for test */
  [kConnect] (cb) {
    connect(this)
    this.once('connect', cb)
  }

  [kDispatch] (opts, handler) {
    const origin = opts.origin || this[kUrl].origin
    const request = new Request(origin, opts, handler)

    this[kQueue].push(request)
    if (this[kResuming]) {
      // Do nothing.
    } else if (util.bodyLength(request.body) == null && util.isIterable(request.body)) {
      // Wait a tick in case stream/iterator is ended in the same tick.
      this[kResuming] = 1
      queueMicrotask(() => resume(this))
    } else {
      this[kResume](true)
    }

    if (this[kResuming] && this[kNeedDrain] !== 2 && this[kBusy]) {
      this[kNeedDrain] = 2
    }

    return this[kNeedDrain] < 2
  }

  async [kClose] () {
    // TODO: for H2 we need to gracefully flush the remaining enqueued
    // requests and close each stream.
    return new Promise((resolve) => {
      if (this[kSize]) {
        this[kClosedResolve] = resolve
      } else {
        resolve(null)
      }
    })
  }

  async [kDestroy] (err) {
    return new Promise((resolve) => {
      const requests = this[kQueue].splice(this[kPendingIdx])
      for (let i = 0; i < requests.length; i++) {
        const request = requests[i]
        util.errorRequest(this, request, err)
      }

      const callback = () => {
        if (this[kClosedResolve]) {
          // TODO (fix): Should we error here with ClientDestroyedError?
          this[kClosedResolve]()
          this[kClosedResolve] = null
        }
        resolve(null)
      }

      if (this[kHTTPContext]) {
        this[kHTTPContext].destroy(err, callback)
        this[kHTTPContext] = null
      } else {
        queueMicrotask(callback)
      }

      this[kResume]()
    })
  }
}

function onError (client, err) {
  if (
    client[kRunning] === 0 &&
    err.code !== 'UND_ERR_INFO' &&
    err.code !== 'UND_ERR_SOCKET'
  ) {
    // Error is not caused by a running request and is not a recoverable
    // socket error.

    assert(client[kPendingIdx] === client[kRunningIdx])

    const requests = client[kQueue].splice(client[kRunningIdx])

    for (let i = 0; i < requests.length; i++) {
      const request = requests[i]
      util.errorRequest(client, request, err)
    }
    assert(client[kSize] === 0)
  }
}

/**
 * @param {Client} client
 * @returns
 */
async function connect (client) {
  assert(!client[kConnecting])
  assert(!client[kHTTPContext])

  let { host, hostname, protocol, port } = client[kUrl]

  // Resolve ipv6
  if (hostname[0] === '[') {
    const idx = hostname.indexOf(']')

    assert(idx !== -1)
    const ip = hostname.substring(1, idx)

    assert(net.isIPv6(ip))
    hostname = ip
  }

  client[kConnecting] = true

  if (channels.beforeConnect.hasSubscribers) {
    channels.beforeConnect.publish({
      connectParams: {
        host,
        hostname,
        protocol,
        port,
        version: client[kHTTPContext]?.version,
        servername: client[kServerName],
        localAddress: client[kLocalAddress]
      },
      connector: client[kConnector]
    })
  }

  try {
    const socket = await new Promise((resolve, reject) => {
      client[kConnector]({
        host,
        hostname,
        protocol,
        port,
        servername: client[kServerName],
        localAddress: client[kLocalAddress]
      }, (err, socket) => {
        if (err) {
          reject(err)
        } else {
          resolve(socket)
        }
      })
    })

    if (client.destroyed) {
      util.destroy(socket.on('error', noop), new ClientDestroyedError())
      return
    }

    assert(socket)

    try {
      client[kHTTPContext] = socket.alpnProtocol === 'h2'
        ? await connectH2(client, socket)
        : await connectH1(client, socket)
    } catch (err) {
      socket.destroy().on('error', noop)
      throw err
    }

    client[kConnecting] = false

    socket[kCounter] = 0
    socket[kMaxRequests] = client[kMaxRequests]
    socket[kClient] = client
    socket[kError] = null

    if (channels.connected.hasSubscribers) {
      channels.connected.publish({
        connectParams: {
          host,
          hostname,
          protocol,
          port,
          version: client[kHTTPContext]?.version,
          servername: client[kServerName],
          localAddress: client[kLocalAddress]
        },
        connector: client[kConnector],
        socket
      })
    }
    client.emit('connect', client[kUrl], [client])
  } catch (err) {
    if (client.destroyed) {
      return
    }

    client[kConnecting] = false

    if (channels.connectError.hasSubscribers) {
      channels.connectError.publish({
        connectParams: {
          host,
          hostname,
          protocol,
          port,
          version: client[kHTTPContext]?.version,
          servername: client[kServerName],
          localAddress: client[kLocalAddress]
        },
        connector: client[kConnector],
        error: err
      })
    }

    if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') {
      assert(client[kRunning] === 0)
      while (client[kPending] > 0 && client[kQueue][client[kPendingIdx]].servername === client[kServerName]) {
        const request = client[kQueue][client[kPendingIdx]++]
        util.errorRequest(client, request, err)
      }
    } else {
      onError(client, err)
    }

    client.emit('connectionError', client[kUrl], [client], err)
  }

  client[kResume]()
}
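The `channels.beforeConnect` / `channels.connected` / `channels.connectError` publishes in `connect()` above are observable from user code via `node:diagnostics_channel`. A minimal sketch; the channel names follow undici's documented `undici:client:*` convention and should be treated as an assumption if your version differs:

'use strict'

const diagnosticsChannel = require('node:diagnostics_channel')

// Fires just before the connector is invoked.
diagnosticsChannel.channel('undici:client:beforeConnect').subscribe(({ connectParams }) => {
  console.log('connecting to', connectParams.hostname, connectParams.port)
})

// Fires when the connection attempt fails.
diagnosticsChannel.channel('undici:client:connectError').subscribe(({ error }) => {
  console.error('connect failed:', error.code)
})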
function emitDrain (client) {
  client[kNeedDrain] = 0
  client.emit('drain', client[kUrl], [client])
}

function resume (client, sync) {
  if (client[kResuming] === 2) {
    return
  }

  client[kResuming] = 2

  _resume(client, sync)
  client[kResuming] = 0

  if (client[kRunningIdx] > 256) {
    client[kQueue].splice(0, client[kRunningIdx])
    client[kPendingIdx] -= client[kRunningIdx]
    client[kRunningIdx] = 0
  }
}

function _resume (client, sync) {
  while (true) {
    if (client.destroyed) {
      assert(client[kPending] === 0)
      return
    }

    if (client[kClosedResolve] && !client[kSize]) {
      client[kClosedResolve]()
      client[kClosedResolve] = null
      return
    }

    if (client[kHTTPContext]) {
      client[kHTTPContext].resume()
    }

    if (client[kBusy]) {
      client[kNeedDrain] = 2
    } else if (client[kNeedDrain] === 2) {
      if (sync) {
        client[kNeedDrain] = 1
        queueMicrotask(() => emitDrain(client))
      } else {
        emitDrain(client)
      }
      continue
    }

    if (client[kPending] === 0) {
      return
    }

    if (client[kRunning] >= (getPipelining(client) || 1)) {
      return
    }

    const request = client[kQueue][client[kPendingIdx]]

    if (client[kUrl].protocol === 'https:' && client[kServerName] !== request.servername) {
      if (client[kRunning] > 0) {
        return
      }

      client[kServerName] = request.servername
      client[kHTTPContext]?.destroy(new InformationalError('servername changed'), () => {
        client[kHTTPContext] = null
        resume(client)
      })
    }

    if (client[kConnecting]) {
      return
    }

    if (!client[kHTTPContext]) {
      connect(client)
      return
    }

    if (client[kHTTPContext].destroyed) {
      return
    }

    if (client[kHTTPContext].busy(request)) {
      return
    }

    if (!request.aborted && client[kHTTPContext].write(request)) {
      client[kPendingIdx]++
    } else {
      client[kQueue].splice(client[kPendingIdx], 1)
    }
  }
}

module.exports = Client
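For context, a minimal sketch of driving this class from user code; it assumes the usual undici Dispatcher API surface (`request`, `close`) is mixed onto `Client`, and the origin is a placeholder:

'use strict'

const { Client } = require('undici')

async function main () {
  const client = new Client('http://localhost:3000', { pipelining: 2 })

  // dispatch() lands in [kDispatch] above: the request is pushed onto kQueue
  // and resume() decides when it is actually written to the socket.
  const { statusCode, body } = await client.request({ path: '/', method: 'GET' })
  console.log(statusCode)
  for await (const chunk of body) { /* drain the body */ }

  await client.close() // resolves via kClosedResolve once kSize drops to 0
}

main().catch(console.error)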
@@ -1,18 +1,16 @@
'use strict'

const Dispatcher = require('./dispatcher')
const UnwrapHandler = require('../handler/unwrap-handler')
const {
  ClientDestroyedError,
  ClientClosedError,
  InvalidArgumentError
} = require('./core/errors')
const { kDestroy, kClose, kDispatch, kInterceptors } = require('./core/symbols')
} = require('../core/errors')
const { kDestroy, kClose, kClosed, kDestroyed, kDispatch } = require('../core/symbols')

const kDestroyed = Symbol('destroyed')
const kClosed = Symbol('closed')
const kOnDestroyed = Symbol('onDestroyed')
const kOnClosed = Symbol('onClosed')
const kInterceptedDispatch = Symbol('Intercepted Dispatch')

class DispatcherBase extends Dispatcher {
  constructor () {
@@ -32,23 +30,6 @@ class DispatcherBase extends Dispatcher {
    return this[kClosed]
  }

  get interceptors () {
    return this[kInterceptors]
  }

  set interceptors (newInterceptors) {
    if (newInterceptors) {
      for (let i = newInterceptors.length - 1; i >= 0; i--) {
        const interceptor = this[kInterceptors][i]
        if (typeof interceptor !== 'function') {
          throw new InvalidArgumentError('interceptor must be an function')
        }
      }
    }

    this[kInterceptors] = newInterceptors
  }

  close (callback) {
    if (callback === undefined) {
      return new Promise((resolve, reject) => {
@@ -144,25 +125,13 @@ class DispatcherBase extends Dispatcher {
    })
  }

  [kInterceptedDispatch] (opts, handler) {
    if (!this[kInterceptors] || this[kInterceptors].length === 0) {
      this[kInterceptedDispatch] = this[kDispatch]
      return this[kDispatch](opts, handler)
    }

    let dispatch = this[kDispatch].bind(this)
    for (let i = this[kInterceptors].length - 1; i >= 0; i--) {
      dispatch = this[kInterceptors][i](dispatch)
    }
    this[kInterceptedDispatch] = dispatch
    return dispatch(opts, handler)
  }

  dispatch (opts, handler) {
    if (!handler || typeof handler !== 'object') {
      throw new InvalidArgumentError('handler must be an object')
    }

    handler = UnwrapHandler.unwrap(handler)

    try {
      if (!opts || typeof opts !== 'object') {
        throw new InvalidArgumentError('opts must be an object.')
@@ -176,10 +145,10 @@ class DispatcherBase extends Dispatcher {
        throw new ClientClosedError()
      }

      return this[kInterceptedDispatch](opts, handler)
      return this[kDispatch](opts, handler)
    } catch (err) {
      if (typeof handler.onError !== 'function') {
        throw new InvalidArgumentError('invalid onError method')
        throw err
      }

      handler.onError(err)
48 node_modules/undici/lib/dispatcher/dispatcher.js generated vendored Normal file
@@ -0,0 +1,48 @@
'use strict'
const EventEmitter = require('node:events')
const WrapHandler = require('../handler/wrap-handler')

const wrapInterceptor = (dispatch) => (opts, handler) => dispatch(opts, WrapHandler.wrap(handler))

class Dispatcher extends EventEmitter {
  dispatch () {
    throw new Error('not implemented')
  }

  close () {
    throw new Error('not implemented')
  }

  destroy () {
    throw new Error('not implemented')
  }

  compose (...args) {
    // So we handle [interceptor1, interceptor2] or interceptor1, interceptor2, ...
    const interceptors = Array.isArray(args[0]) ? args[0] : args
    let dispatch = this.dispatch.bind(this)

    for (const interceptor of interceptors) {
      if (interceptor == null) {
        continue
      }

      if (typeof interceptor !== 'function') {
        throw new TypeError(`invalid interceptor, expected function received ${typeof interceptor}`)
      }

      dispatch = interceptor(dispatch)
      dispatch = wrapInterceptor(dispatch)

      if (dispatch == null || typeof dispatch !== 'function' || dispatch.length !== 2) {
        throw new TypeError('invalid interceptor')
      }
    }

    return new Proxy(this, {
      get: (target, key) => key === 'dispatch' ? dispatch : target[key]
    })
  }
}

module.exports = Dispatcher
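`compose` folds interceptors around `dispatch` and returns a Proxy whose `dispatch` is the composed chain. A minimal sketch of a hand-written interceptor; the header name and the use of `Agent` are illustrative:

'use strict'

const { randomUUID } = require('node:crypto')
const { Agent } = require('undici')

// An interceptor is a function dispatch => dispatch: it receives the next
// dispatch in the chain and returns a replacement with the same signature.
const addRequestId = (dispatch) => (opts, handler) => {
  opts.headers = { ...opts.headers, 'x-request-id': randomUUID() }
  return dispatch(opts, handler)
}

const dispatcher = new Agent().compose(addRequestId)
// dispatcher.dispatch(...) now runs addRequestId before the Agent's dispatch.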
151 node_modules/undici/lib/dispatcher/env-http-proxy-agent.js generated vendored Normal file
@@ -0,0 +1,151 @@
'use strict'

const DispatcherBase = require('./dispatcher-base')
const { kClose, kDestroy, kClosed, kDestroyed, kDispatch, kNoProxyAgent, kHttpProxyAgent, kHttpsProxyAgent } = require('../core/symbols')
const ProxyAgent = require('./proxy-agent')
const Agent = require('./agent')

const DEFAULT_PORTS = {
  'http:': 80,
  'https:': 443
}

class EnvHttpProxyAgent extends DispatcherBase {
  #noProxyValue = null
  #noProxyEntries = null
  #opts = null

  constructor (opts = {}) {
    super()
    this.#opts = opts

    const { httpProxy, httpsProxy, noProxy, ...agentOpts } = opts

    this[kNoProxyAgent] = new Agent(agentOpts)

    const HTTP_PROXY = httpProxy ?? process.env.http_proxy ?? process.env.HTTP_PROXY
    if (HTTP_PROXY) {
      this[kHttpProxyAgent] = new ProxyAgent({ ...agentOpts, uri: HTTP_PROXY })
    } else {
      this[kHttpProxyAgent] = this[kNoProxyAgent]
    }

    const HTTPS_PROXY = httpsProxy ?? process.env.https_proxy ?? process.env.HTTPS_PROXY
    if (HTTPS_PROXY) {
      this[kHttpsProxyAgent] = new ProxyAgent({ ...agentOpts, uri: HTTPS_PROXY })
    } else {
      this[kHttpsProxyAgent] = this[kHttpProxyAgent]
    }

    this.#parseNoProxy()
  }

  [kDispatch] (opts, handler) {
    const url = new URL(opts.origin)
    const agent = this.#getProxyAgentForUrl(url)
    return agent.dispatch(opts, handler)
  }

  async [kClose] () {
    await this[kNoProxyAgent].close()
    if (!this[kHttpProxyAgent][kClosed]) {
      await this[kHttpProxyAgent].close()
    }
    if (!this[kHttpsProxyAgent][kClosed]) {
      await this[kHttpsProxyAgent].close()
    }
  }

  async [kDestroy] (err) {
    await this[kNoProxyAgent].destroy(err)
    if (!this[kHttpProxyAgent][kDestroyed]) {
      await this[kHttpProxyAgent].destroy(err)
    }
    if (!this[kHttpsProxyAgent][kDestroyed]) {
      await this[kHttpsProxyAgent].destroy(err)
    }
  }

  #getProxyAgentForUrl (url) {
    let { protocol, host: hostname, port } = url

    // Stripping ports in this way instead of using parsedUrl.hostname to make
    // sure that the brackets around IPv6 addresses are kept.
    hostname = hostname.replace(/:\d*$/, '').toLowerCase()
    port = Number.parseInt(port, 10) || DEFAULT_PORTS[protocol] || 0
    if (!this.#shouldProxy(hostname, port)) {
      return this[kNoProxyAgent]
    }
    if (protocol === 'https:') {
      return this[kHttpsProxyAgent]
    }
    return this[kHttpProxyAgent]
  }

  #shouldProxy (hostname, port) {
    if (this.#noProxyChanged) {
      this.#parseNoProxy()
    }

    if (this.#noProxyEntries.length === 0) {
      return true // Always proxy if NO_PROXY is not set or empty.
    }
    if (this.#noProxyValue === '*') {
      return false // Never proxy if wildcard is set.
    }

    for (let i = 0; i < this.#noProxyEntries.length; i++) {
      const entry = this.#noProxyEntries[i]
      if (entry.port && entry.port !== port) {
        continue // Skip if ports don't match.
      }
      if (!/^[.*]/.test(entry.hostname)) {
        // No wildcard, so skip the proxy only on an exact hostname match.
        if (hostname === entry.hostname) {
          return false
        }
      } else {
        // Don't proxy if the hostname ends with the no_proxy host.
        if (hostname.endsWith(entry.hostname.replace(/^\*/, ''))) {
          return false
        }
      }
    }

    return true
  }

  #parseNoProxy () {
    const noProxyValue = this.#opts.noProxy ?? this.#noProxyEnv
    const noProxySplit = noProxyValue.split(/[,\s]/)
    const noProxyEntries = []

    for (let i = 0; i < noProxySplit.length; i++) {
      const entry = noProxySplit[i]
      if (!entry) {
        continue
      }
      const parsed = entry.match(/^(.+):(\d+)$/)
      noProxyEntries.push({
        hostname: (parsed ? parsed[1] : entry).toLowerCase(),
        port: parsed ? Number.parseInt(parsed[2], 10) : 0
      })
    }

    this.#noProxyValue = noProxyValue
    this.#noProxyEntries = noProxyEntries
  }

  get #noProxyChanged () {
    if (this.#opts.noProxy !== undefined) {
      return false
    }
    return this.#noProxyValue !== this.#noProxyEnv
  }

  get #noProxyEnv () {
    return process.env.no_proxy ?? process.env.NO_PROXY ?? ''
  }
}

module.exports = EnvHttpProxyAgent
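A minimal sketch of how the NO_PROXY matching above plays out; the environment values and origins are placeholders:

'use strict'

const { EnvHttpProxyAgent, fetch } = require('undici')

async function main () {
  // With HTTP_PROXY=http://proxy.local:8080 and NO_PROXY=.internal.example
  // in the environment, the agent proxies everything except hosts that
  // #shouldProxy rejects.
  const dispatcher = new EnvHttpProxyAgent()

  await fetch('https://api.example.com/', { dispatcher })     // proxied
  await fetch('http://svc.internal.example/', { dispatcher }) // suffix match, direct

  await dispatcher.close()
}

main().catch(console.error)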
@@ -1,12 +1,10 @@
/* eslint-disable */

'use strict'

// Extracted from node/lib/internal/fixed_queue.js

// Currently optimal queue size, tested on V8 6.0 - 6.6. Must be power of two.
const kSize = 2048;
const kMask = kSize - 1;
const kSize = 2048
const kMask = kSize - 1

// The FixedQueue is implemented as a singly-linked list of fixed-size
// circular buffers. It looks something like this:
@@ -17,18 +15,18 @@ const kMask = kSize - 1;
// +-----------+ <-----\       +-----------+ <------\         +-----------+
// |  [null]   |        \----- |   next    |         \------- |   next    |
// +-----------+               +-----------+                  +-----------+
// |   item    | <-- bottom    |   item    | <-- bottom       |  [empty]  |
// |   item    |               |   item    |                  |  [empty]  |
// |   item    |               |   item    |                  |  [empty]  |
// |   item    |               |   item    |                  |  [empty]  |
// |   item    | <-- bottom    |   item    | <-- bottom       | undefined |
// |   item    |               |   item    |                  | undefined |
// |   item    |               |   item    |                  | undefined |
// |   item    |               |   item    |                  | undefined |
// |   item    |               |   item    |    bottom -->    |   item    |
// |   item    |               |   item    |                  |   item    |
// |    ...    |               |    ...    |                  |    ...    |
// |   item    |               |   item    |                  |   item    |
// |   item    |               |   item    |                  |   item    |
// |  [empty]  | <-- top       |   item    |                  |   item    |
// |  [empty]  |               |   item    |                  |   item    |
// |  [empty]  |               |  [empty]  | <-- top  top --> |  [empty]  |
// | undefined | <-- top       |   item    |                  |   item    |
// | undefined |               |   item    |                  |   item    |
// | undefined |               | undefined | <-- top  top --> | undefined |
// +-----------+               +-----------+                  +-----------+
//
// Or, if there is only one circular buffer, it looks something
@@ -40,12 +38,12 @@ const kMask = kSize - 1;
// +-----------+                       +-----------+
// |  [null]   |                       |  [null]   |
// +-----------+                       +-----------+
// |  [empty]  |                       |   item    |
// |  [empty]  |                       |   item    |
// |   item    | <-- bottom    top --> |  [empty]  |
// |   item    |                       |  [empty]  |
// |  [empty]  | <-- top    bottom --> |   item    |
// |  [empty]  |                       |   item    |
// | undefined |                       |   item    |
// | undefined |                       |   item    |
// |   item    | <-- bottom    top --> | undefined |
// |   item    |                       | undefined |
// | undefined | <-- top    bottom --> |   item    |
// | undefined |                       |   item    |
// +-----------+                       +-----------+
//
// Adding a value means moving `top` forward by one, removing means
@@ -56,62 +54,106 @@ const kMask = kSize - 1;
// `top + 1 === bottom` it's full. This wastes a single space of storage
// but allows much quicker checks.

/**
 * @type {FixedCircularBuffer}
 * @template T
 */
class FixedCircularBuffer {
  constructor() {
    this.bottom = 0;
    this.top = 0;
    this.list = new Array(kSize);
    this.next = null;
  constructor () {
    /**
     * @type {number}
     */
    this.bottom = 0
    /**
     * @type {number}
     */
    this.top = 0
    /**
     * @type {Array<T|undefined>}
     */
    this.list = new Array(kSize).fill(undefined)
    /**
     * @type {T|null}
     */
    this.next = null
  }

  isEmpty() {
    return this.top === this.bottom;
  /**
   * @returns {boolean}
   */
  isEmpty () {
    return this.top === this.bottom
  }

  isFull() {
    return ((this.top + 1) & kMask) === this.bottom;
  /**
   * @returns {boolean}
   */
  isFull () {
    return ((this.top + 1) & kMask) === this.bottom
  }

  push(data) {
    this.list[this.top] = data;
    this.top = (this.top + 1) & kMask;
  /**
   * @param {T} data
   * @returns {void}
   */
  push (data) {
    this.list[this.top] = data
    this.top = (this.top + 1) & kMask
  }

  shift() {
    const nextItem = this.list[this.bottom];
    if (nextItem === undefined)
      return null;
    this.list[this.bottom] = undefined;
    this.bottom = (this.bottom + 1) & kMask;
    return nextItem;
  /**
   * @returns {T|null}
   */
  shift () {
    const nextItem = this.list[this.bottom]
    if (nextItem === undefined) { return null }
    this.list[this.bottom] = undefined
    this.bottom = (this.bottom + 1) & kMask
    return nextItem
  }
}

/**
 * @template T
 */
module.exports = class FixedQueue {
  constructor() {
    this.head = this.tail = new FixedCircularBuffer();
  constructor () {
    /**
     * @type {FixedCircularBuffer<T>}
     */
    this.head = this.tail = new FixedCircularBuffer()
  }

  isEmpty() {
    return this.head.isEmpty();
  /**
   * @returns {boolean}
   */
  isEmpty () {
    return this.head.isEmpty()
  }

  push(data) {
  /**
   * @param {T} data
   */
  push (data) {
    if (this.head.isFull()) {
      // Head is full: Creates a new queue, sets the old queue's `.next` to it,
      // and sets it as the new main queue.
      this.head = this.head.next = new FixedCircularBuffer();
      this.head = this.head.next = new FixedCircularBuffer()
    }
    this.head.push(data);
    this.head.push(data)
  }

  shift() {
    const tail = this.tail;
    const next = tail.shift();
  /**
   * @returns {T|null}
   */
  shift () {
    const tail = this.tail
    const next = tail.shift()
    if (tail.isEmpty() && tail.next !== null) {
      // If there is another queue, it forms the new tail.
      this.tail = tail.next;
      this.tail = tail.next
      tail.next = null
    }
    return next;
    return next
  }
};
}
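A quick sketch of the queue's behaviour, runnable against the module above; the deep `require` path reflects the vendored layout and may vary between undici versions:

'use strict'

const FixedQueue = require('undici/lib/dispatcher/fixed-queue')

const queue = new FixedQueue()

// push/shift are amortized O(1); once a 2048-slot circular buffer fills,
// a fresh one is linked in via `head.next`.
for (let i = 0; i < 3000; i++) {
  queue.push(i)
}

console.log(queue.shift())   // 0 (FIFO order)
console.log(queue.isEmpty()) // false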
12 node_modules/undici/lib/pool-base.js → node_modules/undici/lib/dispatcher/pool-base.js generated vendored
@@ -1,8 +1,8 @@
'use strict'

const DispatcherBase = require('./dispatcher-base')
const FixedQueue = require('./node/fixed-queue')
const { kConnected, kSize, kRunning, kPending, kQueued, kBusy, kFree, kUrl, kClose, kDestroy, kDispatch } = require('./core/symbols')
const FixedQueue = require('./fixed-queue')
const { kConnected, kSize, kRunning, kPending, kQueued, kBusy, kFree, kUrl, kClose, kDestroy, kDispatch } = require('../core/symbols')
const PoolStats = require('./pool-stats')

const kClients = Symbol('clients')
@@ -113,9 +113,9 @@ class PoolBase extends DispatcherBase {

  async [kClose] () {
    if (this[kQueue].isEmpty()) {
      return Promise.all(this[kClients].map(c => c.close()))
      await Promise.all(this[kClients].map(c => c.close()))
    } else {
      return new Promise((resolve) => {
      await new Promise((resolve) => {
        this[kClosedResolve] = resolve
      })
    }
@@ -130,7 +130,7 @@ class PoolBase extends DispatcherBase {
      item.handler.onError(err)
    }

    return Promise.all(this[kClients].map(c => c.destroy(err)))
    await Promise.all(this[kClients].map(c => c.destroy(err)))
  }

  [kDispatch] (opts, handler) {
@@ -158,7 +158,7 @@ class PoolBase extends DispatcherBase {
    this[kClients].push(client)

    if (this[kNeedDrain]) {
      process.nextTick(() => {
      queueMicrotask(() => {
        if (this[kNeedDrain]) {
          this[kOnDrain](client[kUrl], [this, client])
        }
@@ -1,4 +1,6 @@
const { kFree, kConnected, kPending, kQueued, kRunning, kSize } = require('./core/symbols')
'use strict'

const { kFree, kConnected, kPending, kQueued, kRunning, kSize } = require('../core/symbols')
const kPool = Symbol('pool')

class PoolStats {
44 node_modules/undici/lib/pool.js → node_modules/undici/lib/dispatcher/pool.js generated vendored
@@ -10,10 +10,10 @@
const Client = require('./client')
const {
  InvalidArgumentError
} = require('./core/errors')
const util = require('./core/util')
const { kUrl, kInterceptors } = require('./core/symbols')
const buildConnector = require('./core/connect')
} = require('../core/errors')
const util = require('../core/util')
const { kUrl } = require('../core/symbols')
const buildConnector = require('../core/connect')

const kOptions = Symbol('options')
const kConnections = Symbol('connections')
@@ -37,8 +37,6 @@ class Pool extends PoolBase {
    allowH2,
    ...options
  } = {}) {
    super()

    if (connections != null && (!Number.isFinite(connections) || connections < 0)) {
      throw new InvalidArgumentError('invalid connections')
    }
@@ -51,6 +49,8 @@ class Pool extends PoolBase {
      throw new InvalidArgumentError('connect must be a function or an object')
    }

    super()

    if (typeof connect !== 'function') {
      connect = buildConnector({
        ...tls,
@@ -58,14 +58,11 @@ class Pool extends PoolBase {
        allowH2,
        socketPath,
        timeout: connectTimeout,
        ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
        ...(typeof autoSelectFamily === 'boolean' ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
        ...connect
      })
    }

    this[kInterceptors] = options.interceptors && options.interceptors.Pool && Array.isArray(options.interceptors.Pool)
      ? options.interceptors.Pool
      : []
    this[kConnections] = connections || null
    this[kUrl] = util.parseOrigin(origin)
    this[kOptions] = { ...util.deepClone(options), connect, allowH2 }
@@ -73,21 +70,34 @@ class Pool extends PoolBase {
      ? { ...options.interceptors }
      : undefined
    this[kFactory] = factory

    this.on('connectionError', (origin, targets, error) => {
      // If a connection error occurs, we remove the client from the pool,
      // and emit a connectionError event. They will not be re-used.
      // Fixes https://github.com/nodejs/undici/issues/3895
      for (const target of targets) {
        // Do not use kRemoveClient here, as it will close the client,
        // but the client cannot be closed in this state.
        const idx = this[kClients].indexOf(target)
        if (idx !== -1) {
          this[kClients].splice(idx, 1)
        }
      }
    })
  }

  [kGetDispatcher] () {
    let dispatcher = this[kClients].find(dispatcher => !dispatcher[kNeedDrain])

    if (dispatcher) {
      return dispatcher
    for (const client of this[kClients]) {
      if (!client[kNeedDrain]) {
        return client
      }
    }

    if (!this[kConnections] || this[kClients].length < this[kConnections]) {
      dispatcher = this[kFactory](this[kUrl], this[kOptions])
      const dispatcher = this[kFactory](this[kUrl], this[kOptions])
      this[kAddClient](dispatcher)
      return dispatcher
    }

    return dispatcher
  }
}
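`kGetDispatcher` above returns the first client that is not draining, and only creates a new one while the `connections` cap has not been reached. In user terms (the origin is a placeholder, and `request` assumes the usual Dispatcher API surface):

'use strict'

const { Pool } = require('undici')

async function main () {
  const pool = new Pool('http://localhost:3000', { connections: 4 })

  // Requests are routed to idle clients; a new Client is created only while
  // fewer than 4 exist. Beyond that, requests queue until one drains.
  const responses = await Promise.all(
    Array.from({ length: 8 }, () => pool.request({ path: '/', method: 'GET' }))
  )
  console.log(responses.map(r => r.statusCode))

  await pool.close()
}

main().catch(console.error)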
@@ -1,12 +1,12 @@
'use strict'

const { kProxy, kClose, kDestroy, kInterceptors } = require('./core/symbols')
const { URL } = require('url')
const { kProxy, kClose, kDestroy } = require('../core/symbols')
const { URL } = require('node:url')
const Agent = require('./agent')
const Pool = require('./pool')
const DispatcherBase = require('./dispatcher-base')
const { InvalidArgumentError, RequestAbortedError } = require('./core/errors')
const buildConnector = require('./core/connect')
const { InvalidArgumentError, RequestAbortedError, SecureProxyConnectionError } = require('../core/errors')
const buildConnector = require('../core/connect')

const kAgent = Symbol('proxy agent')
const kClient = Symbol('proxy client')
@@ -19,55 +19,33 @@ function defaultProtocolPort (protocol) {
  return protocol === 'https:' ? 443 : 80
}

function buildProxyOptions (opts) {
  if (typeof opts === 'string') {
    opts = { uri: opts }
  }

  if (!opts || !opts.uri) {
    throw new InvalidArgumentError('Proxy opts.uri is mandatory')
  }

  return {
    uri: opts.uri,
    protocol: opts.protocol || 'https'
  }
}

function defaultFactory (origin, opts) {
  return new Pool(origin, opts)
}

const noop = () => {}

class ProxyAgent extends DispatcherBase {
  constructor (opts) {
    super(opts)
    this[kProxy] = buildProxyOptions(opts)
    this[kAgent] = new Agent(opts)
    this[kInterceptors] = opts.interceptors && opts.interceptors.ProxyAgent && Array.isArray(opts.interceptors.ProxyAgent)
      ? opts.interceptors.ProxyAgent
      : []

    if (typeof opts === 'string') {
      opts = { uri: opts }
    }

    if (!opts || !opts.uri) {
      throw new InvalidArgumentError('Proxy opts.uri is mandatory')
    if (!opts || (typeof opts === 'object' && !(opts instanceof URL) && !opts.uri)) {
      throw new InvalidArgumentError('Proxy uri is mandatory')
    }

    const { clientFactory = defaultFactory } = opts

    if (typeof clientFactory !== 'function') {
      throw new InvalidArgumentError('Proxy opts.clientFactory must be a function.')
    }

    super()

    const url = this.#getUrl(opts)
    const { href, origin, port, protocol, username, password, hostname: proxyHostname } = url

    this[kProxy] = { uri: href, protocol }
    this[kRequestTls] = opts.requestTls
    this[kProxyTls] = opts.proxyTls
    this[kProxyHeaders] = opts.headers || {}

    const resolvedUrl = new URL(opts.uri)
    const { origin, port, host, username, password } = resolvedUrl

    if (opts.auth && opts.token) {
      throw new InvalidArgumentError('opts.auth cannot be used in combination with opts.token')
    } else if (opts.auth) {
@@ -81,27 +59,28 @@ class ProxyAgent extends DispatcherBase {

    const connect = buildConnector({ ...opts.proxyTls })
    this[kConnectEndpoint] = buildConnector({ ...opts.requestTls })
    this[kClient] = clientFactory(resolvedUrl, { connect })
    this[kClient] = clientFactory(url, { connect })
    this[kAgent] = new Agent({
      ...opts,
      connect: async (opts, callback) => {
        let requestedHost = opts.host
        let requestedPath = opts.host
        if (!opts.port) {
          requestedHost += `:${defaultProtocolPort(opts.protocol)}`
          requestedPath += `:${defaultProtocolPort(opts.protocol)}`
        }
        try {
          const { socket, statusCode } = await this[kClient].connect({
            origin,
            port,
            path: requestedHost,
            path: requestedPath,
            signal: opts.signal,
            headers: {
              ...this[kProxyHeaders],
              host
            }
              host: opts.host
            },
            servername: this[kProxyTls]?.servername || proxyHostname
          })
          if (statusCode !== 200) {
            socket.on('error', () => {}).destroy()
            socket.on('error', noop).destroy()
            callback(new RequestAbortedError(`Proxy response (${statusCode}) !== 200 when HTTP Tunneling`))
          }
          if (opts.protocol !== 'https:') {
@@ -116,28 +95,49 @@ class ProxyAgent extends DispatcherBase {
          }
          this[kConnectEndpoint]({ ...opts, servername, httpSocket: socket }, callback)
        } catch (err) {
          callback(err)
          if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') {
            // Throw a custom error to avoid loop in client.js#connect
            callback(new SecureProxyConnectionError(err))
          } else {
            callback(err)
          }
        }
      }
    })
  }

  dispatch (opts, handler) {
    const { host } = new URL(opts.origin)
    const headers = buildHeaders(opts.headers)
    throwIfProxyAuthIsSent(headers)

    if (headers && !('host' in headers) && !('Host' in headers)) {
      const { host } = new URL(opts.origin)
      headers.host = host
    }

    return this[kAgent].dispatch(
      {
        ...opts,
        headers: {
          ...headers,
          host
        }
        headers
      },
      handler
    )
  }

  /**
   * @param {import('../types/proxy-agent').ProxyAgent.Options | string | URL} opts
   * @returns {URL}
   */
  #getUrl (opts) {
    if (typeof opts === 'string') {
      return new URL(opts)
    } else if (opts instanceof URL) {
      return opts
    } else {
      return new URL(opts.uri)
    }
  }

  async [kClose] () {
    await this[kAgent].close()
    await this[kClient].close()
35 node_modules/undici/lib/dispatcher/retry-agent.js generated vendored Normal file
@@ -0,0 +1,35 @@
'use strict'

const Dispatcher = require('./dispatcher')
const RetryHandler = require('../handler/retry-handler')

class RetryAgent extends Dispatcher {
  #agent = null
  #options = null
  constructor (agent, options = {}) {
    super(options)
    this.#agent = agent
    this.#options = options
  }

  dispatch (opts, handler) {
    const retry = new RetryHandler({
      ...opts,
      retryOptions: this.#options
    }, {
      dispatch: this.#agent.dispatch.bind(this.#agent),
      handler
    })
    return this.#agent.dispatch(opts, retry)
  }

  close () {
    return this.#agent.close()
  }

  destroy () {
    return this.#agent.destroy()
  }
}

module.exports = RetryAgent
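A minimal sketch of wrapping a dispatcher with this class; it assumes the usual Dispatcher API surface (`request`) and a placeholder origin, with `maxRetries` as an illustrative retry option:

'use strict'

const { Agent, RetryAgent } = require('undici')

async function main () {
  // Every dispatch is wrapped in a RetryHandler, exactly as in the class above.
  const agent = new RetryAgent(new Agent(), { maxRetries: 3 })

  const { statusCode } = await agent.request({
    origin: 'http://localhost:3000',
    path: '/',
    method: 'GET'
  })
  console.log(statusCode)

  await agent.close()
}

main().catch(console.error)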
151 node_modules/undici/lib/fetch/constants.js generated vendored
@@ -1,151 +0,0 @@
'use strict'

const { MessageChannel, receiveMessageOnPort } = require('worker_threads')

const corsSafeListedMethods = ['GET', 'HEAD', 'POST']
const corsSafeListedMethodsSet = new Set(corsSafeListedMethods)

const nullBodyStatus = [101, 204, 205, 304]

const redirectStatus = [301, 302, 303, 307, 308]
const redirectStatusSet = new Set(redirectStatus)

// https://fetch.spec.whatwg.org/#block-bad-port
const badPorts = [
  '1', '7', '9', '11', '13', '15', '17', '19', '20', '21', '22', '23', '25', '37', '42', '43', '53', '69', '77', '79',
  '87', '95', '101', '102', '103', '104', '109', '110', '111', '113', '115', '117', '119', '123', '135', '137',
  '139', '143', '161', '179', '389', '427', '465', '512', '513', '514', '515', '526', '530', '531', '532',
  '540', '548', '554', '556', '563', '587', '601', '636', '989', '990', '993', '995', '1719', '1720', '1723',
  '2049', '3659', '4045', '5060', '5061', '6000', '6566', '6665', '6666', '6667', '6668', '6669', '6697',
  '10080'
]

const badPortsSet = new Set(badPorts)

// https://w3c.github.io/webappsec-referrer-policy/#referrer-policies
const referrerPolicy = [
  '',
  'no-referrer',
  'no-referrer-when-downgrade',
  'same-origin',
  'origin',
  'strict-origin',
  'origin-when-cross-origin',
  'strict-origin-when-cross-origin',
  'unsafe-url'
]
const referrerPolicySet = new Set(referrerPolicy)

const requestRedirect = ['follow', 'manual', 'error']

const safeMethods = ['GET', 'HEAD', 'OPTIONS', 'TRACE']
const safeMethodsSet = new Set(safeMethods)

const requestMode = ['navigate', 'same-origin', 'no-cors', 'cors']

const requestCredentials = ['omit', 'same-origin', 'include']

const requestCache = [
  'default',
  'no-store',
  'reload',
  'no-cache',
  'force-cache',
  'only-if-cached'
]

// https://fetch.spec.whatwg.org/#request-body-header-name
const requestBodyHeader = [
  'content-encoding',
  'content-language',
  'content-location',
  'content-type',
  // See https://github.com/nodejs/undici/issues/2021
  // 'Content-Length' is a forbidden header name, which is typically
  // removed in the Headers implementation. However, undici doesn't
  // filter out headers, so we add it here.
  'content-length'
]

// https://fetch.spec.whatwg.org/#enumdef-requestduplex
const requestDuplex = [
  'half'
]

// http://fetch.spec.whatwg.org/#forbidden-method
const forbiddenMethods = ['CONNECT', 'TRACE', 'TRACK']
const forbiddenMethodsSet = new Set(forbiddenMethods)

const subresource = [
  'audio',
  'audioworklet',
  'font',
  'image',
  'manifest',
  'paintworklet',
  'script',
  'style',
  'track',
  'video',
  'xslt',
  ''
]
const subresourceSet = new Set(subresource)

/** @type {globalThis['DOMException']} */
const DOMException = globalThis.DOMException ?? (() => {
  // DOMException was only made a global in Node v17.0.0,
  // but fetch supports >= v16.8.
  try {
    atob('~')
  } catch (err) {
    return Object.getPrototypeOf(err).constructor
  }
})()

let channel

/** @type {globalThis['structuredClone']} */
const structuredClone =
  globalThis.structuredClone ??
  // https://github.com/nodejs/node/blob/b27ae24dcc4251bad726d9d84baf678d1f707fed/lib/internal/structured_clone.js
  // structuredClone was added in v17.0.0, but fetch supports v16.8
  function structuredClone (value, options = undefined) {
    if (arguments.length === 0) {
      throw new TypeError('missing argument')
    }

    if (!channel) {
      channel = new MessageChannel()
    }
    channel.port1.unref()
    channel.port2.unref()
    channel.port1.postMessage(value, options?.transfer)
    return receiveMessageOnPort(channel.port2).message
  }

module.exports = {
  DOMException,
  structuredClone,
  subresource,
  forbiddenMethods,
  requestBodyHeader,
  referrerPolicy,
  requestRedirect,
  requestMode,
  requestCredentials,
  requestCache,
  redirectStatus,
  corsSafeListedMethods,
  nullBodyStatus,
  safeMethods,
  badPorts,
  requestDuplex,
  subresourceSet,
  badPortsSet,
  redirectStatusSet,
  corsSafeListedMethodsSet,
  safeMethodsSet,
  forbiddenMethodsSet,
  referrerPolicySet
}
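The deleted fallback above implements structured cloning by bouncing the value through a `MessageChannel` and reading it back synchronously. A self-contained sketch of the same trick:

'use strict'

const { MessageChannel, receiveMessageOnPort } = require('node:worker_threads')

// postMessage runs the structured-clone algorithm; receiveMessageOnPort
// then hands the clone back synchronously, no worker required.
function cloneViaChannel (value) {
  const { port1, port2 } = new MessageChannel()
  port1.unref()
  port2.unref()
  port1.postMessage(value)
  return receiveMessageOnPort(port2).message
}

const original = { nested: new Map([['answer', 42]]) }
const copy = cloneViaChannel(original)
console.log(copy.nested.get('answer'), copy !== original) // 42 true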
344 node_modules/undici/lib/fetch/file.js generated vendored
@@ -1,344 +0,0 @@
|
||||
'use strict'

const { Blob, File: NativeFile } = require('buffer')
const { types } = require('util')
const { kState } = require('./symbols')
const { isBlobLike } = require('./util')
const { webidl } = require('./webidl')
const { parseMIMEType, serializeAMimeType } = require('./dataURL')
const { kEnumerableProperty } = require('../core/util')
const encoder = new TextEncoder()

class File extends Blob {
  constructor (fileBits, fileName, options = {}) {
    // The File constructor is invoked with two or three parameters, depending
    // on whether the optional dictionary parameter is used. When the File()
    // constructor is invoked, user agents must run the following steps:
    webidl.argumentLengthCheck(arguments, 2, { header: 'File constructor' })

    fileBits = webidl.converters['sequence<BlobPart>'](fileBits)
    fileName = webidl.converters.USVString(fileName)
    options = webidl.converters.FilePropertyBag(options)

    // 1. Let bytes be the result of processing blob parts given fileBits and
    //    options.
    // Note: Blob handles this for us

    // 2. Let n be the fileName argument to the constructor.
    const n = fileName

    // 3. Process FilePropertyBag dictionary argument by running the following
    //    substeps:

    //    1. If the type member is provided and is not the empty string, let t
    //       be set to the type dictionary member. If t contains any characters
    //       outside the range U+0020 to U+007E, then set t to the empty string
    //       and return from these substeps.
    //    2. Convert every character in t to ASCII lowercase.
    let t = options.type
    let d

    // eslint-disable-next-line no-labels
    substep: {
      if (t) {
        t = parseMIMEType(t)

        if (t === 'failure') {
          t = ''
          // eslint-disable-next-line no-labels
          break substep
        }

        t = serializeAMimeType(t).toLowerCase()
      }

      //    3. If the lastModified member is provided, let d be set to the
      //       lastModified dictionary member. If it is not provided, set d to
      //       the current date and time represented as the number of
      //       milliseconds since the Unix Epoch (which is the equivalent of
      //       Date.now() [ECMA-262]).
      d = options.lastModified
    }

    // 4. Return a new File object F such that:
    //    F refers to the bytes byte sequence.
    //    F.size is set to the number of total bytes in bytes.
    //    F.name is set to n.
    //    F.type is set to t.
    //    F.lastModified is set to d.

    super(processBlobParts(fileBits, options), { type: t })
    this[kState] = {
      name: n,
      lastModified: d,
      type: t
    }
  }

  get name () {
    webidl.brandCheck(this, File)

    return this[kState].name
  }

  get lastModified () {
    webidl.brandCheck(this, File)

    return this[kState].lastModified
  }

  get type () {
    webidl.brandCheck(this, File)

    return this[kState].type
  }
}

class FileLike {
  constructor (blobLike, fileName, options = {}) {
    // TODO: argument idl type check

    // The File constructor is invoked with two or three parameters, depending
    // on whether the optional dictionary parameter is used. When the File()
    // constructor is invoked, user agents must run the following steps:

    // 1. Let bytes be the result of processing blob parts given fileBits and
    //    options.

    // 2. Let n be the fileName argument to the constructor.
    const n = fileName

    // 3. Process FilePropertyBag dictionary argument by running the following
    //    substeps:

    //    1. If the type member is provided and is not the empty string, let t
    //       be set to the type dictionary member. If t contains any characters
    //       outside the range U+0020 to U+007E, then set t to the empty string
    //       and return from these substeps.
    // TODO
    const t = options.type

    //    2. Convert every character in t to ASCII lowercase.
    // TODO

    //    3. If the lastModified member is provided, let d be set to the
    //       lastModified dictionary member. If it is not provided, set d to
    //       the current date and time represented as the number of
    //       milliseconds since the Unix Epoch (which is the equivalent of
    //       Date.now() [ECMA-262]).
    const d = options.lastModified ?? Date.now()

    // 4. Return a new File object F such that:
    //    F refers to the bytes byte sequence.
    //    F.size is set to the number of total bytes in bytes.
    //    F.name is set to n.
    //    F.type is set to t.
    //    F.lastModified is set to d.

    this[kState] = {
      blobLike,
      name: n,
      type: t,
      lastModified: d
    }
  }

  stream (...args) {
    webidl.brandCheck(this, FileLike)

    return this[kState].blobLike.stream(...args)
  }

  arrayBuffer (...args) {
    webidl.brandCheck(this, FileLike)

    return this[kState].blobLike.arrayBuffer(...args)
  }

  slice (...args) {
    webidl.brandCheck(this, FileLike)

    return this[kState].blobLike.slice(...args)
  }

  text (...args) {
    webidl.brandCheck(this, FileLike)

    return this[kState].blobLike.text(...args)
  }

  get size () {
    webidl.brandCheck(this, FileLike)

    return this[kState].blobLike.size
  }

  get type () {
    webidl.brandCheck(this, FileLike)

    return this[kState].blobLike.type
  }

  get name () {
    webidl.brandCheck(this, FileLike)

    return this[kState].name
  }

  get lastModified () {
    webidl.brandCheck(this, FileLike)

    return this[kState].lastModified
  }

  get [Symbol.toStringTag] () {
    return 'File'
  }
}

Object.defineProperties(File.prototype, {
  [Symbol.toStringTag]: {
    value: 'File',
    configurable: true
  },
  name: kEnumerableProperty,
  lastModified: kEnumerableProperty
})

webidl.converters.Blob = webidl.interfaceConverter(Blob)

webidl.converters.BlobPart = function (V, opts) {
  if (webidl.util.Type(V) === 'Object') {
    if (isBlobLike(V)) {
      return webidl.converters.Blob(V, { strict: false })
    }

    if (
      ArrayBuffer.isView(V) ||
      types.isAnyArrayBuffer(V)
    ) {
      return webidl.converters.BufferSource(V, opts)
    }
  }

  return webidl.converters.USVString(V, opts)
}

webidl.converters['sequence<BlobPart>'] = webidl.sequenceConverter(
  webidl.converters.BlobPart
)

// https://www.w3.org/TR/FileAPI/#dfn-FilePropertyBag
webidl.converters.FilePropertyBag = webidl.dictionaryConverter([
  {
    key: 'lastModified',
    converter: webidl.converters['long long'],
    get defaultValue () {
      return Date.now()
    }
  },
  {
    key: 'type',
    converter: webidl.converters.DOMString,
    defaultValue: ''
  },
  {
    key: 'endings',
    converter: (value) => {
      value = webidl.converters.DOMString(value)
      value = value.toLowerCase()

      if (value !== 'native') {
        value = 'transparent'
      }

      return value
    },
    defaultValue: 'transparent'
  }
])

/**
 * @see https://www.w3.org/TR/FileAPI/#process-blob-parts
 * @param {(NodeJS.TypedArray|Blob|string)[]} parts
 * @param {{ type: string, endings: string }} options
 */
function processBlobParts (parts, options) {
  // 1. Let bytes be an empty sequence of bytes.
  /** @type {NodeJS.TypedArray[]} */
  const bytes = []

  // 2. For each element in parts:
  for (const element of parts) {
    // 1. If element is a USVString, run the following substeps:
    if (typeof element === 'string') {
      // 1. Let s be element.
      let s = element

      // 2. If the endings member of options is "native", set s
      //    to the result of converting line endings to native
      //    of element.
      if (options.endings === 'native') {
        s = convertLineEndingsNative(s)
      }

      // 3. Append the result of UTF-8 encoding s to bytes.
      bytes.push(encoder.encode(s))
    } else if (
      types.isAnyArrayBuffer(element) ||
      types.isTypedArray(element)
    ) {
      // 2. If element is a BufferSource, get a copy of the
      //    bytes held by the buffer source, and append those
      //    bytes to bytes.
      if (!element.buffer) { // ArrayBuffer
        bytes.push(new Uint8Array(element))
      } else {
        bytes.push(
          new Uint8Array(element.buffer, element.byteOffset, element.byteLength)
        )
      }
    } else if (isBlobLike(element)) {
      // 3. If element is a Blob, append the bytes it represents
      //    to bytes.
      bytes.push(element)
    }
  }

  // 3. Return bytes.
  return bytes
}

/**
 * @see https://www.w3.org/TR/FileAPI/#convert-line-endings-to-native
 * @param {string} s
 */
function convertLineEndingsNative (s) {
  // 1. Let native line ending be the code point U+000A LF.
  let nativeLineEnding = '\n'

  // 2. If the underlying platform’s conventions are to
  //    represent newlines as a carriage return and line feed
  //    sequence, set native line ending to the code point
  //    U+000D CR followed by the code point U+000A LF.
  if (process.platform === 'win32') {
    nativeLineEnding = '\r\n'
  }

  return s.replace(/\r?\n/g, nativeLineEnding)
}

// If this function is moved to ./util.js, some tools (such as
// rollup) will warn about circular dependencies. See:
// https://github.com/nodejs/undici/issues/1629
function isFileLike (object) {
  return (
    (NativeFile && object instanceof NativeFile) ||
    object instanceof File || (
      object &&
      (typeof object.stream === 'function' ||
        typeof object.arrayBuffer === 'function') &&
      object[Symbol.toStringTag] === 'File'
    )
  )
}

module.exports = { File, FileLike, isFileLike }
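For reference, a hedged usage sketch of the File class above, exercising the MIME-type serialization and the endings option that flows through processBlobParts into convertLineEndingsNative. It assumes File is re-exported from the undici package root, which recent 5.x releases do:

// Usage sketch; assumption: `File` is re-exported at the undici package root.
const { File } = require('undici')

const file = new File(['line one\nline two\n'], 'notes.txt', {
  type: 'Text/Plain', // parsed, serialized, and lowercased to 'text/plain'
  endings: 'native' // '\n' becomes '\r\n' on win32, per convertLineEndingsNative
})

file.text().then((text) => {
  // lastModified defaults to Date.now() via the FilePropertyBag converter
  console.log(file.name, file.type, file.lastModified, text.length)
})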
589
node_modules/undici/lib/fetch/headers.js
generated
vendored
@ -1,589 +0,0 @@
// https://github.com/Ethan-Arrowood/undici-fetch

'use strict'

const { kHeadersList, kConstruct } = require('../core/symbols')
const { kGuard } = require('./symbols')
const { kEnumerableProperty } = require('../core/util')
const {
  makeIterator,
  isValidHeaderName,
  isValidHeaderValue
} = require('./util')
const { webidl } = require('./webidl')
const assert = require('assert')

const kHeadersMap = Symbol('headers map')
const kHeadersSortedMap = Symbol('headers map sorted')

/**
 * @param {number} code
 */
function isHTTPWhiteSpaceCharCode (code) {
  return code === 0x00a || code === 0x00d || code === 0x009 || code === 0x020
}

/**
 * @see https://fetch.spec.whatwg.org/#concept-header-value-normalize
 * @param {string} potentialValue
 */
function headerValueNormalize (potentialValue) {
  // To normalize a byte sequence potentialValue, remove
  // any leading and trailing HTTP whitespace bytes from
  // potentialValue.
  let i = 0; let j = potentialValue.length

  while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(j - 1))) --j
  while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(i))) ++i

  return i === 0 && j === potentialValue.length ? potentialValue : potentialValue.substring(i, j)
}

function fill (headers, object) {
  // To fill a Headers object headers with a given object object, run these steps:

  // 1. If object is a sequence, then for each header in object:
  // Note: webidl conversion to array has already been done.
  if (Array.isArray(object)) {
    for (let i = 0; i < object.length; ++i) {
      const header = object[i]
      // 1. If header does not contain exactly two items, then throw a TypeError.
      if (header.length !== 2) {
        throw webidl.errors.exception({
          header: 'Headers constructor',
          message: `expected name/value pair to be length 2, found ${header.length}.`
        })
      }

      // 2. Append (header’s first item, header’s second item) to headers.
      appendHeader(headers, header[0], header[1])
    }
  } else if (typeof object === 'object' && object !== null) {
    // Note: null should throw

    // 2. Otherwise, object is a record, then for each key → value in object,
    //    append (key, value) to headers
    const keys = Object.keys(object)
    for (let i = 0; i < keys.length; ++i) {
      appendHeader(headers, keys[i], object[keys[i]])
    }
  } else {
    throw webidl.errors.conversionFailed({
      prefix: 'Headers constructor',
      argument: 'Argument 1',
      types: ['sequence<sequence<ByteString>>', 'record<ByteString, ByteString>']
    })
  }
}

/**
 * @see https://fetch.spec.whatwg.org/#concept-headers-append
 */
function appendHeader (headers, name, value) {
  // 1. Normalize value.
  value = headerValueNormalize(value)

  // 2. If name is not a header name or value is not a
  //    header value, then throw a TypeError.
  if (!isValidHeaderName(name)) {
    throw webidl.errors.invalidArgument({
      prefix: 'Headers.append',
      value: name,
      type: 'header name'
    })
  } else if (!isValidHeaderValue(value)) {
    throw webidl.errors.invalidArgument({
      prefix: 'Headers.append',
      value,
      type: 'header value'
    })
  }

  // 3. If headers’s guard is "immutable", then throw a TypeError.
  // 4. Otherwise, if headers’s guard is "request" and name is a
  //    forbidden header name, return.
  // Note: undici does not implement forbidden header names
  if (headers[kGuard] === 'immutable') {
    throw new TypeError('immutable')
  } else if (headers[kGuard] === 'request-no-cors') {
    // 5. Otherwise, if headers’s guard is "request-no-cors":
    // TODO
  }

  // 6. Otherwise, if headers’s guard is "response" and name is a
  //    forbidden response-header name, return.

  // 7. Append (name, value) to headers’s header list.
  return headers[kHeadersList].append(name, value)

  // 8. If headers’s guard is "request-no-cors", then remove
  //    privileged no-CORS request headers from headers
}

class HeadersList {
  /** @type {[string, string][]|null} */
  cookies = null

  constructor (init) {
    if (init instanceof HeadersList) {
      this[kHeadersMap] = new Map(init[kHeadersMap])
      this[kHeadersSortedMap] = init[kHeadersSortedMap]
      this.cookies = init.cookies === null ? null : [...init.cookies]
    } else {
      this[kHeadersMap] = new Map(init)
      this[kHeadersSortedMap] = null
    }
  }

  // https://fetch.spec.whatwg.org/#header-list-contains
  contains (name) {
    // A header list list contains a header name name if list
    // contains a header whose name is a byte-case-insensitive
    // match for name.
    name = name.toLowerCase()

    return this[kHeadersMap].has(name)
  }

  clear () {
    this[kHeadersMap].clear()
    this[kHeadersSortedMap] = null
    this.cookies = null
  }

  // https://fetch.spec.whatwg.org/#concept-header-list-append
  append (name, value) {
    this[kHeadersSortedMap] = null

    // 1. If list contains name, then set name to the first such
    //    header’s name.
    const lowercaseName = name.toLowerCase()
    const exists = this[kHeadersMap].get(lowercaseName)

    // 2. Append (name, value) to list.
    if (exists) {
      const delimiter = lowercaseName === 'cookie' ? '; ' : ', '
      this[kHeadersMap].set(lowercaseName, {
        name: exists.name,
        value: `${exists.value}${delimiter}${value}`
      })
    } else {
      this[kHeadersMap].set(lowercaseName, { name, value })
    }

    if (lowercaseName === 'set-cookie') {
      this.cookies ??= []
      this.cookies.push(value)
    }
  }

  // https://fetch.spec.whatwg.org/#concept-header-list-set
  set (name, value) {
    this[kHeadersSortedMap] = null
    const lowercaseName = name.toLowerCase()

    if (lowercaseName === 'set-cookie') {
      this.cookies = [value]
    }

    // 1. If list contains name, then set the value of
    //    the first such header to value and remove the
    //    others.
    // 2. Otherwise, append header (name, value) to list.
    this[kHeadersMap].set(lowercaseName, { name, value })
  }

  // https://fetch.spec.whatwg.org/#concept-header-list-delete
  delete (name) {
    this[kHeadersSortedMap] = null

    name = name.toLowerCase()

    if (name === 'set-cookie') {
      this.cookies = null
    }

    this[kHeadersMap].delete(name)
  }

  // https://fetch.spec.whatwg.org/#concept-header-list-get
  get (name) {
    const value = this[kHeadersMap].get(name.toLowerCase())

    // 1. If list does not contain name, then return null.
    // 2. Return the values of all headers in list whose name
    //    is a byte-case-insensitive match for name,
    //    separated from each other by 0x2C 0x20, in order.
    return value === undefined ? null : value.value
  }

  * [Symbol.iterator] () {
    // use the lowercased name
    for (const [name, { value }] of this[kHeadersMap]) {
      yield [name, value]
    }
  }

  get entries () {
    const headers = {}

    if (this[kHeadersMap].size) {
      for (const { name, value } of this[kHeadersMap].values()) {
        headers[name] = value
      }
    }

    return headers
  }
}

// https://fetch.spec.whatwg.org/#headers-class
class Headers {
  constructor (init = undefined) {
    if (init === kConstruct) {
      return
    }
    this[kHeadersList] = new HeadersList()

    // The new Headers(init) constructor steps are:

    // 1. Set this’s guard to "none".
    this[kGuard] = 'none'

    // 2. If init is given, then fill this with init.
    if (init !== undefined) {
      init = webidl.converters.HeadersInit(init)
      fill(this, init)
    }
  }

  // https://fetch.spec.whatwg.org/#dom-headers-append
  append (name, value) {
    webidl.brandCheck(this, Headers)

    webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.append' })

    name = webidl.converters.ByteString(name)
    value = webidl.converters.ByteString(value)

    return appendHeader(this, name, value)
  }

  // https://fetch.spec.whatwg.org/#dom-headers-delete
  delete (name) {
    webidl.brandCheck(this, Headers)

    webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.delete' })

    name = webidl.converters.ByteString(name)

    // 1. If name is not a header name, then throw a TypeError.
    if (!isValidHeaderName(name)) {
      throw webidl.errors.invalidArgument({
        prefix: 'Headers.delete',
        value: name,
        type: 'header name'
      })
    }

    // 2. If this’s guard is "immutable", then throw a TypeError.
    // 3. Otherwise, if this’s guard is "request" and name is a
    //    forbidden header name, return.
    // 4. Otherwise, if this’s guard is "request-no-cors", name
    //    is not a no-CORS-safelisted request-header name, and
    //    name is not a privileged no-CORS request-header name,
    //    return.
    // 5. Otherwise, if this’s guard is "response" and name is
    //    a forbidden response-header name, return.
    // Note: undici does not implement forbidden header names
    if (this[kGuard] === 'immutable') {
      throw new TypeError('immutable')
    } else if (this[kGuard] === 'request-no-cors') {
      // TODO
    }

    // 6. If this’s header list does not contain name, then
    //    return.
    if (!this[kHeadersList].contains(name)) {
      return
    }

    // 7. Delete name from this’s header list.
    // 8. If this’s guard is "request-no-cors", then remove
    //    privileged no-CORS request headers from this.
    this[kHeadersList].delete(name)
  }

  // https://fetch.spec.whatwg.org/#dom-headers-get
  get (name) {
    webidl.brandCheck(this, Headers)

    webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.get' })

    name = webidl.converters.ByteString(name)

    // 1. If name is not a header name, then throw a TypeError.
    if (!isValidHeaderName(name)) {
      throw webidl.errors.invalidArgument({
        prefix: 'Headers.get',
        value: name,
        type: 'header name'
      })
    }

    // 2. Return the result of getting name from this’s header
    //    list.
    return this[kHeadersList].get(name)
  }

  // https://fetch.spec.whatwg.org/#dom-headers-has
  has (name) {
    webidl.brandCheck(this, Headers)

    webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.has' })

    name = webidl.converters.ByteString(name)

    // 1. If name is not a header name, then throw a TypeError.
    if (!isValidHeaderName(name)) {
      throw webidl.errors.invalidArgument({
        prefix: 'Headers.has',
        value: name,
        type: 'header name'
      })
    }

    // 2. Return true if this’s header list contains name;
    //    otherwise false.
    return this[kHeadersList].contains(name)
  }

  // https://fetch.spec.whatwg.org/#dom-headers-set
  set (name, value) {
    webidl.brandCheck(this, Headers)

    webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.set' })

    name = webidl.converters.ByteString(name)
    value = webidl.converters.ByteString(value)

    // 1. Normalize value.
    value = headerValueNormalize(value)

    // 2. If name is not a header name or value is not a
    //    header value, then throw a TypeError.
    if (!isValidHeaderName(name)) {
      throw webidl.errors.invalidArgument({
        prefix: 'Headers.set',
        value: name,
        type: 'header name'
      })
    } else if (!isValidHeaderValue(value)) {
      throw webidl.errors.invalidArgument({
        prefix: 'Headers.set',
        value,
        type: 'header value'
      })
    }

    // 3. If this’s guard is "immutable", then throw a TypeError.
    // 4. Otherwise, if this’s guard is "request" and name is a
    //    forbidden header name, return.
    // 5. Otherwise, if this’s guard is "request-no-cors" and
    //    name/value is not a no-CORS-safelisted request-header,
    //    return.
    // 6. Otherwise, if this’s guard is "response" and name is a
    //    forbidden response-header name, return.
    // Note: undici does not implement forbidden header names
    if (this[kGuard] === 'immutable') {
      throw new TypeError('immutable')
    } else if (this[kGuard] === 'request-no-cors') {
      // TODO
    }

    // 7. Set (name, value) in this’s header list.
    // 8. If this’s guard is "request-no-cors", then remove
    //    privileged no-CORS request headers from this
    this[kHeadersList].set(name, value)
  }

  // https://fetch.spec.whatwg.org/#dom-headers-getsetcookie
  getSetCookie () {
    webidl.brandCheck(this, Headers)

    // 1. If this’s header list does not contain `Set-Cookie`, then return « ».
    // 2. Return the values of all headers in this’s header list whose name is
    //    a byte-case-insensitive match for `Set-Cookie`, in order.

    const list = this[kHeadersList].cookies

    if (list) {
      return [...list]
    }

    return []
  }

  // https://fetch.spec.whatwg.org/#concept-header-list-sort-and-combine
  get [kHeadersSortedMap] () {
    if (this[kHeadersList][kHeadersSortedMap]) {
      return this[kHeadersList][kHeadersSortedMap]
    }

    // 1. Let headers be an empty list of headers with the key being the name
    //    and value the value.
    const headers = []

    // 2. Let names be the result of convert header names to a sorted-lowercase
    //    set with all the names of the headers in list.
    const names = [...this[kHeadersList]].sort((a, b) => a[0] < b[0] ? -1 : 1)
    const cookies = this[kHeadersList].cookies

    // 3. For each name of names:
    for (let i = 0; i < names.length; ++i) {
      const [name, value] = names[i]
      // 1. If name is `set-cookie`, then:
      if (name === 'set-cookie') {
        // 1. Let values be a list of all values of headers in list whose name
        //    is a byte-case-insensitive match for name, in order.

        // 2. For each value of values:
        //    1. Append (name, value) to headers.
        for (let j = 0; j < cookies.length; ++j) {
          headers.push([name, cookies[j]])
        }
      } else {
        // 2. Otherwise:

        // 1. Let value be the result of getting name from list.

        // 2. Assert: value is non-null.
        assert(value !== null)

        // 3. Append (name, value) to headers.
        headers.push([name, value])
      }
    }

    this[kHeadersList][kHeadersSortedMap] = headers

    // 4. Return headers.
    return headers
  }

  keys () {
    webidl.brandCheck(this, Headers)

    if (this[kGuard] === 'immutable') {
      const value = this[kHeadersSortedMap]
      return makeIterator(() => value, 'Headers',
        'key')
    }

    return makeIterator(
      () => [...this[kHeadersSortedMap].values()],
      'Headers',
      'key'
    )
  }

  values () {
    webidl.brandCheck(this, Headers)

    if (this[kGuard] === 'immutable') {
      const value = this[kHeadersSortedMap]
      return makeIterator(() => value, 'Headers',
        'value')
    }

    return makeIterator(
      () => [...this[kHeadersSortedMap].values()],
      'Headers',
      'value'
    )
  }

  entries () {
    webidl.brandCheck(this, Headers)

    if (this[kGuard] === 'immutable') {
      const value = this[kHeadersSortedMap]
      return makeIterator(() => value, 'Headers',
        'key+value')
    }

    return makeIterator(
      () => [...this[kHeadersSortedMap].values()],
      'Headers',
      'key+value'
    )
  }

  /**
   * @param {(value: string, key: string, self: Headers) => void} callbackFn
   * @param {unknown} thisArg
   */
  forEach (callbackFn, thisArg = globalThis) {
    webidl.brandCheck(this, Headers)

    webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.forEach' })

    if (typeof callbackFn !== 'function') {
      throw new TypeError(
        "Failed to execute 'forEach' on 'Headers': parameter 1 is not of type 'Function'."
      )
    }

    for (const [key, value] of this) {
      callbackFn.apply(thisArg, [value, key, this])
    }
  }

  [Symbol.for('nodejs.util.inspect.custom')] () {
    webidl.brandCheck(this, Headers)

    return this[kHeadersList]
  }
}

Headers.prototype[Symbol.iterator] = Headers.prototype.entries

Object.defineProperties(Headers.prototype, {
  append: kEnumerableProperty,
  delete: kEnumerableProperty,
  get: kEnumerableProperty,
  has: kEnumerableProperty,
  set: kEnumerableProperty,
  getSetCookie: kEnumerableProperty,
  keys: kEnumerableProperty,
  values: kEnumerableProperty,
  entries: kEnumerableProperty,
  forEach: kEnumerableProperty,
  [Symbol.iterator]: { enumerable: false },
  [Symbol.toStringTag]: {
    value: 'Headers',
    configurable: true
  }
})

webidl.converters.HeadersInit = function (V) {
  if (webidl.util.Type(V) === 'Object') {
    if (V[Symbol.iterator]) {
      return webidl.converters['sequence<sequence<ByteString>>'](V)
    }

    return webidl.converters['record<ByteString, ByteString>'](V)
  }

  throw webidl.errors.conversionFailed({
    prefix: 'Headers constructor',
    argument: 'Argument 1',
    types: ['sequence<sequence<ByteString>>', 'record<ByteString, ByteString>']
  })
}

module.exports = {
  fill,
  Headers,
  HeadersList
}
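A short usage sketch of the Headers behaviour implemented above, in particular the ', ' combining done by HeadersList.append and the set-cookie side list surfaced through getSetCookie (assuming the package-root export):

// Usage sketch; Headers is exported from the undici package root.
const { Headers } = require('undici')

const h = new Headers([['accept', 'text/html']])
h.append('accept', 'application/json') // combined with ', ' by HeadersList.append
h.append('set-cookie', 'a=1')
h.append('set-cookie', 'b=2') // also tracked on the cookies side list

console.log(h.get('accept')) // 'text/html, application/json'
console.log(h.getSetCookie()) // ['a=1', 'b=2']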
10
node_modules/undici/lib/fetch/symbols.js
generated
vendored
@ -1,10 +0,0 @@
'use strict'

module.exports = {
  kUrl: Symbol('url'),
  kHeaders: Symbol('headers'),
  kSignal: Symbol('signal'),
  kState: Symbol('state'),
  kGuard: Symbol('guard'),
  kRealm: Symbol('realm')
}
1144
node_modules/undici/lib/fetch/util.js
generated
vendored
File diff suppressed because it is too large
290
node_modules/undici/lib/fileapi/encoding.js
generated
vendored
@ -1,290 +0,0 @@
'use strict'

/**
 * @see https://encoding.spec.whatwg.org/#concept-encoding-get
 * @param {string|undefined} label
 */
function getEncoding (label) {
  if (!label) {
    return 'failure'
  }

  // 1. Remove any leading and trailing ASCII whitespace from label.
  // 2. If label is an ASCII case-insensitive match for any of the
  //    labels listed in the table below, then return the
  //    corresponding encoding; otherwise return failure.
  switch (label.trim().toLowerCase()) {
    case 'unicode-1-1-utf-8':
    case 'unicode11utf8':
    case 'unicode20utf8':
    case 'utf-8':
    case 'utf8':
    case 'x-unicode20utf8':
      return 'UTF-8'
    case '866':
    case 'cp866':
    case 'csibm866':
    case 'ibm866':
      return 'IBM866'
    case 'csisolatin2':
    case 'iso-8859-2':
    case 'iso-ir-101':
    case 'iso8859-2':
    case 'iso88592':
    case 'iso_8859-2':
    case 'iso_8859-2:1987':
    case 'l2':
    case 'latin2':
      return 'ISO-8859-2'
    case 'csisolatin3':
    case 'iso-8859-3':
    case 'iso-ir-109':
    case 'iso8859-3':
    case 'iso88593':
    case 'iso_8859-3':
    case 'iso_8859-3:1988':
    case 'l3':
    case 'latin3':
      return 'ISO-8859-3'
    case 'csisolatin4':
    case 'iso-8859-4':
    case 'iso-ir-110':
    case 'iso8859-4':
    case 'iso88594':
    case 'iso_8859-4':
    case 'iso_8859-4:1988':
    case 'l4':
    case 'latin4':
      return 'ISO-8859-4'
    case 'csisolatincyrillic':
    case 'cyrillic':
    case 'iso-8859-5':
    case 'iso-ir-144':
    case 'iso8859-5':
    case 'iso88595':
    case 'iso_8859-5':
    case 'iso_8859-5:1988':
      return 'ISO-8859-5'
    case 'arabic':
    case 'asmo-708':
    case 'csiso88596e':
    case 'csiso88596i':
    case 'csisolatinarabic':
    case 'ecma-114':
    case 'iso-8859-6':
    case 'iso-8859-6-e':
    case 'iso-8859-6-i':
    case 'iso-ir-127':
    case 'iso8859-6':
    case 'iso88596':
    case 'iso_8859-6':
    case 'iso_8859-6:1987':
      return 'ISO-8859-6'
    case 'csisolatingreek':
    case 'ecma-118':
    case 'elot_928':
    case 'greek':
    case 'greek8':
    case 'iso-8859-7':
    case 'iso-ir-126':
    case 'iso8859-7':
    case 'iso88597':
    case 'iso_8859-7':
    case 'iso_8859-7:1987':
    case 'sun_eu_greek':
      return 'ISO-8859-7'
    case 'csiso88598e':
    case 'csisolatinhebrew':
    case 'hebrew':
    case 'iso-8859-8':
    case 'iso-8859-8-e':
    case 'iso-ir-138':
    case 'iso8859-8':
    case 'iso88598':
    case 'iso_8859-8':
    case 'iso_8859-8:1988':
    case 'visual':
      return 'ISO-8859-8'
    case 'csiso88598i':
    case 'iso-8859-8-i':
    case 'logical':
      return 'ISO-8859-8-I'
    case 'csisolatin6':
    case 'iso-8859-10':
    case 'iso-ir-157':
    case 'iso8859-10':
    case 'iso885910':
    case 'l6':
    case 'latin6':
      return 'ISO-8859-10'
    case 'iso-8859-13':
    case 'iso8859-13':
    case 'iso885913':
      return 'ISO-8859-13'
    case 'iso-8859-14':
    case 'iso8859-14':
    case 'iso885914':
      return 'ISO-8859-14'
    case 'csisolatin9':
    case 'iso-8859-15':
    case 'iso8859-15':
    case 'iso885915':
    case 'iso_8859-15':
    case 'l9':
      return 'ISO-8859-15'
    case 'iso-8859-16':
      return 'ISO-8859-16'
    case 'cskoi8r':
    case 'koi':
    case 'koi8':
    case 'koi8-r':
    case 'koi8_r':
      return 'KOI8-R'
    case 'koi8-ru':
    case 'koi8-u':
      return 'KOI8-U'
    case 'csmacintosh':
    case 'mac':
    case 'macintosh':
    case 'x-mac-roman':
      return 'macintosh'
    case 'iso-8859-11':
    case 'iso8859-11':
    case 'iso885911':
    case 'tis-620':
    case 'windows-874':
      return 'windows-874'
    case 'cp1250':
    case 'windows-1250':
    case 'x-cp1250':
      return 'windows-1250'
    case 'cp1251':
    case 'windows-1251':
    case 'x-cp1251':
      return 'windows-1251'
    case 'ansi_x3.4-1968':
    case 'ascii':
    case 'cp1252':
    case 'cp819':
    case 'csisolatin1':
    case 'ibm819':
    case 'iso-8859-1':
    case 'iso-ir-100':
    case 'iso8859-1':
    case 'iso88591':
    case 'iso_8859-1':
    case 'iso_8859-1:1987':
    case 'l1':
    case 'latin1':
    case 'us-ascii':
    case 'windows-1252':
    case 'x-cp1252':
      return 'windows-1252'
    case 'cp1253':
    case 'windows-1253':
    case 'x-cp1253':
      return 'windows-1253'
    case 'cp1254':
    case 'csisolatin5':
    case 'iso-8859-9':
    case 'iso-ir-148':
    case 'iso8859-9':
    case 'iso88599':
    case 'iso_8859-9':
    case 'iso_8859-9:1989':
    case 'l5':
    case 'latin5':
    case 'windows-1254':
    case 'x-cp1254':
      return 'windows-1254'
    case 'cp1255':
    case 'windows-1255':
    case 'x-cp1255':
      return 'windows-1255'
    case 'cp1256':
    case 'windows-1256':
    case 'x-cp1256':
      return 'windows-1256'
    case 'cp1257':
    case 'windows-1257':
    case 'x-cp1257':
      return 'windows-1257'
    case 'cp1258':
    case 'windows-1258':
    case 'x-cp1258':
      return 'windows-1258'
    case 'x-mac-cyrillic':
    case 'x-mac-ukrainian':
      return 'x-mac-cyrillic'
    case 'chinese':
    case 'csgb2312':
    case 'csiso58gb231280':
    case 'gb2312':
    case 'gb_2312':
    case 'gb_2312-80':
    case 'gbk':
    case 'iso-ir-58':
    case 'x-gbk':
      return 'GBK'
    case 'gb18030':
      return 'gb18030'
    case 'big5':
    case 'big5-hkscs':
    case 'cn-big5':
    case 'csbig5':
    case 'x-x-big5':
      return 'Big5'
    case 'cseucpkdfmtjapanese':
    case 'euc-jp':
    case 'x-euc-jp':
      return 'EUC-JP'
    case 'csiso2022jp':
    case 'iso-2022-jp':
      return 'ISO-2022-JP'
    case 'csshiftjis':
    case 'ms932':
    case 'ms_kanji':
    case 'shift-jis':
    case 'shift_jis':
    case 'sjis':
    case 'windows-31j':
    case 'x-sjis':
      return 'Shift_JIS'
    case 'cseuckr':
    case 'csksc56011987':
    case 'euc-kr':
    case 'iso-ir-149':
    case 'korean':
    case 'ks_c_5601-1987':
    case 'ks_c_5601-1989':
    case 'ksc5601':
    case 'ksc_5601':
    case 'windows-949':
      return 'EUC-KR'
    case 'csiso2022kr':
    case 'hz-gb-2312':
    case 'iso-2022-cn':
    case 'iso-2022-cn-ext':
    case 'iso-2022-kr':
    case 'replacement':
      return 'replacement'
    case 'unicodefffe':
    case 'utf-16be':
      return 'UTF-16BE'
    case 'csunicode':
    case 'iso-10646-ucs-2':
    case 'ucs-2':
    case 'unicode':
    case 'unicodefeff':
    case 'utf-16':
    case 'utf-16le':
      return 'UTF-16LE'
    case 'x-user-defined':
      return 'x-user-defined'
    default: return 'failure'
  }
}

module.exports = {
  getEncoding
}
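getEncoding is internal rather than part of undici's public surface, so the require path in this sketch points at the vendored file from this diff and is only illustrative:

// Not public API: the path assumes the vendored layout shown in this diff.
const { getEncoding } = require('./node_modules/undici/lib/fileapi/encoding')

console.log(getEncoding('  UTF8  ')) // 'UTF-8' (trimmed, case-insensitive)
console.log(getEncoding('latin1')) // 'windows-1252' (the WHATWG mapping)
console.log(getEncoding('no-such-label')) // 'failure'
console.log(getEncoding(undefined)) // 'failure' (falsy labels short-circuit)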
344
node_modules/undici/lib/fileapi/filereader.js
generated
vendored
@ -1,344 +0,0 @@
'use strict'

const {
  staticPropertyDescriptors,
  readOperation,
  fireAProgressEvent
} = require('./util')
const {
  kState,
  kError,
  kResult,
  kEvents,
  kAborted
} = require('./symbols')
const { webidl } = require('../fetch/webidl')
const { kEnumerableProperty } = require('../core/util')

class FileReader extends EventTarget {
  constructor () {
    super()

    this[kState] = 'empty'
    this[kResult] = null
    this[kError] = null
    this[kEvents] = {
      loadend: null,
      error: null,
      abort: null,
      load: null,
      progress: null,
      loadstart: null
    }
  }

  /**
   * @see https://w3c.github.io/FileAPI/#dfn-readAsArrayBuffer
   * @param {import('buffer').Blob} blob
   */
  readAsArrayBuffer (blob) {
    webidl.brandCheck(this, FileReader)

    webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsArrayBuffer' })

    blob = webidl.converters.Blob(blob, { strict: false })

    // The readAsArrayBuffer(blob) method, when invoked,
    // must initiate a read operation for blob with ArrayBuffer.
    readOperation(this, blob, 'ArrayBuffer')
  }

  /**
   * @see https://w3c.github.io/FileAPI/#readAsBinaryString
   * @param {import('buffer').Blob} blob
   */
  readAsBinaryString (blob) {
    webidl.brandCheck(this, FileReader)

    webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsBinaryString' })

    blob = webidl.converters.Blob(blob, { strict: false })

    // The readAsBinaryString(blob) method, when invoked,
    // must initiate a read operation for blob with BinaryString.
    readOperation(this, blob, 'BinaryString')
  }

  /**
   * @see https://w3c.github.io/FileAPI/#readAsDataText
   * @param {import('buffer').Blob} blob
   * @param {string?} encoding
   */
  readAsText (blob, encoding = undefined) {
    webidl.brandCheck(this, FileReader)

    webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsText' })

    blob = webidl.converters.Blob(blob, { strict: false })

    if (encoding !== undefined) {
      encoding = webidl.converters.DOMString(encoding)
    }

    // The readAsText(blob, encoding) method, when invoked,
    // must initiate a read operation for blob with Text and encoding.
    readOperation(this, blob, 'Text', encoding)
  }

  /**
   * @see https://w3c.github.io/FileAPI/#dfn-readAsDataURL
   * @param {import('buffer').Blob} blob
   */
  readAsDataURL (blob) {
    webidl.brandCheck(this, FileReader)

    webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsDataURL' })

    blob = webidl.converters.Blob(blob, { strict: false })

    // The readAsDataURL(blob) method, when invoked, must
    // initiate a read operation for blob with DataURL.
    readOperation(this, blob, 'DataURL')
  }

  /**
   * @see https://w3c.github.io/FileAPI/#dfn-abort
   */
  abort () {
    // 1. If this's state is "empty" or if this's state is
    //    "done" set this's result to null and terminate
    //    this algorithm.
    if (this[kState] === 'empty' || this[kState] === 'done') {
      this[kResult] = null
      return
    }

    // 2. If this's state is "loading" set this's state to
    //    "done" and set this's result to null.
    if (this[kState] === 'loading') {
      this[kState] = 'done'
      this[kResult] = null
    }

    // 3. If there are any tasks from this on the file reading
    //    task source in an affiliated task queue, then remove
    //    those tasks from that task queue.
    this[kAborted] = true

    // 4. Terminate the algorithm for the read method being processed.
    // TODO

    // 5. Fire a progress event called abort at this.
    fireAProgressEvent('abort', this)

    // 6. If this's state is not "loading", fire a progress
    //    event called loadend at this.
    if (this[kState] !== 'loading') {
      fireAProgressEvent('loadend', this)
    }
  }

  /**
   * @see https://w3c.github.io/FileAPI/#dom-filereader-readystate
   */
  get readyState () {
    webidl.brandCheck(this, FileReader)

    switch (this[kState]) {
      case 'empty': return this.EMPTY
      case 'loading': return this.LOADING
      case 'done': return this.DONE
    }
  }

  /**
   * @see https://w3c.github.io/FileAPI/#dom-filereader-result
   */
  get result () {
    webidl.brandCheck(this, FileReader)

    // The result attribute’s getter, when invoked, must return
    // this's result.
    return this[kResult]
  }

  /**
   * @see https://w3c.github.io/FileAPI/#dom-filereader-error
   */
  get error () {
    webidl.brandCheck(this, FileReader)

    // The error attribute’s getter, when invoked, must return
    // this's error.
    return this[kError]
  }

  get onloadend () {
    webidl.brandCheck(this, FileReader)

    return this[kEvents].loadend
  }

  set onloadend (fn) {
    webidl.brandCheck(this, FileReader)

    if (this[kEvents].loadend) {
      this.removeEventListener('loadend', this[kEvents].loadend)
    }

    if (typeof fn === 'function') {
      this[kEvents].loadend = fn
      this.addEventListener('loadend', fn)
    } else {
      this[kEvents].loadend = null
    }
  }

  get onerror () {
    webidl.brandCheck(this, FileReader)

    return this[kEvents].error
  }

  set onerror (fn) {
    webidl.brandCheck(this, FileReader)

    if (this[kEvents].error) {
      this.removeEventListener('error', this[kEvents].error)
    }

    if (typeof fn === 'function') {
      this[kEvents].error = fn
      this.addEventListener('error', fn)
    } else {
      this[kEvents].error = null
    }
  }

  get onloadstart () {
    webidl.brandCheck(this, FileReader)

    return this[kEvents].loadstart
  }

  set onloadstart (fn) {
    webidl.brandCheck(this, FileReader)

    if (this[kEvents].loadstart) {
      this.removeEventListener('loadstart', this[kEvents].loadstart)
    }

    if (typeof fn === 'function') {
      this[kEvents].loadstart = fn
      this.addEventListener('loadstart', fn)
    } else {
      this[kEvents].loadstart = null
    }
  }

  get onprogress () {
    webidl.brandCheck(this, FileReader)

    return this[kEvents].progress
  }

  set onprogress (fn) {
    webidl.brandCheck(this, FileReader)

    if (this[kEvents].progress) {
      this.removeEventListener('progress', this[kEvents].progress)
    }

    if (typeof fn === 'function') {
      this[kEvents].progress = fn
      this.addEventListener('progress', fn)
    } else {
      this[kEvents].progress = null
    }
  }

  get onload () {
    webidl.brandCheck(this, FileReader)

    return this[kEvents].load
  }

  set onload (fn) {
    webidl.brandCheck(this, FileReader)

    if (this[kEvents].load) {
      this.removeEventListener('load', this[kEvents].load)
    }

    if (typeof fn === 'function') {
      this[kEvents].load = fn
      this.addEventListener('load', fn)
    } else {
      this[kEvents].load = null
    }
  }

  get onabort () {
    webidl.brandCheck(this, FileReader)

    return this[kEvents].abort
  }

  set onabort (fn) {
    webidl.brandCheck(this, FileReader)

    if (this[kEvents].abort) {
      this.removeEventListener('abort', this[kEvents].abort)
    }

    if (typeof fn === 'function') {
      this[kEvents].abort = fn
      this.addEventListener('abort', fn)
    } else {
      this[kEvents].abort = null
    }
  }
}

// https://w3c.github.io/FileAPI/#dom-filereader-empty
FileReader.EMPTY = FileReader.prototype.EMPTY = 0
// https://w3c.github.io/FileAPI/#dom-filereader-loading
FileReader.LOADING = FileReader.prototype.LOADING = 1
// https://w3c.github.io/FileAPI/#dom-filereader-done
FileReader.DONE = FileReader.prototype.DONE = 2

Object.defineProperties(FileReader.prototype, {
  EMPTY: staticPropertyDescriptors,
  LOADING: staticPropertyDescriptors,
  DONE: staticPropertyDescriptors,
  readAsArrayBuffer: kEnumerableProperty,
  readAsBinaryString: kEnumerableProperty,
  readAsText: kEnumerableProperty,
  readAsDataURL: kEnumerableProperty,
  abort: kEnumerableProperty,
  readyState: kEnumerableProperty,
  result: kEnumerableProperty,
  error: kEnumerableProperty,
  onloadstart: kEnumerableProperty,
  onprogress: kEnumerableProperty,
  onload: kEnumerableProperty,
  onabort: kEnumerableProperty,
  onerror: kEnumerableProperty,
  onloadend: kEnumerableProperty,
  [Symbol.toStringTag]: {
    value: 'FileReader',
    writable: false,
    enumerable: false,
    configurable: true
  }
})

Object.defineProperties(FileReader, {
  EMPTY: staticPropertyDescriptors,
  LOADING: staticPropertyDescriptors,
  DONE: staticPropertyDescriptors
})

module.exports = {
  FileReader
}
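A hedged usage sketch of the FileReader above, wiring the event-handler properties and reading a Blob as text (it assumes FileReader is re-exported from the undici package root, as recent releases do):

// Usage sketch; assumption: `FileReader` is re-exported by undici.
const { FileReader } = require('undici')
const { Blob } = require('buffer')

const reader = new FileReader()
reader.onload = () => {
  console.log(reader.readyState === FileReader.DONE) // true
  console.log(reader.result) // 'hello'
}
reader.onerror = () => console.error(reader.error)
// Kicks off readOperation(this, blob, 'Text', 'utf-8') internally
reader.readAsText(new Blob(['hello']), 'utf-8')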
78
node_modules/undici/lib/fileapi/progressevent.js
generated
vendored
@ -1,78 +0,0 @@
'use strict'

const { webidl } = require('../fetch/webidl')

const kState = Symbol('ProgressEvent state')

/**
 * @see https://xhr.spec.whatwg.org/#progressevent
 */
class ProgressEvent extends Event {
  constructor (type, eventInitDict = {}) {
    type = webidl.converters.DOMString(type)
    eventInitDict = webidl.converters.ProgressEventInit(eventInitDict ?? {})

    super(type, eventInitDict)

    this[kState] = {
      lengthComputable: eventInitDict.lengthComputable,
      loaded: eventInitDict.loaded,
      total: eventInitDict.total
    }
  }

  get lengthComputable () {
    webidl.brandCheck(this, ProgressEvent)

    return this[kState].lengthComputable
  }

  get loaded () {
    webidl.brandCheck(this, ProgressEvent)

    return this[kState].loaded
  }

  get total () {
    webidl.brandCheck(this, ProgressEvent)

    return this[kState].total
  }
}

webidl.converters.ProgressEventInit = webidl.dictionaryConverter([
  {
    key: 'lengthComputable',
    converter: webidl.converters.boolean,
    defaultValue: false
  },
  {
    key: 'loaded',
    converter: webidl.converters['unsigned long long'],
    defaultValue: 0
  },
  {
    key: 'total',
    converter: webidl.converters['unsigned long long'],
    defaultValue: 0
  },
  {
    key: 'bubbles',
    converter: webidl.converters.boolean,
    defaultValue: false
  },
  {
    key: 'cancelable',
    converter: webidl.converters.boolean,
    defaultValue: false
  },
  {
    key: 'composed',
    converter: webidl.converters.boolean,
    defaultValue: false
  }
])

module.exports = {
  ProgressEvent
}
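And a small sketch of ProgressEvent and its init-dictionary defaults; the require path again assumes the vendored layout from this diff and is only illustrative:

// Not public API: path assumes the vendored layout shown in this diff.
const { ProgressEvent } = require('./node_modules/undici/lib/fileapi/progressevent')

const event = new ProgressEvent('progress', {
  lengthComputable: true,
  loaded: 512,
  total: 1024
})
// bubbles/cancelable/composed default to false via ProgressEventInit
console.log(event.type, event.lengthComputable, event.loaded / event.total)
// 'progress' true 0.5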
10
node_modules/undici/lib/fileapi/symbols.js
generated
vendored
@ -1,10 +0,0 @@
'use strict'

module.exports = {
  kState: Symbol('FileReader state'),
  kResult: Symbol('FileReader result'),
  kError: Symbol('FileReader error'),
  kLastProgressEventFired: Symbol('FileReader last progress event fired timestamp'),
  kEvents: Symbol('FileReader events'),
  kAborted: Symbol('FileReader aborted')
}
392
node_modules/undici/lib/fileapi/util.js
generated
vendored
@ -1,392 +0,0 @@
'use strict'
|
||||
|
||||
const {
|
||||
kState,
|
||||
kError,
|
||||
kResult,
|
||||
kAborted,
|
||||
kLastProgressEventFired
|
||||
} = require('./symbols')
|
||||
const { ProgressEvent } = require('./progressevent')
|
||||
const { getEncoding } = require('./encoding')
|
||||
const { DOMException } = require('../fetch/constants')
|
||||
const { serializeAMimeType, parseMIMEType } = require('../fetch/dataURL')
|
||||
const { types } = require('util')
|
||||
const { StringDecoder } = require('string_decoder')
|
||||
const { btoa } = require('buffer')
|
||||
|
||||
/** @type {PropertyDescriptor} */
|
||||
const staticPropertyDescriptors = {
|
||||
enumerable: true,
|
||||
writable: false,
|
||||
configurable: false
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://w3c.github.io/FileAPI/#readOperation
|
||||
* @param {import('./filereader').FileReader} fr
|
||||
* @param {import('buffer').Blob} blob
|
||||
* @param {string} type
|
||||
* @param {string?} encodingName
|
||||
*/
|
||||
function readOperation (fr, blob, type, encodingName) {
|
||||
// 1. If fr’s state is "loading", throw an InvalidStateError
|
||||
// DOMException.
|
||||
if (fr[kState] === 'loading') {
|
||||
throw new DOMException('Invalid state', 'InvalidStateError')
|
||||
}
|
||||
|
||||
// 2. Set fr’s state to "loading".
|
||||
fr[kState] = 'loading'
|
||||
|
||||
// 3. Set fr’s result to null.
|
||||
fr[kResult] = null
|
||||
|
||||
// 4. Set fr’s error to null.
|
||||
fr[kError] = null
|
||||
|
||||
// 5. Let stream be the result of calling get stream on blob.
|
||||
/** @type {import('stream/web').ReadableStream} */
|
||||
const stream = blob.stream()
|
||||
|
||||
// 6. Let reader be the result of getting a reader from stream.
|
||||
const reader = stream.getReader()
|
||||
|
||||
// 7. Let bytes be an empty byte sequence.
|
||||
/** @type {Uint8Array[]} */
|
||||
const bytes = []
|
||||
|
||||
// 8. Let chunkPromise be the result of reading a chunk from
|
||||
// stream with reader.
|
||||
let chunkPromise = reader.read()
|
||||
|
||||
// 9. Let isFirstChunk be true.
|
||||
let isFirstChunk = true
|
||||
|
||||
// 10. In parallel, while true:
|
||||
// Note: "In parallel" just means non-blocking
|
||||
// Note 2: readOperation itself cannot be async as double
|
||||
// reading the body would then reject the promise, instead
|
||||
// of throwing an error.
|
||||
;(async () => {
|
||||
while (!fr[kAborted]) {
|
||||
// 1. Wait for chunkPromise to be fulfilled or rejected.
|
||||
try {
|
||||
const { done, value } = await chunkPromise
|
||||
|
||||
// 2. If chunkPromise is fulfilled, and isFirstChunk is
|
||||
        // true, queue a task to fire a progress event called
        // loadstart at fr.
        if (isFirstChunk && !fr[kAborted]) {
          queueMicrotask(() => {
            fireAProgressEvent('loadstart', fr)
          })
        }

        // 3. Set isFirstChunk to false.
        isFirstChunk = false

        // 4. If chunkPromise is fulfilled with an object whose
        //    done property is false and whose value property is
        //    a Uint8Array object, run these steps:
        if (!done && types.isUint8Array(value)) {
          // 1. Let bs be the byte sequence represented by the
          //    Uint8Array object.

          // 2. Append bs to bytes.
          bytes.push(value)

          // 3. If roughly 50ms have passed since these steps
          //    were last invoked, queue a task to fire a
          //    progress event called progress at fr.
          if (
            (
              fr[kLastProgressEventFired] === undefined ||
              Date.now() - fr[kLastProgressEventFired] >= 50
            ) &&
            !fr[kAborted]
          ) {
            fr[kLastProgressEventFired] = Date.now()
            queueMicrotask(() => {
              fireAProgressEvent('progress', fr)
            })
          }

          // 4. Set chunkPromise to the result of reading a
          //    chunk from stream with reader.
          chunkPromise = reader.read()
        } else if (done) {
          // 5. Otherwise, if chunkPromise is fulfilled with an
          //    object whose done property is true, queue a task
          //    to run the following steps and abort this algorithm:
          queueMicrotask(() => {
            // 1. Set fr’s state to "done".
            fr[kState] = 'done'

            // 2. Let result be the result of package data given
            //    bytes, type, blob’s type, and encodingName.
            try {
              const result = packageData(bytes, type, blob.type, encodingName)

              // 4. Else:

              if (fr[kAborted]) {
                return
              }

              // 1. Set fr’s result to result.
              fr[kResult] = result

              // 2. Fire a progress event called load at the fr.
              fireAProgressEvent('load', fr)
            } catch (error) {
              // 3. If package data threw an exception error:

              // 1. Set fr’s error to error.
              fr[kError] = error

              // 2. Fire a progress event called error at fr.
              fireAProgressEvent('error', fr)
            }

            // 5. If fr’s state is not "loading", fire a progress
            //    event called loadend at the fr.
            if (fr[kState] !== 'loading') {
              fireAProgressEvent('loadend', fr)
            }
          })

          break
        }
      } catch (error) {
        if (fr[kAborted]) {
          return
        }

        // 6. Otherwise, if chunkPromise is rejected with an
        //    error error, queue a task to run the following
        //    steps and abort this algorithm:
        queueMicrotask(() => {
          // 1. Set fr’s state to "done".
          fr[kState] = 'done'

          // 2. Set fr’s error to error.
          fr[kError] = error

          // 3. Fire a progress event called error at fr.
          fireAProgressEvent('error', fr)

          // 4. If fr’s state is not "loading", fire a progress
          //    event called loadend at fr.
          if (fr[kState] !== 'loading') {
            fireAProgressEvent('loadend', fr)
          }
        })

        break
      }
    }
  })()
}

/**
 * @see https://w3c.github.io/FileAPI/#fire-a-progress-event
 * @see https://dom.spec.whatwg.org/#concept-event-fire
 * @param {string} e The name of the event
 * @param {import('./filereader').FileReader} reader
 */
function fireAProgressEvent (e, reader) {
  // The progress event e does not bubble. e.bubbles must be false
  // The progress event e is NOT cancelable. e.cancelable must be false
  const event = new ProgressEvent(e, {
    bubbles: false,
    cancelable: false
  })

  reader.dispatchEvent(event)
}
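As the comments above note, these progress events neither bubble nor are cancelable. A standalone sketch of that contract (an illustration only, assuming Node 18+, where EventTarget and Event are global):

const target = new EventTarget()

target.addEventListener('progress', (event) => {
  // Mirrors what fireAProgressEvent guarantees for FileReader events.
  console.log(event.type, event.bubbles, event.cancelable) // 'progress' false false
})

target.dispatchEvent(new Event('progress', { bubbles: false, cancelable: false }))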

/**
 * @see https://w3c.github.io/FileAPI/#blob-package-data
 * @param {Uint8Array[]} bytes
 * @param {string} type
 * @param {string?} mimeType
 * @param {string?} encodingName
 */
function packageData (bytes, type, mimeType, encodingName) {
  // 1. A Blob has an associated package data algorithm, given
  //    bytes, a type, an optional mimeType, and an optional
  //    encodingName, which switches on type and runs the
  //    associated steps:

  switch (type) {
    case 'DataURL': {
      // 1. Return bytes as a DataURL [RFC2397] subject to
      //    the considerations below:
      //  * Use mimeType as part of the Data URL if it is
      //    available in keeping with the Data URL
      //    specification [RFC2397].
      //  * If mimeType is not available return a Data URL
      //    without a media-type. [RFC2397].

      // https://datatracker.ietf.org/doc/html/rfc2397#section-3
      // dataurl   := "data:" [ mediatype ] [ ";base64" ] "," data
      // mediatype := [ type "/" subtype ] *( ";" parameter )
      // data      := *urlchar
      // parameter := attribute "=" value
      let dataURL = 'data:'

      const parsed = parseMIMEType(mimeType || 'application/octet-stream')

      if (parsed !== 'failure') {
        dataURL += serializeAMimeType(parsed)
      }

      dataURL += ';base64,'

      const decoder = new StringDecoder('latin1')

      for (const chunk of bytes) {
        dataURL += btoa(decoder.write(chunk))
      }

      dataURL += btoa(decoder.end())

      return dataURL
    }
    case 'Text': {
      // 1. Let encoding be failure
      let encoding = 'failure'

      // 2. If the encodingName is present, set encoding to the
      //    result of getting an encoding from encodingName.
      if (encodingName) {
        encoding = getEncoding(encodingName)
      }

      // 3. If encoding is failure, and mimeType is present:
      if (encoding === 'failure' && mimeType) {
        // 1. Let type be the result of parse a MIME type
        //    given mimeType.
        const type = parseMIMEType(mimeType)

        // 2. If type is not failure, set encoding to the result
        //    of getting an encoding from type’s parameters["charset"].
        if (type !== 'failure') {
          encoding = getEncoding(type.parameters.get('charset'))
        }
      }

      // 4. If encoding is failure, then set encoding to UTF-8.
      if (encoding === 'failure') {
        encoding = 'UTF-8'
      }

      // 5. Decode bytes using fallback encoding encoding, and
      //    return the result.
      return decode(bytes, encoding)
    }
    case 'ArrayBuffer': {
      // Return a new ArrayBuffer whose contents are bytes.
      const sequence = combineByteSequences(bytes)

      return sequence.buffer
    }
    case 'BinaryString': {
      // Return bytes as a binary string, in which every byte
      // is represented by a code unit of equal value [0..255].
      let binaryString = ''

      const decoder = new StringDecoder('latin1')

      for (const chunk of bytes) {
        binaryString += decoder.write(chunk)
      }

      binaryString += decoder.end()

      return binaryString
    }
  }
}
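The DataURL branch above builds the URL incrementally from chunks. As a standalone illustration of the same RFC 2397 output shape (not undici's API; plain Node Buffer is used instead of StringDecoder and btoa, and the helper name is hypothetical):

// Hypothetical helper: concatenate first, then base64-encode once, which
// sidesteps any chunk-boundary concerns entirely.
function toDataURL (chunks, mimeType = 'application/octet-stream') {
  const body = Buffer.concat(chunks).toString('base64')
  return `data:${mimeType};base64,${body}`
}

console.log(toDataURL([Buffer.from('hello '), Buffer.from('world')], 'text/plain'))
// data:text/plain;base64,aGVsbG8gd29ybGQ=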

/**
 * @see https://encoding.spec.whatwg.org/#decode
 * @param {Uint8Array[]} ioQueue
 * @param {string} encoding
 */
function decode (ioQueue, encoding) {
  const bytes = combineByteSequences(ioQueue)

  // 1. Let BOMEncoding be the result of BOM sniffing ioQueue.
  const BOMEncoding = BOMSniffing(bytes)

  let slice = 0

  // 2. If BOMEncoding is non-null:
  if (BOMEncoding !== null) {
    // 1. Set encoding to BOMEncoding.
    encoding = BOMEncoding

    // 2. Read three bytes from ioQueue, if BOMEncoding is
    //    UTF-8; otherwise read two bytes.
    //    (Do nothing with those bytes.)
    slice = BOMEncoding === 'UTF-8' ? 3 : 2
  }

  // 3. Process a queue with an instance of encoding’s
  //    decoder, ioQueue, output, and "replacement".

  // 4. Return output.

  const sliced = bytes.slice(slice)
  return new TextDecoder(encoding).decode(sliced)
}
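A quick check of the precedence decode implements: a byte-order mark overrides whatever encoding label the caller passes in, and the BOM bytes are skipped before decoding. A minimal sketch using the same TextDecoder primitive (illustrative only; 'windows-1252' stands in for any caller-supplied label):

// Bytes for 'hi' preceded by a UTF-8 BOM (EF BB BF).
const bytes = Uint8Array.from([0xEF, 0xBB, 0xBF, 0x68, 0x69])

const hasBOM = bytes[0] === 0xEF && bytes[1] === 0xBB && bytes[2] === 0xBF
const text = new TextDecoder(hasBOM ? 'UTF-8' : 'windows-1252')
  .decode(hasBOM ? bytes.slice(3) : bytes)

console.log(text) // 'hi'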

/**
 * @see https://encoding.spec.whatwg.org/#bom-sniff
 * @param {Uint8Array} ioQueue
 */
function BOMSniffing (ioQueue) {
  // 1. Let BOM be the result of peeking 3 bytes from ioQueue,
  //    converted to a byte sequence.
  const [a, b, c] = ioQueue

  // 2. For each of the rows in the table below, starting with
  //    the first one and going down, if BOM starts with the
  //    bytes given in the first column, then return the
  //    encoding given in the cell in the second column of that
  //    row. Otherwise, return null.
  if (a === 0xEF && b === 0xBB && c === 0xBF) {
    return 'UTF-8'
  } else if (a === 0xFE && b === 0xFF) {
    return 'UTF-16BE'
  } else if (a === 0xFF && b === 0xFE) {
    return 'UTF-16LE'
  }

  return null
}

/**
 * @param {Uint8Array[]} sequences
 */
function combineByteSequences (sequences) {
  const size = sequences.reduce((a, b) => {
    return a + b.byteLength
  }, 0)

  let offset = 0

  return sequences.reduce((a, b) => {
    a.set(b, offset)
    offset += b.byteLength
    return a
  }, new Uint8Array(size))
}
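combineByteSequences flattens a list of chunks into one contiguous buffer, the Uint8Array analogue of Buffer.concat. A usage sketch (calling it in module scope, since it is not exported):

const combined = combineByteSequences([
  Uint8Array.from([1, 2]),
  Uint8Array.from([3]),
  Uint8Array.from([4, 5, 6])
])

console.log(combined) // Uint8Array(6) [ 1, 2, 3, 4, 5, 6 ]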

module.exports = {
  staticPropertyDescriptors,
  readOperation,
  fireAProgressEvent
}
2
node_modules/undici/lib/global.js
generated
vendored
@ -4,7 +4,7 @@
// this version number must be increased to avoid conflicts.
const globalDispatcher = Symbol.for('undici.globalDispatcher.1')
const { InvalidArgumentError } = require('./core/errors')
const Agent = require('./agent')
const Agent = require('./dispatcher/agent')

if (getGlobalDispatcher() === undefined) {
  setGlobalDispatcher(new Agent())
35
node_modules/undici/lib/handler/DecoratorHandler.js
generated
vendored
@ -1,35 +0,0 @@
'use strict'

module.exports = class DecoratorHandler {
  constructor (handler) {
    this.handler = handler
  }

  onConnect (...args) {
    return this.handler.onConnect(...args)
  }

  onError (...args) {
    return this.handler.onError(...args)
  }

  onUpgrade (...args) {
    return this.handler.onUpgrade(...args)
  }

  onHeaders (...args) {
    return this.handler.onHeaders(...args)
  }

  onData (...args) {
    return this.handler.onData(...args)
  }

  onComplete (...args) {
    return this.handler.onComplete(...args)
  }

  onBodySent (...args) {
    return this.handler.onBodySent(...args)
  }
}
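The deleted class above simply forwarded every legacy callback to the wrapped handler, so a subclass could override a single hook and delegate the rest. A sketch under that legacy interface (LoggingHandler is hypothetical; the onHeaders signature follows the methods shown above):

// Hypothetical subclass: observe the response status, delegate everything else.
class LoggingHandler extends DecoratorHandler {
  onHeaders (statusCode, rawHeaders, resume, statusMessage) {
    console.log('status:', statusCode)
    return super.onHeaders(statusCode, rawHeaders, resume, statusMessage)
  }
}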
336
node_modules/undici/lib/handler/RetryHandler.js
generated
vendored
@ -1,336 +0,0 @@
const assert = require('assert')

const { kRetryHandlerDefaultRetry } = require('../core/symbols')
const { RequestRetryError } = require('../core/errors')
const { isDisturbed, parseHeaders, parseRangeHeader } = require('../core/util')

function calculateRetryAfterHeader (retryAfter) {
  const current = Date.now()
  const diff = new Date(retryAfter).getTime() - current

  return diff
}

class RetryHandler {
  constructor (opts, handlers) {
    const { retryOptions, ...dispatchOpts } = opts
    const {
      // Retry scoped
      retry: retryFn,
      maxRetries,
      maxTimeout,
      minTimeout,
      timeoutFactor,
      // Response scoped
      methods,
      errorCodes,
      retryAfter,
      statusCodes
    } = retryOptions ?? {}

    this.dispatch = handlers.dispatch
    this.handler = handlers.handler
    this.opts = dispatchOpts
    this.abort = null
    this.aborted = false
    this.retryOpts = {
      retry: retryFn ?? RetryHandler[kRetryHandlerDefaultRetry],
      retryAfter: retryAfter ?? true,
      maxTimeout: maxTimeout ?? 30 * 1000, // 30s,
      timeout: minTimeout ?? 500, // .5s
      timeoutFactor: timeoutFactor ?? 2,
      maxRetries: maxRetries ?? 5,
      // What errors we should retry
      methods: methods ?? ['GET', 'HEAD', 'OPTIONS', 'PUT', 'DELETE', 'TRACE'],
      // Indicates which errors to retry
      statusCodes: statusCodes ?? [500, 502, 503, 504, 429],
      // List of errors to retry
      errorCodes: errorCodes ?? [
        'ECONNRESET',
        'ECONNREFUSED',
        'ENOTFOUND',
        'ENETDOWN',
        'ENETUNREACH',
        'EHOSTDOWN',
        'EHOSTUNREACH',
        'EPIPE'
      ]
    }

    this.retryCount = 0
    this.start = 0
    this.end = null
    this.etag = null
    this.resume = null

    // Handle possible onConnect duplication
    this.handler.onConnect(reason => {
      this.aborted = true
      if (this.abort) {
        this.abort(reason)
      } else {
        this.reason = reason
      }
    })
  }

  onRequestSent () {
    if (this.handler.onRequestSent) {
      this.handler.onRequestSent()
    }
  }

  onUpgrade (statusCode, headers, socket) {
    if (this.handler.onUpgrade) {
      this.handler.onUpgrade(statusCode, headers, socket)
    }
  }

  onConnect (abort) {
    if (this.aborted) {
      abort(this.reason)
    } else {
      this.abort = abort
    }
  }

  onBodySent (chunk) {
    if (this.handler.onBodySent) return this.handler.onBodySent(chunk)
  }

  static [kRetryHandlerDefaultRetry] (err, { state, opts }, cb) {
    const { statusCode, code, headers } = err
    const { method, retryOptions } = opts
    const {
      maxRetries,
      timeout,
      maxTimeout,
      timeoutFactor,
      statusCodes,
      errorCodes,
      methods
    } = retryOptions
    let { counter, currentTimeout } = state

    currentTimeout =
      currentTimeout != null && currentTimeout > 0 ? currentTimeout : timeout

    // Any code that is not a Undici's originated and allowed to retry
    if (
      code &&
      code !== 'UND_ERR_REQ_RETRY' &&
      code !== 'UND_ERR_SOCKET' &&
      !errorCodes.includes(code)
    ) {
      cb(err)
      return
    }

    // If a set of method are provided and the current method is not in the list
    if (Array.isArray(methods) && !methods.includes(method)) {
      cb(err)
      return
    }

    // If a set of status code are provided and the current status code is not in the list
    if (
      statusCode != null &&
      Array.isArray(statusCodes) &&
      !statusCodes.includes(statusCode)
    ) {
      cb(err)
      return
    }

    // If we reached the max number of retries
    if (counter > maxRetries) {
      cb(err)
      return
    }

    let retryAfterHeader = headers != null && headers['retry-after']
    if (retryAfterHeader) {
      retryAfterHeader = Number(retryAfterHeader)
      retryAfterHeader = isNaN(retryAfterHeader)
        ? calculateRetryAfterHeader(retryAfterHeader)
        : retryAfterHeader * 1e3 // Retry-After is in seconds
    }

    const retryTimeout =
      retryAfterHeader > 0
        ? Math.min(retryAfterHeader, maxTimeout)
        : Math.min(currentTimeout * timeoutFactor ** counter, maxTimeout)

    state.currentTimeout = retryTimeout

    setTimeout(() => cb(null), retryTimeout)
  }

  onHeaders (statusCode, rawHeaders, resume, statusMessage) {
    const headers = parseHeaders(rawHeaders)

    this.retryCount += 1

    if (statusCode >= 300) {
      this.abort(
        new RequestRetryError('Request failed', statusCode, {
          headers,
          count: this.retryCount
        })
      )
      return false
    }

    // Checkpoint for resume from where we left it
    if (this.resume != null) {
      this.resume = null

      if (statusCode !== 206) {
        return true
      }

      const contentRange = parseRangeHeader(headers['content-range'])
      // If no content range
      if (!contentRange) {
        this.abort(
          new RequestRetryError('Content-Range mismatch', statusCode, {
            headers,
            count: this.retryCount
          })
        )
        return false
      }

      // Let's start with a weak etag check
      if (this.etag != null && this.etag !== headers.etag) {
        this.abort(
          new RequestRetryError('ETag mismatch', statusCode, {
            headers,
            count: this.retryCount
          })
        )
        return false
      }

      const { start, size, end = size } = contentRange

      assert(this.start === start, 'content-range mismatch')
      assert(this.end == null || this.end === end, 'content-range mismatch')

      this.resume = resume
      return true
    }

    if (this.end == null) {
      if (statusCode === 206) {
        // First time we receive 206
        const range = parseRangeHeader(headers['content-range'])

        if (range == null) {
          return this.handler.onHeaders(
            statusCode,
            rawHeaders,
            resume,
            statusMessage
          )
        }

        const { start, size, end = size } = range

        assert(
          start != null && Number.isFinite(start) && this.start !== start,
          'content-range mismatch'
        )
        assert(Number.isFinite(start))
        assert(
          end != null && Number.isFinite(end) && this.end !== end,
          'invalid content-length'
        )

        this.start = start
        this.end = end
      }

      // We make our best to checkpoint the body for further range headers
      if (this.end == null) {
        const contentLength = headers['content-length']
        this.end = contentLength != null ? Number(contentLength) : null
      }

      assert(Number.isFinite(this.start))
      assert(
        this.end == null || Number.isFinite(this.end),
        'invalid content-length'
      )

      this.resume = resume
      this.etag = headers.etag != null ? headers.etag : null

      return this.handler.onHeaders(
        statusCode,
        rawHeaders,
        resume,
        statusMessage
      )
    }

    const err = new RequestRetryError('Request failed', statusCode, {
      headers,
      count: this.retryCount
    })

    this.abort(err)

    return false
  }

  onData (chunk) {
    this.start += chunk.length

    return this.handler.onData(chunk)
  }

  onComplete (rawTrailers) {
    this.retryCount = 0
    return this.handler.onComplete(rawTrailers)
  }

  onError (err) {
    if (this.aborted || isDisturbed(this.opts.body)) {
      return this.handler.onError(err)
    }

    this.retryOpts.retry(
      err,
      {
        state: { counter: this.retryCount++, currentTimeout: this.retryAfter },
        opts: { retryOptions: this.retryOpts, ...this.opts }
      },
      onRetry.bind(this)
    )

    function onRetry (err) {
      if (err != null || this.aborted || isDisturbed(this.opts.body)) {
        return this.handler.onError(err)
      }

      if (this.start !== 0) {
        this.opts = {
          ...this.opts,
          headers: {
            ...this.opts.headers,
            range: `bytes=${this.start}-${this.end ?? ''}`
          }
        }
      }

      try {
        this.dispatch(this.opts, this)
      } catch (err) {
        this.handler.onError(err)
      }
    }
  }
}

module.exports = RetryHandler
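For reference, the deleted class above was constructed directly around a dispatch function. A usage sketch under the legacy API (myHandler is a hypothetical object implementing the legacy onConnect/onHeaders/onData/onComplete/onError callbacks; the require path mirrors the deleted file's location in this tree):

const { Client } = require('undici')
const RetryHandler = require('undici/lib/handler/RetryHandler') // deleted path, shown for context only

const client = new Client('http://localhost:3000')
const opts = { method: 'GET', path: '/resource', retryOptions: { maxRetries: 3 } }

// RetryHandler wraps the downstream handler and re-dispatches the request
// (optionally with a Range header) when the retry predicate allows it.
client.dispatch(opts, new RetryHandler(opts, {
  dispatch: client.dispatch.bind(client),
  handler: myHandler // hypothetical legacy-style handler
}))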
448
node_modules/undici/lib/handler/cache-handler.js
generated
vendored
Normal file
@ -0,0 +1,448 @@
'use strict'

const util = require('../core/util')
const {
  parseCacheControlHeader,
  parseVaryHeader,
  isEtagUsable
} = require('../util/cache')
const { parseHttpDate } = require('../util/date.js')

function noop () {}

// Status codes that we can use some heuristics on to cache
const HEURISTICALLY_CACHEABLE_STATUS_CODES = [
  200, 203, 204, 206, 300, 301, 308, 404, 405, 410, 414, 501
]

const MAX_RESPONSE_AGE = 2147483647000

/**
 * @typedef {import('../../types/dispatcher.d.ts').default.DispatchHandler} DispatchHandler
 *
 * @implements {DispatchHandler}
 */
class CacheHandler {
  /**
   * @type {import('../../types/cache-interceptor.d.ts').default.CacheKey}
   */
  #cacheKey

  /**
   * @type {import('../../types/cache-interceptor.d.ts').default.CacheHandlerOptions['type']}
   */
  #cacheType

  /**
   * @type {number | undefined}
   */
  #cacheByDefault

  /**
   * @type {import('../../types/cache-interceptor.d.ts').default.CacheStore}
   */
  #store

  /**
   * @type {import('../../types/dispatcher.d.ts').default.DispatchHandler}
   */
  #handler

  /**
   * @type {import('node:stream').Writable | undefined}
   */
  #writeStream

  /**
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheHandlerOptions} opts
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} cacheKey
   * @param {import('../../types/dispatcher.d.ts').default.DispatchHandler} handler
   */
  constructor ({ store, type, cacheByDefault }, cacheKey, handler) {
    this.#store = store
    this.#cacheType = type
    this.#cacheByDefault = cacheByDefault
    this.#cacheKey = cacheKey
    this.#handler = handler
  }

  onRequestStart (controller, context) {
    this.#writeStream?.destroy()
    this.#writeStream = undefined
    this.#handler.onRequestStart?.(controller, context)
  }

  onRequestUpgrade (controller, statusCode, headers, socket) {
    this.#handler.onRequestUpgrade?.(controller, statusCode, headers, socket)
  }

  /**
   * @param {import('../../types/dispatcher.d.ts').default.DispatchController} controller
   * @param {number} statusCode
   * @param {import('../../types/header.d.ts').IncomingHttpHeaders} resHeaders
   * @param {string} statusMessage
   */
  onResponseStart (
    controller,
    statusCode,
    resHeaders,
    statusMessage
  ) {
    const downstreamOnHeaders = () =>
      this.#handler.onResponseStart?.(
        controller,
        statusCode,
        resHeaders,
        statusMessage
      )

    if (
      !util.safeHTTPMethods.includes(this.#cacheKey.method) &&
      statusCode >= 200 &&
      statusCode <= 399
    ) {
      // Successful response to an unsafe method, delete it from cache
      // https://www.rfc-editor.org/rfc/rfc9111.html#name-invalidating-stored-response
      try {
        this.#store.delete(this.#cacheKey)?.catch?.(noop)
      } catch {
        // Fail silently
      }
      return downstreamOnHeaders()
    }

    const cacheControlHeader = resHeaders['cache-control']
    const heuristicallyCacheable = resHeaders['last-modified'] && HEURISTICALLY_CACHEABLE_STATUS_CODES.includes(statusCode)
    if (
      !cacheControlHeader &&
      !resHeaders['expires'] &&
      !heuristicallyCacheable &&
      !this.#cacheByDefault
    ) {
      // Don't have anything to tell us this response is cacheable and we're not
      // caching by default
      return downstreamOnHeaders()
    }

    const cacheControlDirectives = cacheControlHeader ? parseCacheControlHeader(cacheControlHeader) : {}
    if (!canCacheResponse(this.#cacheType, statusCode, resHeaders, cacheControlDirectives)) {
      return downstreamOnHeaders()
    }

    const now = Date.now()
    const resAge = resHeaders.age ? getAge(resHeaders.age) : undefined
    if (resAge && resAge >= MAX_RESPONSE_AGE) {
      // Response considered stale
      return downstreamOnHeaders()
    }

    const resDate = typeof resHeaders.date === 'string'
      ? parseHttpDate(resHeaders.date)
      : undefined

    const staleAt =
      determineStaleAt(this.#cacheType, now, resAge, resHeaders, resDate, cacheControlDirectives) ??
      this.#cacheByDefault
    if (staleAt === undefined || (resAge && resAge > staleAt)) {
      return downstreamOnHeaders()
    }

    const baseTime = resDate ? resDate.getTime() : now
    const absoluteStaleAt = staleAt + baseTime
    if (now >= absoluteStaleAt) {
      // Response is already stale
      return downstreamOnHeaders()
    }

    let varyDirectives
    if (this.#cacheKey.headers && resHeaders.vary) {
      varyDirectives = parseVaryHeader(resHeaders.vary, this.#cacheKey.headers)
      if (!varyDirectives) {
        // Parse error
        return downstreamOnHeaders()
      }
    }

    const deleteAt = determineDeleteAt(baseTime, cacheControlDirectives, absoluteStaleAt)
    const strippedHeaders = stripNecessaryHeaders(resHeaders, cacheControlDirectives)

    /**
     * @type {import('../../types/cache-interceptor.d.ts').default.CacheValue}
     */
    const value = {
      statusCode,
      statusMessage,
      headers: strippedHeaders,
      vary: varyDirectives,
      cacheControlDirectives,
      cachedAt: resAge ? now - resAge : now,
      staleAt: absoluteStaleAt,
      deleteAt
    }

    if (typeof resHeaders.etag === 'string' && isEtagUsable(resHeaders.etag)) {
      value.etag = resHeaders.etag
    }

    this.#writeStream = this.#store.createWriteStream(this.#cacheKey, value)
    if (!this.#writeStream) {
      return downstreamOnHeaders()
    }

    const handler = this
    this.#writeStream
      .on('drain', () => controller.resume())
      .on('error', function () {
        // TODO (fix): Make error somehow observable?
        handler.#writeStream = undefined

        // Delete the value in case the cache store is holding onto state from
        // the call to createWriteStream
        handler.#store.delete(handler.#cacheKey)
      })
      .on('close', function () {
        if (handler.#writeStream === this) {
          handler.#writeStream = undefined
        }

        // TODO (fix): Should we resume even if was paused downstream?
        controller.resume()
      })

    return downstreamOnHeaders()
  }

  onResponseData (controller, chunk) {
    if (this.#writeStream?.write(chunk) === false) {
      controller.pause()
    }

    this.#handler.onResponseData?.(controller, chunk)
  }

  onResponseEnd (controller, trailers) {
    this.#writeStream?.end()
    this.#handler.onResponseEnd?.(controller, trailers)
  }

  onResponseError (controller, err) {
    this.#writeStream?.destroy(err)
    this.#writeStream = undefined
    this.#handler.onResponseError?.(controller, err)
  }
}

/**
 * @see https://www.rfc-editor.org/rfc/rfc9111.html#name-storing-responses-to-authen
 *
 * @param {import('../../types/cache-interceptor.d.ts').default.CacheOptions['type']} cacheType
 * @param {number} statusCode
 * @param {import('../../types/header.d.ts').IncomingHttpHeaders} resHeaders
 * @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives} cacheControlDirectives
 */
function canCacheResponse (cacheType, statusCode, resHeaders, cacheControlDirectives) {
  if (statusCode !== 200 && statusCode !== 307) {
    return false
  }

  if (cacheControlDirectives['no-store']) {
    return false
  }

  if (cacheType === 'shared' && cacheControlDirectives.private === true) {
    return false
  }

  // https://www.rfc-editor.org/rfc/rfc9111.html#section-4.1-5
  if (resHeaders.vary?.includes('*')) {
    return false
  }

  // https://www.rfc-editor.org/rfc/rfc9111.html#name-storing-responses-to-authen
  if (resHeaders.authorization) {
    if (!cacheControlDirectives.public || typeof resHeaders.authorization !== 'string') {
      return false
    }

    if (
      Array.isArray(cacheControlDirectives['no-cache']) &&
      cacheControlDirectives['no-cache'].includes('authorization')
    ) {
      return false
    }

    if (
      Array.isArray(cacheControlDirectives['private']) &&
      cacheControlDirectives['private'].includes('authorization')
    ) {
      return false
    }
  }

  return true
}

/**
 * @param {string | string[]} ageHeader
 * @returns {number | undefined}
 */
function getAge (ageHeader) {
  const age = parseInt(Array.isArray(ageHeader) ? ageHeader[0] : ageHeader)

  return isNaN(age) ? undefined : age * 1000
}

/**
 * @param {import('../../types/cache-interceptor.d.ts').default.CacheOptions['type']} cacheType
 * @param {number} now
 * @param {number | undefined} age
 * @param {import('../../types/header.d.ts').IncomingHttpHeaders} resHeaders
 * @param {Date | undefined} responseDate
 * @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives} cacheControlDirectives
 *
 * @returns {number | undefined} time that the value is stale at in seconds or undefined if it shouldn't be cached
 */
function determineStaleAt (cacheType, now, age, resHeaders, responseDate, cacheControlDirectives) {
  if (cacheType === 'shared') {
    // Prioritize s-maxage since we're a shared cache
    //  s-maxage > max-age > Expires
    // https://www.rfc-editor.org/rfc/rfc9111.html#section-5.2.2.10-3
    const sMaxAge = cacheControlDirectives['s-maxage']
    if (sMaxAge !== undefined) {
      return sMaxAge > 0 ? sMaxAge * 1000 : undefined
    }
  }

  const maxAge = cacheControlDirectives['max-age']
  if (maxAge !== undefined) {
    return maxAge > 0 ? maxAge * 1000 : undefined
  }

  if (typeof resHeaders.expires === 'string') {
    // https://www.rfc-editor.org/rfc/rfc9111.html#section-5.3
    const expiresDate = parseHttpDate(resHeaders.expires)
    if (expiresDate) {
      if (now >= expiresDate.getTime()) {
        return undefined
      }

      if (responseDate) {
        if (responseDate >= expiresDate) {
          return undefined
        }

        if (age !== undefined && age > (expiresDate - responseDate)) {
          return undefined
        }
      }

      return expiresDate.getTime() - now
    }
  }

  if (typeof resHeaders['last-modified'] === 'string') {
    // https://www.rfc-editor.org/rfc/rfc9111.html#name-calculating-heuristic-fresh
    const lastModified = new Date(resHeaders['last-modified'])
    if (isValidDate(lastModified)) {
      if (lastModified.getTime() >= now) {
        return undefined
      }

      const responseAge = now - lastModified.getTime()

      return responseAge * 0.1
    }
  }

  if (cacheControlDirectives.immutable) {
    // https://www.rfc-editor.org/rfc/rfc8246.html#section-2.2
    return 31536000
  }

  return undefined
}

/**
 * @param {number} now
 * @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives} cacheControlDirectives
 * @param {number} staleAt
 */
function determineDeleteAt (now, cacheControlDirectives, staleAt) {
  let staleWhileRevalidate = -Infinity
  let staleIfError = -Infinity
  let immutable = -Infinity

  if (cacheControlDirectives['stale-while-revalidate']) {
    staleWhileRevalidate = staleAt + (cacheControlDirectives['stale-while-revalidate'] * 1000)
  }

  if (cacheControlDirectives['stale-if-error']) {
    staleIfError = staleAt + (cacheControlDirectives['stale-if-error'] * 1000)
  }

  if (staleWhileRevalidate === -Infinity && staleIfError === -Infinity) {
    immutable = now + 31536000000
  }

  return Math.max(staleAt, staleWhileRevalidate, staleIfError, immutable)
}

/**
 * Strips headers required to be removed in cached responses
 * @param {import('../../types/header.d.ts').IncomingHttpHeaders} resHeaders
 * @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives} cacheControlDirectives
 * @returns {Record<string, string | string []>}
 */
function stripNecessaryHeaders (resHeaders, cacheControlDirectives) {
  const headersToRemove = [
    'connection',
    'proxy-authenticate',
    'proxy-authentication-info',
    'proxy-authorization',
    'proxy-connection',
    'te',
    'transfer-encoding',
    'upgrade',
    // We'll add age back when serving it
    'age'
  ]

  if (resHeaders['connection']) {
    if (Array.isArray(resHeaders['connection'])) {
      // connection: a
      // connection: b
      headersToRemove.push(...resHeaders['connection'].map(header => header.trim()))
    } else {
      // connection: a, b
      headersToRemove.push(...resHeaders['connection'].split(',').map(header => header.trim()))
    }
  }

  if (Array.isArray(cacheControlDirectives['no-cache'])) {
    headersToRemove.push(...cacheControlDirectives['no-cache'])
  }

  if (Array.isArray(cacheControlDirectives['private'])) {
    headersToRemove.push(...cacheControlDirectives['private'])
  }

  let strippedHeaders
  for (const headerName of headersToRemove) {
    if (resHeaders[headerName]) {
      strippedHeaders ??= { ...resHeaders }
      delete strippedHeaders[headerName]
    }
  }

  return strippedHeaders ?? resHeaders
}

/**
 * @param {Date} date
 * @returns {boolean}
 */
function isValidDate (date) {
  return date instanceof Date && Number.isFinite(date.valueOf())
}

module.exports = CacheHandler
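The freshness precedence implemented above (s-maxage for shared caches, then max-age, then Expires, then a 10% heuristic on Last-Modified) can be exercised in isolation. A minimal sketch restating the same arithmetic (not undici's API; the helper name and option shape are illustrative):

// Hypothetical restatement of determineStaleAt's precedence, returning the
// remaining freshness lifetime in milliseconds, or undefined if uncacheable.
function freshnessLifetime ({ sMaxAge, maxAge, expiresMs, dateMs, lastModifiedMs }, shared = true) {
  if (shared && sMaxAge !== undefined) return sMaxAge * 1000
  if (maxAge !== undefined) return maxAge * 1000
  if (expiresMs !== undefined && dateMs !== undefined) return expiresMs - dateMs
  if (lastModifiedMs !== undefined && dateMs !== undefined) {
    return (dateMs - lastModifiedMs) * 0.1 // heuristic freshness, 10% of age
  }
  return undefined
}

console.log(freshnessLifetime({ maxAge: 60 })) // 60000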
124
node_modules/undici/lib/handler/cache-revalidation-handler.js
generated
vendored
Normal file
@ -0,0 +1,124 @@
'use strict'

const assert = require('node:assert')

/**
 * This takes care of revalidation requests we send to the origin. If we get
 * a response indicating that what we have cached is still valid (via an HTTP
 * 304), we can continue using the cached value. Otherwise, we'll receive the
 * new response here, which we then just pass on to the next handler (most
 * likely a CacheHandler). Note that this assumes the proper headers were
 * already included in the request to tell the origin that we want to
 * revalidate the response (i.e. if-modified-since or if-none-match).
 *
 * @see https://www.rfc-editor.org/rfc/rfc9111.html#name-validation
 *
 * @implements {import('../../types/dispatcher.d.ts').default.DispatchHandler}
 */
class CacheRevalidationHandler {
  #successful = false

  /**
   * @type {((boolean, any) => void) | null}
   */
  #callback

  /**
   * @type {(import('../../types/dispatcher.d.ts').default.DispatchHandler)}
   */
  #handler

  #context

  /**
   * @type {boolean}
   */
  #allowErrorStatusCodes

  /**
   * @param {(boolean) => void} callback Function to call if the cached value is valid
   * @param {import('../../types/dispatcher.d.ts').default.DispatchHandlers} handler
   * @param {boolean} allowErrorStatusCodes
   */
  constructor (callback, handler, allowErrorStatusCodes) {
    if (typeof callback !== 'function') {
      throw new TypeError('callback must be a function')
    }

    this.#callback = callback
    this.#handler = handler
    this.#allowErrorStatusCodes = allowErrorStatusCodes
  }

  onRequestStart (_, context) {
    this.#successful = false
    this.#context = context
  }

  onRequestUpgrade (controller, statusCode, headers, socket) {
    this.#handler.onRequestUpgrade?.(controller, statusCode, headers, socket)
  }

  onResponseStart (
    controller,
    statusCode,
    headers,
    statusMessage
  ) {
    assert(this.#callback != null)

    // https://www.rfc-editor.org/rfc/rfc9111.html#name-handling-a-validation-respo
    // https://datatracker.ietf.org/doc/html/rfc5861#section-4
    this.#successful = statusCode === 304 ||
      (this.#allowErrorStatusCodes && statusCode >= 500 && statusCode <= 504)
    this.#callback(this.#successful, this.#context)
    this.#callback = null

    if (this.#successful) {
      return true
    }

    this.#handler.onRequestStart?.(controller, this.#context)
    this.#handler.onResponseStart?.(
      controller,
      statusCode,
      headers,
      statusMessage
    )
  }

  onResponseData (controller, chunk) {
    if (this.#successful) {
      return
    }

    return this.#handler.onResponseData?.(controller, chunk)
  }

  onResponseEnd (controller, trailers) {
    if (this.#successful) {
      return
    }

    this.#handler.onResponseEnd?.(controller, trailers)
  }

  onResponseError (controller, err) {
    if (this.#successful) {
      return
    }

    if (this.#callback) {
      this.#callback(false)
      this.#callback = null
    }

    if (typeof this.#handler.onResponseError === 'function') {
      this.#handler.onResponseError(controller, err)
    } else {
      throw err
    }
  }
}

module.exports = CacheRevalidationHandler
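As the class comment notes, the conditional headers must already be on the outgoing request. A sketch of what a caller would derive from a previously cached value before dispatching a revalidation (the header names are standard HTTP; the helper and the cached-value shape are illustrative):

// Illustrative only: build conditional request headers from a cached entry.
function revalidationHeaders (cached) {
  const headers = {}
  if (cached.etag) headers['if-none-match'] = cached.etag
  if (cached.headers['last-modified']) {
    headers['if-modified-since'] = cached.headers['last-modified']
  }
  return headers
}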
67
node_modules/undici/lib/handler/decorator-handler.js
generated
vendored
Normal file
@ -0,0 +1,67 @@
'use strict'

const assert = require('node:assert')
const WrapHandler = require('./wrap-handler')

/**
 * @deprecated
 */
module.exports = class DecoratorHandler {
  #handler
  #onCompleteCalled = false
  #onErrorCalled = false
  #onResponseStartCalled = false

  constructor (handler) {
    if (typeof handler !== 'object' || handler === null) {
      throw new TypeError('handler must be an object')
    }
    this.#handler = WrapHandler.wrap(handler)
  }

  onRequestStart (...args) {
    this.#handler.onRequestStart?.(...args)
  }

  onRequestUpgrade (...args) {
    assert(!this.#onCompleteCalled)
    assert(!this.#onErrorCalled)

    return this.#handler.onRequestUpgrade?.(...args)
  }

  onResponseStart (...args) {
    assert(!this.#onCompleteCalled)
    assert(!this.#onErrorCalled)
    assert(!this.#onResponseStartCalled)

    this.#onResponseStartCalled = true

    return this.#handler.onResponseStart?.(...args)
  }

  onResponseData (...args) {
    assert(!this.#onCompleteCalled)
    assert(!this.#onErrorCalled)

    return this.#handler.onResponseData?.(...args)
  }

  onResponseEnd (...args) {
    assert(!this.#onCompleteCalled)
    assert(!this.#onErrorCalled)

    this.#onCompleteCalled = true
    return this.#handler.onResponseEnd?.(...args)
  }

  onResponseError (...args) {
    this.#onErrorCalled = true
    return this.#handler.onResponseError?.(...args)
  }

  /**
   * @deprecated
   */
  onBodySent () {}
}
@ -2,14 +2,16 @@

const util = require('../core/util')
const { kBodyUsed } = require('../core/symbols')
const assert = require('assert')
const assert = require('node:assert')
const { InvalidArgumentError } = require('../core/errors')
const EE = require('events')
const EE = require('node:events')

const redirectableStatusCodes = [300, 301, 302, 303, 307, 308]

const kBody = Symbol('body')

const noop = () => {}

class BodyAsyncIterable {
  constructor (body) {
    this[kBody] = body
@ -24,16 +26,22 @@ class BodyAsyncIterable {
  }
}

class RedirectHandler {
  static buildDispatch (dispatcher, maxRedirections) {
    if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) {
      throw new InvalidArgumentError('maxRedirections must be a positive number')
    }

    const dispatch = dispatcher.dispatch.bind(dispatcher)
    return (opts, originalHandler) => dispatch(opts, new RedirectHandler(dispatch, maxRedirections, opts, originalHandler))
  }

  constructor (dispatch, maxRedirections, opts, handler) {
    if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) {
      throw new InvalidArgumentError('maxRedirections must be a positive number')
    }

    util.validateHandler(handler, opts.method, opts.upgrade)

    this.dispatch = dispatch
    this.location = null
    this.abort = null
    this.opts = { ...opts, maxRedirections: 0 } // opts must be a copy
    this.maxRedirections = maxRedirections
    this.handler = handler
@ -65,7 +73,8 @@ class RedirectHandler {
      this.opts.body &&
      typeof this.opts.body !== 'string' &&
      !ArrayBuffer.isView(this.opts.body) &&
      util.isIterable(this.opts.body)
      util.isIterable(this.opts.body) &&
      !util.isFormDataLike(this.opts.body)
    ) {
      // TODO: Should we allow re-using iterable if !this.opts.idempotent
      // or through some other flag?
@ -73,30 +82,51 @@ class RedirectHandler {
    }
  }

  onConnect (abort) {
    this.abort = abort
    this.handler.onConnect(abort, { history: this.history })
  onRequestStart (controller, context) {
    this.handler.onRequestStart?.(controller, { ...context, history: this.history })
  }

  onUpgrade (statusCode, headers, socket) {
    this.handler.onUpgrade(statusCode, headers, socket)
  onRequestUpgrade (controller, statusCode, headers, socket) {
    this.handler.onRequestUpgrade?.(controller, statusCode, headers, socket)
  }

  onError (error) {
    this.handler.onError(error)
  }
  onResponseStart (controller, statusCode, headers, statusMessage) {
    if (this.opts.throwOnMaxRedirect && this.history.length >= this.maxRedirections) {
      throw new Error('max redirects')
    }

  onHeaders (statusCode, headers, resume, statusText) {
    this.location = this.history.length >= this.maxRedirections || util.isDisturbed(this.opts.body)
    // https://tools.ietf.org/html/rfc7231#section-6.4.2
    // https://fetch.spec.whatwg.org/#http-redirect-fetch
    // In case of HTTP 301 or 302 with POST, change the method to GET
    if ((statusCode === 301 || statusCode === 302) && this.opts.method === 'POST') {
      this.opts.method = 'GET'
      if (util.isStream(this.opts.body)) {
        util.destroy(this.opts.body.on('error', noop))
      }
      this.opts.body = null
    }

    // https://tools.ietf.org/html/rfc7231#section-6.4.4
    // In case of HTTP 303, always replace method to be either HEAD or GET
    if (statusCode === 303 && this.opts.method !== 'HEAD') {
      this.opts.method = 'GET'
      if (util.isStream(this.opts.body)) {
        util.destroy(this.opts.body.on('error', noop))
      }
      this.opts.body = null
    }

    this.location = this.history.length >= this.maxRedirections || util.isDisturbed(this.opts.body) || redirectableStatusCodes.indexOf(statusCode) === -1
      ? null
      : parseLocation(statusCode, headers)
      : headers.location

    if (this.opts.origin) {
      this.history.push(new URL(this.opts.path, this.opts.origin))
    }

    if (!this.location) {
      return this.handler.onHeaders(statusCode, headers, resume, statusText)
      this.handler.onResponseStart?.(controller, statusCode, headers, statusMessage)
      return
    }

    const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin)))
@ -110,23 +140,16 @@ class RedirectHandler {
    this.opts.origin = origin
    this.opts.maxRedirections = 0
    this.opts.query = null

    // https://tools.ietf.org/html/rfc7231#section-6.4.4
    // In case of HTTP 303, always replace method to be either HEAD or GET
    if (statusCode === 303 && this.opts.method !== 'HEAD') {
      this.opts.method = 'GET'
      this.opts.body = null
    }
  }

  onData (chunk) {
  onResponseData (controller, chunk) {
    if (this.location) {
      /*
        https://tools.ietf.org/html/rfc7231#section-6.4

        TLDR: undici always ignores 3xx response bodies.

        Redirection is used to serve the requested resource from another URL, so it is assumes that
        Redirection is used to serve the requested resource from another URL, so it assumes that
        no body is generated (and thus can be ignored). Even though generating a body is not prohibited.

        For status 301, 302, 303, 307 and 308 (the latter from RFC 7238), the specs mention that the body usually
@ -135,15 +158,15 @@ class RedirectHandler {

        For status 300, which is "Multiple Choices", the spec mentions both generating a Location
        response header AND a response body with the other possible location to follow.
        Since the spec explicitily chooses not to specify a format for such body and leave it to
        Since the spec explicitly chooses not to specify a format for such body and leave it to
        servers and browsers implementors, we ignore the body as there is no specified way to eventually parse it.
      */
    } else {
      return this.handler.onData(chunk)
      this.handler.onResponseData?.(controller, chunk)
    }
  }

  onComplete (trailers) {
  onResponseEnd (controller, trailers) {
    if (this.location) {
      /*
        https://tools.ietf.org/html/rfc7231#section-6.4
@ -151,34 +174,16 @@ class RedirectHandler {
        TLDR: undici always ignores 3xx response trailers as they are not expected in case of redirections
        and neither are useful if present.

        See comment on onData method above for more detailed informations.
        See comment on onData method above for more detailed information.
      */

      this.location = null
      this.abort = null

      this.dispatch(this.opts, this)
    } else {
      this.handler.onComplete(trailers)
      this.handler.onResponseEnd(controller, trailers)
    }
  }

  onBodySent (chunk) {
    if (this.handler.onBodySent) {
      this.handler.onBodySent(chunk)
    }
  }
}

function parseLocation (statusCode, headers) {
  if (redirectableStatusCodes.indexOf(statusCode) === -1) {
    return null
  }

  for (let i = 0; i < headers.length; i += 2) {
    if (headers[i].toString().toLowerCase() === 'location') {
      return headers[i + 1]
    }
  onResponseError (controller, error) {
    this.handler.onResponseError?.(controller, error)
  }
}

@ -207,9 +212,10 @@ function cleanRequestHeaders (headers, removeContent, unknownOrigin) {
      }
    }
  } else if (headers && typeof headers === 'object') {
    for (const key of Object.keys(headers)) {
    const entries = typeof headers[Symbol.iterator] === 'function' ? headers : Object.entries(headers)
    for (const [key, value] of entries) {
      if (!shouldRemoveHeader(key, removeContent, unknownOrigin)) {
        ret.push(key, headers[key])
        ret.push(key, value)
      }
    }
  } else {
342
node_modules/undici/lib/handler/retry-handler.js
generated
vendored
Normal file
@ -0,0 +1,342 @@
'use strict'
const assert = require('node:assert')

const { kRetryHandlerDefaultRetry } = require('../core/symbols')
const { RequestRetryError } = require('../core/errors')
const WrapHandler = require('./wrap-handler')
const {
  isDisturbed,
  parseRangeHeader,
  wrapRequestBody
} = require('../core/util')

function calculateRetryAfterHeader (retryAfter) {
  const retryTime = new Date(retryAfter).getTime()
  return isNaN(retryTime) ? 0 : retryTime - Date.now()
}

class RetryHandler {
  constructor (opts, { dispatch, handler }) {
    const { retryOptions, ...dispatchOpts } = opts
    const {
      // Retry scoped
      retry: retryFn,
      maxRetries,
      maxTimeout,
      minTimeout,
      timeoutFactor,
      // Response scoped
      methods,
      errorCodes,
      retryAfter,
      statusCodes
    } = retryOptions ?? {}

    this.dispatch = dispatch
    this.handler = WrapHandler.wrap(handler)
    this.opts = { ...dispatchOpts, body: wrapRequestBody(opts.body) }
    this.retryOpts = {
      retry: retryFn ?? RetryHandler[kRetryHandlerDefaultRetry],
      retryAfter: retryAfter ?? true,
      maxTimeout: maxTimeout ?? 30 * 1000, // 30s,
      minTimeout: minTimeout ?? 500, // .5s
      timeoutFactor: timeoutFactor ?? 2,
      maxRetries: maxRetries ?? 5,
      // What errors we should retry
      methods: methods ?? ['GET', 'HEAD', 'OPTIONS', 'PUT', 'DELETE', 'TRACE'],
      // Indicates which errors to retry
      statusCodes: statusCodes ?? [500, 502, 503, 504, 429],
      // List of errors to retry
      errorCodes: errorCodes ?? [
        'ECONNRESET',
        'ECONNREFUSED',
        'ENOTFOUND',
        'ENETDOWN',
        'ENETUNREACH',
        'EHOSTDOWN',
        'EHOSTUNREACH',
        'EPIPE',
        'UND_ERR_SOCKET'
      ]
    }

    this.retryCount = 0
    this.retryCountCheckpoint = 0
    this.headersSent = false
    this.start = 0
    this.end = null
    this.etag = null
  }

  onRequestStart (controller, context) {
    if (!this.headersSent) {
      this.handler.onRequestStart?.(controller, context)
    }
  }

  onRequestUpgrade (controller, statusCode, headers, socket) {
    this.handler.onRequestUpgrade?.(controller, statusCode, headers, socket)
  }

  static [kRetryHandlerDefaultRetry] (err, { state, opts }, cb) {
    const { statusCode, code, headers } = err
    const { method, retryOptions } = opts
    const {
      maxRetries,
      minTimeout,
      maxTimeout,
      timeoutFactor,
      statusCodes,
      errorCodes,
      methods
    } = retryOptions
    const { counter } = state

    // Any error code that is not undici-originated and not allowed to retry
    if (code && code !== 'UND_ERR_REQ_RETRY' && !errorCodes.includes(code)) {
      cb(err)
      return
    }

    // If a set of methods is provided and the current method is not in the list
    if (Array.isArray(methods) && !methods.includes(method)) {
      cb(err)
      return
    }

    // If a set of status codes is provided and the current status code is not in the list
    if (
      statusCode != null &&
      Array.isArray(statusCodes) &&
      !statusCodes.includes(statusCode)
    ) {
      cb(err)
      return
    }

    // If we reached the max number of retries
    if (counter > maxRetries) {
      cb(err)
      return
    }

    let retryAfterHeader = headers?.['retry-after']
    if (retryAfterHeader) {
      retryAfterHeader = Number(retryAfterHeader)
      retryAfterHeader = Number.isNaN(retryAfterHeader)
        ? calculateRetryAfterHeader(headers['retry-after'])
        : retryAfterHeader * 1e3 // Retry-After is in seconds
    }

    const retryTimeout =
      retryAfterHeader > 0
        ? Math.min(retryAfterHeader, maxTimeout)
        : Math.min(minTimeout * timeoutFactor ** (counter - 1), maxTimeout)

    setTimeout(() => cb(null), retryTimeout)
  }

  onResponseStart (controller, statusCode, headers, statusMessage) {
    this.retryCount += 1

    if (statusCode >= 300) {
      if (this.retryOpts.statusCodes.includes(statusCode) === false) {
        this.headersSent = true
        this.handler.onResponseStart?.(
          controller,
          statusCode,
          headers,
          statusMessage
        )
        return
      } else {
        throw new RequestRetryError('Request failed', statusCode, {
          headers,
          data: {
            count: this.retryCount
          }
        })
      }
    }

    // Checkpoint for resume from where we left it
    if (this.headersSent) {
      // Only Partial Content (206) is supposed to provide Content-Range;
      // any other status code that partially consumed the payload
      // should not be retried because it would result in downstream
      // wrongly concatenating multiple responses.
      if (statusCode !== 206 && (this.start > 0 || statusCode !== 200)) {
        throw new RequestRetryError('server does not support the range header and the payload was partially consumed', statusCode, {
          headers,
          data: { count: this.retryCount }
        })
      }

      const contentRange = parseRangeHeader(headers['content-range'])
      // If no content range
      if (!contentRange) {
        throw new RequestRetryError('Content-Range mismatch', statusCode, {
          headers,
          data: { count: this.retryCount }
        })
      }

      // Let's start with a weak etag check
      if (this.etag != null && this.etag !== headers.etag) {
        throw new RequestRetryError('ETag mismatch', statusCode, {
          headers,
          data: { count: this.retryCount }
        })
      }

      const { start, size, end = size ? size - 1 : null } = contentRange

      assert(this.start === start, 'content-range mismatch')
      assert(this.end == null || this.end === end, 'content-range mismatch')

      return
    }

    if (this.end == null) {
      if (statusCode === 206) {
        // First time we receive 206
        const range = parseRangeHeader(headers['content-range'])

        if (range == null) {
          this.headersSent = true
          this.handler.onResponseStart?.(
            controller,
            statusCode,
            headers,
            statusMessage
          )
          return
        }

        const { start, size, end = size ? size - 1 : null } = range
        assert(
          start != null && Number.isFinite(start),
          'content-range mismatch'
        )
        assert(end != null && Number.isFinite(end), 'invalid content-length')

        this.start = start
        this.end = end
      }

      // We make our best to checkpoint the body for further range headers
      if (this.end == null) {
        const contentLength = headers['content-length']
        this.end = contentLength != null ? Number(contentLength) - 1 : null
      }

      assert(Number.isFinite(this.start))
      assert(
        this.end == null || Number.isFinite(this.end),
        'invalid content-length'
      )

      this.resume = true
      this.etag = headers.etag != null ? headers.etag : null

      // Weak etags are not useful for comparison nor cache
      // for instance not safe to assume if the response is byte-per-byte
      // equal
      if (
        this.etag != null &&
        this.etag[0] === 'W' &&
        this.etag[1] === '/'
      ) {
        this.etag = null
      }

      this.headersSent = true
      this.handler.onResponseStart?.(
        controller,
        statusCode,
        headers,
        statusMessage
      )
    } else {
      throw new RequestRetryError('Request failed', statusCode, {
        headers,
        data: { count: this.retryCount }
      })
    }
  }

  onResponseData (controller, chunk) {
    this.start += chunk.length

    this.handler.onResponseData?.(controller, chunk)
  }

  onResponseEnd (controller, trailers) {
    this.retryCount = 0
    return this.handler.onResponseEnd?.(controller, trailers)
  }

  onResponseError (controller, err) {
    if (controller?.aborted || isDisturbed(this.opts.body)) {
      this.handler.onResponseError?.(controller, err)
      return
    }

    // We reconcile in case of a mix between network errors
    // and server error response
    if (this.retryCount - this.retryCountCheckpoint > 0) {
      // We count the difference between the last checkpoint and the current retry count
      this.retryCount =
        this.retryCountCheckpoint +
        (this.retryCount - this.retryCountCheckpoint)
    } else {
      this.retryCount += 1
    }

    this.retryOpts.retry(
      err,
      {
        state: { counter: this.retryCount },
        opts: { retryOptions: this.retryOpts, ...this.opts }
      },
      onRetry.bind(this)
    )

    /**
     * @this {RetryHandler}
     * @param {Error} [err]
     * @returns
     */
    function onRetry (err) {
      if (err != null || controller?.aborted || isDisturbed(this.opts.body)) {
        return this.handler.onResponseError?.(controller, err)
      }

      if (this.start !== 0) {
        const headers = { range: `bytes=${this.start}-${this.end ?? ''}` }

        // Weak etag check - weak etags will make comparison algorithms never match
        if (this.etag != null) {
          headers['if-match'] = this.etag
        }

        this.opts = {
          ...this.opts,
          headers: {
            ...this.opts.headers,
            ...headers
          }
        }
      }

      try {
        this.retryCountCheckpoint = this.retryCount
        this.dispatch(this.opts, this)
      } catch (err) {
        this.handler.onResponseError?.(controller, err)
      }
    }
  }
}

module.exports = RetryHandler
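For reference, the retry behaviour above is normally reached through undici's retry interceptor rather than by constructing RetryHandler directly. A sketch, assuming the interceptors API exported by recent undici versions:

const { Agent, interceptors } = require('undici')

// Retries idempotent requests on the default status codes (5xx/429) and
// network error codes shown above, with exponential backoff capped at 30s.
const agent = new Agent().compose(interceptors.retry({ maxRetries: 3 }))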
96
node_modules/undici/lib/handler/unwrap-handler.js
generated
vendored
Normal file
@ -0,0 +1,96 @@
'use strict'

const { parseHeaders } = require('../core/util')
const { InvalidArgumentError } = require('../core/errors')

const kResume = Symbol('resume')

class UnwrapController {
  #paused = false
  #reason = null
  #aborted = false
  #abort

  [kResume] = null

  constructor (abort) {
    this.#abort = abort
  }

  pause () {
    this.#paused = true
  }

  resume () {
    if (this.#paused) {
      this.#paused = false
      this[kResume]?.()
    }
  }

  abort (reason) {
    if (!this.#aborted) {
      this.#aborted = true
      this.#reason = reason
      this.#abort(reason)
    }
  }

  get aborted () {
    return this.#aborted
  }

  get reason () {
    return this.#reason
  }

  get paused () {
    return this.#paused
  }
}

module.exports = class UnwrapHandler {
  #handler
  #controller

  constructor (handler) {
    this.#handler = handler
  }

  static unwrap (handler) {
    // TODO (fix): More checks...
    return !handler.onRequestStart ? handler : new UnwrapHandler(handler)
  }

  onConnect (abort, context) {
    this.#controller = new UnwrapController(abort)
    this.#handler.onRequestStart?.(this.#controller, context)
  }

  onUpgrade (statusCode, rawHeaders, socket) {
    this.#handler.onRequestUpgrade?.(this.#controller, statusCode, parseHeaders(rawHeaders), socket)
  }

  onHeaders (statusCode, rawHeaders, resume, statusMessage) {
    this.#controller[kResume] = resume
    this.#handler.onResponseStart?.(this.#controller, statusCode, parseHeaders(rawHeaders), statusMessage)
    return !this.#controller.paused
  }

  onData (data) {
    this.#handler.onResponseData?.(this.#controller, data)
    return !this.#controller.paused
  }

  onComplete (rawTrailers) {
    this.#handler.onResponseEnd?.(this.#controller, parseHeaders(rawTrailers))
  }

  onError (err) {
    if (!this.#handler.onResponseError) {
      throw new InvalidArgumentError('invalid onError method')
    }

    this.#handler.onResponseError?.(this.#controller, err)
  }
}
|
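UnwrapHandler is the adapter from the new controller-based handler interface (onRequestStart/onResponseStart/...) back to the legacy event interface (onConnect/onHeaders/onData/...): pausing the controller makes onHeaders/onData return false, and the stored kResume callback fires on resume(). A rough usage sketch under that assumption (this is an undici-internal module; the require path is for illustration, not a public entry point):

// Illustrative only: unwrap-handler is internal to undici.
const UnwrapHandler = require('undici/lib/handler/unwrap-handler')

const controllerStyle = {
  onRequestStart (controller) {},
  onResponseStart (controller, statusCode, headers) {
    console.log('status:', statusCode)
    controller.pause() // the legacy side sees onHeaders() === false
  },
  onResponseData (controller, chunk) {},
  onResponseEnd (controller, trailers) {},
  onResponseError (controller, err) { console.error(err) }
}

// Legacy dispatch code can now call onConnect/onHeaders/onData/onComplete.
const legacyStyle = UnwrapHandler.unwrap(controllerStyle)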
95 node_modules/undici/lib/handler/wrap-handler.js generated vendored Normal file
@@ -0,0 +1,95 @@
'use strict'

const { InvalidArgumentError } = require('../core/errors')

module.exports = class WrapHandler {
  #handler

  constructor (handler) {
    this.#handler = handler
  }

  static wrap (handler) {
    // TODO (fix): More checks...
    return handler.onRequestStart ? handler : new WrapHandler(handler)
  }

  // Unwrap Interface

  onConnect (abort, context) {
    return this.#handler.onConnect?.(abort, context)
  }

  onHeaders (statusCode, rawHeaders, resume, statusMessage) {
    return this.#handler.onHeaders?.(statusCode, rawHeaders, resume, statusMessage)
  }

  onUpgrade (statusCode, rawHeaders, socket) {
    return this.#handler.onUpgrade?.(statusCode, rawHeaders, socket)
  }

  onData (data) {
    return this.#handler.onData?.(data)
  }

  onComplete (trailers) {
    return this.#handler.onComplete?.(trailers)
  }

  onError (err) {
    if (!this.#handler.onError) {
      throw err
    }

    return this.#handler.onError?.(err)
  }

  // Wrap Interface

  onRequestStart (controller, context) {
    this.#handler.onConnect?.((reason) => controller.abort(reason), context)
  }

  onRequestUpgrade (controller, statusCode, headers, socket) {
    const rawHeaders = []
    for (const [key, val] of Object.entries(headers)) {
      rawHeaders.push(Buffer.from(key), Array.isArray(val) ? val.map(v => Buffer.from(v)) : Buffer.from(val))
    }

    this.#handler.onUpgrade?.(statusCode, rawHeaders, socket)
  }

  onResponseStart (controller, statusCode, headers, statusMessage) {
    const rawHeaders = []
    for (const [key, val] of Object.entries(headers)) {
      rawHeaders.push(Buffer.from(key), Array.isArray(val) ? val.map(v => Buffer.from(v)) : Buffer.from(val))
    }

    if (this.#handler.onHeaders?.(statusCode, rawHeaders, () => controller.resume(), statusMessage) === false) {
      controller.pause()
    }
  }

  onResponseData (controller, data) {
    if (this.#handler.onData?.(data) === false) {
      controller.pause()
    }
  }

  onResponseEnd (controller, trailers) {
    const rawTrailers = []
    for (const [key, val] of Object.entries(trailers)) {
      rawTrailers.push(Buffer.from(key), Array.isArray(val) ? val.map(v => Buffer.from(v)) : Buffer.from(val))
    }

    this.#handler.onComplete?.(rawTrailers)
  }

  onResponseError (controller, err) {
    if (!this.#handler.onError) {
      throw new InvalidArgumentError('invalid onError method')
    }

    this.#handler.onError?.(err)
  }
}
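WrapHandler is the mirror adapter: it lets a legacy handler run under the controller-based dispatch path. Note the backpressure mapping in onResponseStart/onResponseData above: a legacy handler returning false is translated into controller.pause(), and the resume callback handed to onHeaders simply calls controller.resume(). Sketch (same caveat: internal module, path shown for illustration):

// Illustrative only: wrap-handler is undici-internal.
const WrapHandler = require('undici/lib/handler/wrap-handler')

const legacy = {
  onConnect (abort) {},
  onHeaders (statusCode, rawHeaders, resume) {
    console.log('status:', statusCode)
    return false // WrapHandler turns this into controller.pause()
  },
  onData (chunk) { return true },
  onComplete (trailers) {},
  onError (err) { console.error(err) }
}

// wrap() is a no-op for handlers that already speak the new interface.
const controllerStyle = WrapHandler.wrap(legacy)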
362 node_modules/undici/lib/interceptor/cache.js generated vendored Normal file
@@ -0,0 +1,362 @@
'use strict'

const assert = require('node:assert')
const { Readable } = require('node:stream')
const util = require('../core/util')
const CacheHandler = require('../handler/cache-handler')
const MemoryCacheStore = require('../cache/memory-cache-store')
const CacheRevalidationHandler = require('../handler/cache-revalidation-handler')
const { assertCacheStore, assertCacheMethods, makeCacheKey, normaliseHeaders, parseCacheControlHeader } = require('../util/cache.js')
const { AbortError } = require('../core/errors.js')

/**
 * @typedef {(options: import('../../types/dispatcher.d.ts').default.DispatchOptions, handler: import('../../types/dispatcher.d.ts').default.DispatchHandler) => void} DispatchFn
 */

/**
 * @param {import('../../types/cache-interceptor.d.ts').default.GetResult} result
 * @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives | undefined} cacheControlDirectives
 * @returns {boolean}
 */
function needsRevalidation (result, cacheControlDirectives) {
  if (cacheControlDirectives?.['no-cache']) {
    // Always revalidate requests with the no-cache directive
    return true
  }

  const now = Date.now()
  if (now > result.staleAt) {
    // Response is stale
    if (cacheControlDirectives?.['max-stale']) {
      // There's a threshold where we can serve stale responses, let's see if
      // we're in it
      // https://www.rfc-editor.org/rfc/rfc9111.html#name-max-stale
      const gracePeriod = result.staleAt + (cacheControlDirectives['max-stale'] * 1000)
      return now > gracePeriod
    }

    return true
  }

  if (cacheControlDirectives?.['min-fresh']) {
    // https://www.rfc-editor.org/rfc/rfc9111.html#section-5.2.1.3

    // At this point, staleAt is always > now
    const timeLeftTillStale = result.staleAt - now
    const threshold = cacheControlDirectives['min-fresh'] * 1000

    return timeLeftTillStale <= threshold
  }

  return false
}

/**
 * @param {DispatchFn} dispatch
 * @param {import('../../types/cache-interceptor.d.ts').default.CacheHandlerOptions} globalOpts
 * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} cacheKey
 * @param {import('../../types/dispatcher.d.ts').default.DispatchHandler} handler
 * @param {import('../../types/dispatcher.d.ts').default.RequestOptions} opts
 * @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives | undefined} reqCacheControl
 */
function handleUncachedResponse (
  dispatch,
  globalOpts,
  cacheKey,
  handler,
  opts,
  reqCacheControl
) {
  if (reqCacheControl?.['only-if-cached']) {
    let aborted = false
    try {
      if (typeof handler.onConnect === 'function') {
        handler.onConnect(() => {
          aborted = true
        })

        if (aborted) {
          return
        }
      }

      if (typeof handler.onHeaders === 'function') {
        handler.onHeaders(504, [], () => {}, 'Gateway Timeout')
        if (aborted) {
          return
        }
      }

      if (typeof handler.onComplete === 'function') {
        handler.onComplete([])
      }
    } catch (err) {
      if (typeof handler.onError === 'function') {
        handler.onError(err)
      }
    }

    return true
  }

  return dispatch(opts, new CacheHandler(globalOpts, cacheKey, handler))
}

/**
 * @param {import('../../types/dispatcher.d.ts').default.DispatchHandler} handler
 * @param {import('../../types/dispatcher.d.ts').default.RequestOptions} opts
 * @param {import('../../types/cache-interceptor.d.ts').default.GetResult} result
 * @param {number} age
 * @param {any} context
 * @param {boolean} isStale
 */
function sendCachedValue (handler, opts, result, age, context, isStale) {
  // TODO (perf): Readable.from path can be optimized...
  const stream = util.isStream(result.body)
    ? result.body
    : Readable.from(result.body ?? [])

  assert(!stream.destroyed, 'stream should not be destroyed')
  assert(!stream.readableDidRead, 'stream should not be readableDidRead')

  const controller = {
    resume () {
      stream.resume()
    },
    pause () {
      stream.pause()
    },
    get paused () {
      return stream.isPaused()
    },
    get aborted () {
      return stream.destroyed
    },
    get reason () {
      return stream.errored
    },
    abort (reason) {
      stream.destroy(reason ?? new AbortError())
    }
  }

  stream
    .on('error', function (err) {
      if (!this.readableEnded) {
        if (typeof handler.onResponseError === 'function') {
          handler.onResponseError(controller, err)
        } else {
          throw err
        }
      }
    })
    .on('close', function () {
      if (!this.errored) {
        handler.onResponseEnd?.(controller, {})
      }
    })

  handler.onRequestStart?.(controller, context)

  if (stream.destroyed) {
    return
  }

  // Add the age header
  // https://www.rfc-editor.org/rfc/rfc9111.html#name-age
  const headers = { ...result.headers, age: String(age) }

  if (isStale) {
    // Add warning header
    // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Warning
    headers.warning = '110 - "response is stale"'
  }

  handler.onResponseStart?.(controller, result.statusCode, headers, result.statusMessage)

  if (opts.method === 'HEAD') {
    stream.destroy()
  } else {
    stream.on('data', function (chunk) {
      handler.onResponseData?.(controller, chunk)
    })
  }
}

/**
 * @param {DispatchFn} dispatch
 * @param {import('../../types/cache-interceptor.d.ts').default.CacheHandlerOptions} globalOpts
 * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} cacheKey
 * @param {import('../../types/dispatcher.d.ts').default.DispatchHandler} handler
 * @param {import('../../types/dispatcher.d.ts').default.RequestOptions} opts
 * @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives | undefined} reqCacheControl
 * @param {import('../../types/cache-interceptor.d.ts').default.GetResult | undefined} result
 */
function handleResult (
  dispatch,
  globalOpts,
  cacheKey,
  handler,
  opts,
  reqCacheControl,
  result
) {
  if (!result) {
    return handleUncachedResponse(dispatch, globalOpts, cacheKey, handler, opts, reqCacheControl)
  }

  const now = Date.now()
  if (now > result.deleteAt) {
    // Response is expired, cache store shouldn't have given this to us
    return dispatch(opts, new CacheHandler(globalOpts, cacheKey, handler))
  }

  const age = Math.round((now - result.cachedAt) / 1000)
  if (reqCacheControl?.['max-age'] && age >= reqCacheControl['max-age']) {
    // Response is considered expired for this specific request
    // https://www.rfc-editor.org/rfc/rfc9111.html#section-5.2.1.1
    return dispatch(opts, handler)
  }

  // Check if the response is stale
  if (needsRevalidation(result, reqCacheControl)) {
    if (util.isStream(opts.body) && util.bodyLength(opts.body) !== 0) {
      // If body is a stream we can't revalidate...
      // TODO (fix): This could be less strict...
      return dispatch(opts, new CacheHandler(globalOpts, cacheKey, handler))
    }

    let withinStaleIfErrorThreshold = false
    const staleIfErrorExpiry = result.cacheControlDirectives['stale-if-error'] ?? reqCacheControl?.['stale-if-error']
    if (staleIfErrorExpiry) {
      withinStaleIfErrorThreshold = now < (result.staleAt + (staleIfErrorExpiry * 1000))
    }

    let headers = {
      ...normaliseHeaders(opts),
      'if-modified-since': new Date(result.cachedAt).toUTCString()
    }

    if (result.etag) {
      headers['if-none-match'] = result.etag
    }

    if (result.vary) {
      headers = {
        ...headers,
        ...result.vary
      }
    }

    // We need to revalidate the response
    return dispatch(
      {
        ...opts,
        headers
      },
      new CacheRevalidationHandler(
        (success, context) => {
          if (success) {
            sendCachedValue(handler, opts, result, age, context, true)
          } else if (util.isStream(result.body)) {
            result.body.on('error', () => {}).destroy()
          }
        },
        new CacheHandler(globalOpts, cacheKey, handler),
        withinStaleIfErrorThreshold
      )
    )
  }

  // Dump request body.
  if (util.isStream(opts.body)) {
    opts.body.on('error', () => {}).destroy()
  }

  sendCachedValue(handler, opts, result, age, null, false)
}

/**
 * @param {import('../../types/cache-interceptor.d.ts').default.CacheOptions} [opts]
 * @returns {import('../../types/dispatcher.d.ts').default.DispatcherComposeInterceptor}
 */
module.exports = (opts = {}) => {
  const {
    store = new MemoryCacheStore(),
    methods = ['GET'],
    cacheByDefault = undefined,
    type = 'shared'
  } = opts

  if (typeof opts !== 'object' || opts === null) {
    throw new TypeError(`expected type of opts to be an Object, got ${opts === null ? 'null' : typeof opts}`)
  }

  assertCacheStore(store, 'opts.store')
  assertCacheMethods(methods, 'opts.methods')

  if (typeof cacheByDefault !== 'undefined' && typeof cacheByDefault !== 'number') {
    throw new TypeError(`expected opts.cacheByDefault to be number or undefined, got ${typeof cacheByDefault}`)
  }

  if (typeof type !== 'undefined' && type !== 'shared' && type !== 'private') {
    throw new TypeError(`expected opts.type to be shared, private, or undefined, got ${typeof type}`)
  }

  const globalOpts = {
    store,
    methods,
    cacheByDefault,
    type
  }

  const safeMethodsToNotCache = util.safeHTTPMethods.filter(method => methods.includes(method) === false)

  return dispatch => {
    return (opts, handler) => {
      if (!opts.origin || safeMethodsToNotCache.includes(opts.method)) {
        // Not a method we want to cache or we don't have the origin, skip
        return dispatch(opts, handler)
      }

      const reqCacheControl = opts.headers?.['cache-control']
        ? parseCacheControlHeader(opts.headers['cache-control'])
        : undefined

      if (reqCacheControl?.['no-store']) {
        return dispatch(opts, handler)
      }

      /**
       * @type {import('../../types/cache-interceptor.d.ts').default.CacheKey}
       */
      const cacheKey = makeCacheKey(opts)
      const result = store.get(cacheKey)

      if (result && typeof result.then === 'function') {
        result.then(result => {
          handleResult(dispatch,
            globalOpts,
            cacheKey,
            handler,
            opts,
            reqCacheControl,
            result
          )
        })
      } else {
        handleResult(
          dispatch,
          globalOpts,
          cacheKey,
          handler,
          opts,
          reqCacheControl,
          result
        )
      }

      return true
    }
  }
}
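This file is what undici exposes publicly as interceptors.cache. A minimal composition sketch, assuming the public API of the undici version being vendored here (option names follow the destructuring in module.exports above; the cacheByDefault semantics are our reading of it, treat it as an assumption):

// Sketch: composing the cache interceptor onto an Agent.
const { Agent, interceptors } = require('undici')

const agent = new Agent().compose(
  interceptors.cache({
    methods: ['GET'],   // only safe methods may be listed
    type: 'shared',     // 'shared' or 'private' cache semantics
    cacheByDefault: 60  // assumption: default freshness when headers give none
  })
)
// `store` defaults to an in-memory MemoryCacheStore when omitted.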
432 node_modules/undici/lib/interceptor/dns.js generated vendored Normal file
@@ -0,0 +1,432 @@
'use strict'
const { isIP } = require('node:net')
const { lookup } = require('node:dns')
const DecoratorHandler = require('../handler/decorator-handler')
const { InvalidArgumentError, InformationalError } = require('../core/errors')
const maxInt = Math.pow(2, 31) - 1

class DNSInstance {
  #maxTTL = 0
  #maxItems = 0
  #records = new Map()
  dualStack = true
  affinity = null
  lookup = null
  pick = null

  constructor (opts) {
    this.#maxTTL = opts.maxTTL
    this.#maxItems = opts.maxItems
    this.dualStack = opts.dualStack
    this.affinity = opts.affinity
    this.lookup = opts.lookup ?? this.#defaultLookup
    this.pick = opts.pick ?? this.#defaultPick
  }

  get full () {
    return this.#records.size === this.#maxItems
  }

  runLookup (origin, opts, cb) {
    const ips = this.#records.get(origin.hostname)

    // If full, we just return the origin
    if (ips == null && this.full) {
      cb(null, origin)
      return
    }

    const newOpts = {
      affinity: this.affinity,
      dualStack: this.dualStack,
      lookup: this.lookup,
      pick: this.pick,
      ...opts.dns,
      maxTTL: this.#maxTTL,
      maxItems: this.#maxItems
    }

    // If no IPs we lookup
    if (ips == null) {
      this.lookup(origin, newOpts, (err, addresses) => {
        if (err || addresses == null || addresses.length === 0) {
          cb(err ?? new InformationalError('No DNS entries found'))
          return
        }

        this.setRecords(origin, addresses)
        const records = this.#records.get(origin.hostname)

        const ip = this.pick(
          origin,
          records,
          newOpts.affinity
        )

        let port
        if (typeof ip.port === 'number') {
          port = `:${ip.port}`
        } else if (origin.port !== '') {
          port = `:${origin.port}`
        } else {
          port = ''
        }

        cb(
          null,
          new URL(`${origin.protocol}//${
            ip.family === 6 ? `[${ip.address}]` : ip.address
          }${port}`)
        )
      })
    } else {
      // If there's IPs we pick
      const ip = this.pick(
        origin,
        ips,
        newOpts.affinity
      )

      // If no IPs we lookup - deleting old records
      if (ip == null) {
        this.#records.delete(origin.hostname)
        this.runLookup(origin, opts, cb)
        return
      }

      let port
      if (typeof ip.port === 'number') {
        port = `:${ip.port}`
      } else if (origin.port !== '') {
        port = `:${origin.port}`
      } else {
        port = ''
      }

      cb(
        null,
        new URL(`${origin.protocol}//${
          ip.family === 6 ? `[${ip.address}]` : ip.address
        }${port}`)
      )
    }
  }

  #defaultLookup (origin, opts, cb) {
    lookup(
      origin.hostname,
      {
        all: true,
        family: this.dualStack === false ? this.affinity : 0,
        order: 'ipv4first'
      },
      (err, addresses) => {
        if (err) {
          return cb(err)
        }

        const results = new Map()

        for (const addr of addresses) {
          // On Linux duplicate entries can show up; keep only the
          // latest record per address and family
          results.set(`${addr.address}:${addr.family}`, addr)
        }

        cb(null, results.values())
      }
    )
  }

  #defaultPick (origin, hostnameRecords, affinity) {
    let ip = null
    const { records, offset } = hostnameRecords

    let family
    if (this.dualStack) {
      if (affinity == null) {
        // Balance between ip families
        if (offset == null || offset === maxInt) {
          hostnameRecords.offset = 0
          affinity = 4
        } else {
          hostnameRecords.offset++
          affinity = (hostnameRecords.offset & 1) === 1 ? 6 : 4
        }
      }

      if (records[affinity] != null && records[affinity].ips.length > 0) {
        family = records[affinity]
      } else {
        family = records[affinity === 4 ? 6 : 4]
      }
    } else {
      family = records[affinity]
    }

    // If no IPs we return null
    if (family == null || family.ips.length === 0) {
      return ip
    }

    if (family.offset == null || family.offset === maxInt) {
      family.offset = 0
    } else {
      family.offset++
    }

    const position = family.offset % family.ips.length
    ip = family.ips[position] ?? null

    if (ip == null) {
      return ip
    }

    if (Date.now() - ip.timestamp > ip.ttl) { // record TTL is already in ms
      // We delete expired records
      // It is possible that they have different TTL, so we manage them individually
      family.ips.splice(position, 1)
      return this.pick(origin, hostnameRecords, affinity)
    }

    return ip
  }

  pickFamily (origin, ipFamily) {
    const records = this.#records.get(origin.hostname)?.records
    if (!records) {
      return null
    }

    const family = records[ipFamily]
    if (!family) {
      return null
    }

    if (family.offset == null || family.offset === maxInt) {
      family.offset = 0
    } else {
      family.offset++
    }

    const position = family.offset % family.ips.length
    const ip = family.ips[position] ?? null
    if (ip == null) {
      return ip
    }

    if (Date.now() - ip.timestamp > ip.ttl) { // record TTL is already in ms
      // We delete expired records
      // It is possible that they have different TTL, so we manage them individually
      family.ips.splice(position, 1)
    }

    return ip
  }

  setRecords (origin, addresses) {
    const timestamp = Date.now()
    const records = { records: { 4: null, 6: null } }
    for (const record of addresses) {
      record.timestamp = timestamp
      if (typeof record.ttl === 'number') {
        // The record TTL is expected to be in ms
        record.ttl = Math.min(record.ttl, this.#maxTTL)
      } else {
        record.ttl = this.#maxTTL
      }

      const familyRecords = records.records[record.family] ?? { ips: [] }

      familyRecords.ips.push(record)
      records.records[record.family] = familyRecords
    }

    this.#records.set(origin.hostname, records)
  }

  deleteRecords (origin) {
    this.#records.delete(origin.hostname)
  }

  getHandler (meta, opts) {
    return new DNSDispatchHandler(this, meta, opts)
  }
}

class DNSDispatchHandler extends DecoratorHandler {
  #state = null
  #opts = null
  #dispatch = null
  #origin = null
  #controller = null
  #newOrigin = null
  #firstTry = true

  constructor (state, { origin, handler, dispatch, newOrigin }, opts) {
    super(handler)
    this.#origin = origin
    this.#newOrigin = newOrigin
    this.#opts = { ...opts }
    this.#state = state
    this.#dispatch = dispatch
  }

  onResponseError (controller, err) {
    switch (err.code) {
      case 'ETIMEDOUT':
      case 'ECONNREFUSED': {
        if (this.#state.dualStack) {
          if (!this.#firstTry) {
            super.onResponseError(controller, err)
            return
          }
          this.#firstTry = false

          // Pick an ip address from the other family
          const otherFamily = this.#newOrigin.hostname[0] === '[' ? 4 : 6
          const ip = this.#state.pickFamily(this.#origin, otherFamily)
          if (ip == null) {
            super.onResponseError(controller, err)
            return
          }

          let port
          if (typeof ip.port === 'number') {
            port = `:${ip.port}`
          } else if (this.#origin.port !== '') {
            port = `:${this.#origin.port}`
          } else {
            port = ''
          }

          const dispatchOpts = {
            ...this.#opts,
            origin: `${this.#origin.protocol}//${
              ip.family === 6 ? `[${ip.address}]` : ip.address
            }${port}`
          }
          this.#dispatch(dispatchOpts, this)
          return
        }

        // if dual-stack disabled, we error out
        super.onResponseError(controller, err)
        break
      }
      case 'ENOTFOUND':
        this.#state.deleteRecords(this.#origin)
        super.onResponseError(controller, err)
        break
      default:
        super.onResponseError(controller, err)
        break
    }
  }
}

module.exports = interceptorOpts => {
  if (
    interceptorOpts?.maxTTL != null &&
    (typeof interceptorOpts?.maxTTL !== 'number' || interceptorOpts?.maxTTL < 0)
  ) {
    throw new InvalidArgumentError('Invalid maxTTL. Must be a positive number')
  }

  if (
    interceptorOpts?.maxItems != null &&
    (typeof interceptorOpts?.maxItems !== 'number' ||
      interceptorOpts?.maxItems < 1)
  ) {
    throw new InvalidArgumentError(
      'Invalid maxItems. Must be a positive number and greater than zero'
    )
  }

  if (
    interceptorOpts?.affinity != null &&
    interceptorOpts?.affinity !== 4 &&
    interceptorOpts?.affinity !== 6
  ) {
    throw new InvalidArgumentError('Invalid affinity. Must be either 4 or 6')
  }

  if (
    interceptorOpts?.dualStack != null &&
    typeof interceptorOpts?.dualStack !== 'boolean'
  ) {
    throw new InvalidArgumentError('Invalid dualStack. Must be a boolean')
  }

  if (
    interceptorOpts?.lookup != null &&
    typeof interceptorOpts?.lookup !== 'function'
  ) {
    throw new InvalidArgumentError('Invalid lookup. Must be a function')
  }

  if (
    interceptorOpts?.pick != null &&
    typeof interceptorOpts?.pick !== 'function'
  ) {
    throw new InvalidArgumentError('Invalid pick. Must be a function')
  }

  const dualStack = interceptorOpts?.dualStack ?? true
  let affinity
  if (dualStack) {
    affinity = interceptorOpts?.affinity ?? null
  } else {
    affinity = interceptorOpts?.affinity ?? 4
  }

  const opts = {
    maxTTL: interceptorOpts?.maxTTL ?? 10e3, // Expressed in ms
    lookup: interceptorOpts?.lookup ?? null,
    pick: interceptorOpts?.pick ?? null,
    dualStack,
    affinity,
    maxItems: interceptorOpts?.maxItems ?? Infinity
  }

  const instance = new DNSInstance(opts)

  return dispatch => {
    return function dnsInterceptor (origDispatchOpts, handler) {
      const origin =
        origDispatchOpts.origin.constructor === URL
          ? origDispatchOpts.origin
          : new URL(origDispatchOpts.origin)

      if (isIP(origin.hostname) !== 0) {
        return dispatch(origDispatchOpts, handler)
      }

      instance.runLookup(origin, origDispatchOpts, (err, newOrigin) => {
        if (err) {
          return handler.onResponseError(null, err)
        }

        const dispatchOpts = {
          ...origDispatchOpts,
          servername: origin.hostname, // For SNI on TLS
          origin: newOrigin.origin,
          headers: {
            host: origin.host,
            ...origDispatchOpts.headers
          }
        }

        dispatch(
          dispatchOpts,
          instance.getHandler(
            { origin, dispatch, handler, newOrigin },
            origDispatchOpts
          )
        )
      })

      return true
    }
  }
}
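Composed via interceptors.dns, this caches lookups per hostname up to maxTTL/maxItems, round-robins across records, and (as DNSDispatchHandler.onResponseError above shows) retries once on the other address family when dualStack is enabled and the connection times out or is refused. Usage sketch against the public undici API (defaults mirror the opts object above):

// Sketch: DNS caching + dual-stack balancing on an Agent.
const { Agent, interceptors } = require('undici')

const agent = new Agent().compose(
  interceptors.dns({
    maxTTL: 10e3,      // ms to keep resolved records
    maxItems: Infinity,
    dualStack: true,   // alternate between IPv4/IPv6 records
    affinity: null     // or 4 / 6 to pin a family
  })
)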
111 node_modules/undici/lib/interceptor/dump.js generated vendored Normal file
@@ -0,0 +1,111 @@
'use strict'

const { InvalidArgumentError, RequestAbortedError } = require('../core/errors')
const DecoratorHandler = require('../handler/decorator-handler')

class DumpHandler extends DecoratorHandler {
  #maxSize = 1024 * 1024
  #dumped = false
  #size = 0
  #controller = null
  aborted = false
  reason = false

  constructor ({ maxSize, signal }, handler) {
    if (maxSize != null && (!Number.isFinite(maxSize) || maxSize < 1)) {
      throw new InvalidArgumentError('maxSize must be a number greater than 0')
    }

    super(handler)

    this.#maxSize = maxSize ?? this.#maxSize
    // this.#handler = handler
  }

  #abort (reason) {
    this.aborted = true
    this.reason = reason
  }

  onRequestStart (controller, context) {
    controller.abort = this.#abort.bind(this)
    this.#controller = controller

    return super.onRequestStart(controller, context)
  }

  onResponseStart (controller, statusCode, headers, statusMessage) {
    const contentLength = headers['content-length']

    if (contentLength != null && contentLength > this.#maxSize) {
      throw new RequestAbortedError(
        `Response size (${contentLength}) larger than maxSize (${
          this.#maxSize
        })`
      )
    }

    if (this.aborted === true) {
      return true
    }

    return super.onResponseStart(controller, statusCode, headers, statusMessage)
  }

  onResponseError (controller, err) {
    if (this.#dumped) {
      return
    }

    err = this.#controller.reason ?? err

    super.onResponseError(controller, err)
  }

  onResponseData (controller, chunk) {
    this.#size = this.#size + chunk.length

    if (this.#size >= this.#maxSize) {
      this.#dumped = true

      if (this.aborted === true) {
        super.onResponseError(controller, this.reason)
      } else {
        super.onResponseEnd(controller, {})
      }
    }

    return true
  }

  onResponseEnd (controller, trailers) {
    if (this.#dumped) {
      return
    }

    if (this.#controller.aborted === true) {
      super.onResponseError(controller, this.reason)
      return
    }

    super.onResponseEnd(controller, trailers)
  }
}

function createDumpInterceptor (
  { maxSize: defaultMaxSize } = {
    maxSize: 1024 * 1024
  }
) {
  return dispatch => {
    return function Intercept (opts, handler) {
      const { dumpMaxSize = defaultMaxSize } = opts

      const dumpHandler = new DumpHandler({ maxSize: dumpMaxSize, signal: opts.signal }, handler)

      return dispatch(opts, dumpHandler)
    }
  }
}

module.exports = createDumpInterceptor
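The dump interceptor consumes and discards response bodies up to maxSize instead of surfacing them, completing the response as soon as the cap is reached; per-request opts.dumpMaxSize overrides the interceptor-level default. Sketch against the public API:

// Sketch: discard bodies (e.g. when only status/headers matter).
const { Client, interceptors } = require('undici')

const client = new Client('http://example.com').compose(
  interceptors.dump({ maxSize: 1024 * 1024 })
)

// Per-request override, passed through dispatch opts:
// await client.request({ path: '/', method: 'GET', dumpMaxSize: 1024 })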
21 node_modules/undici/lib/interceptor/redirect.js generated vendored Normal file
@@ -0,0 +1,21 @@
'use strict'

const RedirectHandler = require('../handler/redirect-handler')

function createRedirectInterceptor ({ maxRedirections: defaultMaxRedirections } = {}) {
  return (dispatch) => {
    return function Intercept (opts, handler) {
      const { maxRedirections = defaultMaxRedirections, ...rest } = opts

      if (maxRedirections == null || maxRedirections === 0) {
        return dispatch(opts, handler)
      }

      const dispatchOpts = { ...rest, maxRedirections: 0 } // Stop sub dispatcher from also redirecting.
      const redirectHandler = new RedirectHandler(dispatch, maxRedirections, dispatchOpts, handler)
      return dispatch(dispatchOpts, redirectHandler)
    }
  }
}

module.exports = createRedirectInterceptor
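This new interceptor/redirect.js supersedes the removed interceptor/redirectInterceptor.js (deleted just below): same shape, but with a default parameter, an explicit `maxRedirections == null || === 0` guard, and no mutation of the caller's opts object. Composition sketch:

// Sketch: follow up to 3 redirects on any request through this dispatcher.
const { Agent, interceptors } = require('undici')

const agent = new Agent().compose(
  interceptors.redirect({ maxRedirections: 3 })
)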
21 node_modules/undici/lib/interceptor/redirectInterceptor.js generated vendored
@@ -1,21 +0,0 @@
'use strict'

const RedirectHandler = require('../handler/RedirectHandler')

function createRedirectInterceptor ({ maxRedirections: defaultMaxRedirections }) {
  return (dispatch) => {
    return function Intercept (opts, handler) {
      const { maxRedirections = defaultMaxRedirections } = opts

      if (!maxRedirections) {
        return dispatch(opts, handler)
      }

      const redirectHandler = new RedirectHandler(dispatch, maxRedirections, opts, handler)
      opts = { ...opts, maxRedirections: 0 } // Stop sub dispatcher from also redirecting.
      return dispatch(opts, redirectHandler)
    }
  }
}

module.exports = createRedirectInterceptor
95 node_modules/undici/lib/interceptor/response-error.js generated vendored Normal file
@@ -0,0 +1,95 @@
'use strict'

// const { parseHeaders } = require('../core/util')
const DecoratorHandler = require('../handler/decorator-handler')
const { ResponseError } = require('../core/errors')

class ResponseErrorHandler extends DecoratorHandler {
  #statusCode
  #contentType
  #decoder
  #headers
  #body

  constructor (_opts, { handler }) {
    super(handler)
  }

  #checkContentType (contentType) {
    return (this.#contentType ?? '').indexOf(contentType) === 0
  }

  onRequestStart (controller, context) {
    this.#statusCode = 0
    this.#contentType = null
    this.#decoder = null
    this.#headers = null
    this.#body = ''

    return super.onRequestStart(controller, context)
  }

  onResponseStart (controller, statusCode, headers, statusMessage) {
    this.#statusCode = statusCode
    this.#headers = headers
    this.#contentType = headers['content-type']

    if (this.#statusCode < 400) {
      return super.onResponseStart(controller, statusCode, headers, statusMessage)
    }

    if (this.#checkContentType('application/json') || this.#checkContentType('text/plain')) {
      this.#decoder = new TextDecoder('utf-8')
    }
  }

  onResponseData (controller, chunk) {
    if (this.#statusCode < 400) {
      return super.onResponseData(controller, chunk)
    }

    this.#body += this.#decoder?.decode(chunk, { stream: true }) ?? ''
  }

  onResponseEnd (controller, trailers) {
    if (this.#statusCode >= 400) {
      this.#body += this.#decoder?.decode(undefined, { stream: false }) ?? ''

      if (this.#checkContentType('application/json')) {
        try {
          this.#body = JSON.parse(this.#body)
        } catch {
          // Do nothing...
        }
      }

      let err
      const stackTraceLimit = Error.stackTraceLimit
      Error.stackTraceLimit = 0
      try {
        err = new ResponseError('Response Error', this.#statusCode, {
          body: this.#body,
          headers: this.#headers
        })
      } finally {
        Error.stackTraceLimit = stackTraceLimit
      }

      super.onResponseError(controller, err)
    } else {
      super.onResponseEnd(controller, trailers)
    }
  }

  onResponseError (controller, err) {
    super.onResponseError(controller, err)
  }
}

module.exports = () => {
  return (dispatch) => {
    return function Intercept (opts, handler) {
      return dispatch(opts, new ResponseErrorHandler(opts, { handler }))
    }
  }
}
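The responseError interceptor buffers 4xx/5xx bodies (decoding JSON and plain text) and surfaces them as a thrown ResponseError instead of a normal response; the Error.stackTraceLimit = 0 dance above keeps constructing that error cheap. Sketch against the public API:

// Sketch: turn >= 400 responses into rejections carrying the parsed body.
const { Client, interceptors } = require('undici')

const client = new Client('http://example.com').compose(
  interceptors.responseError()
)

// client.request(...) now rejects for 4xx/5xx; per the ResponseError
// construction above, the error carries statusCode, body and headers.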
19 node_modules/undici/lib/interceptor/retry.js generated vendored Normal file
@@ -0,0 +1,19 @@
'use strict'
const RetryHandler = require('../handler/retry-handler')

module.exports = globalOpts => {
  return dispatch => {
    return function retryInterceptor (opts, handler) {
      return dispatch(
        opts,
        new RetryHandler(
          { ...opts, retryOptions: { ...globalOpts, ...opts.retryOptions } },
          {
            handler,
            dispatch
          }
        )
      )
    }
  }
}
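This thin factory wires RetryHandler (see lib/handler/retry-handler.js earlier in this diff) into the compose pipeline; per-request opts.retryOptions override the interceptor-level defaults. Sketch (the option names are RetryHandler's retryOptions; treat the exact set as version-dependent):

// Sketch: retry failed requests with exponential backoff.
const { Agent, interceptors } = require('undici')

const agent = new Agent().compose(
  interceptors.retry({
    maxRetries: 3,
    minTimeout: 500,  // ms before the first retry (assumed default semantics)
    timeoutFactor: 2  // backoff multiplier between attempts
  })
)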
152 node_modules/undici/lib/llhttp/constants.d.ts generated vendored
@@ -1,112 +1,21 @@
import { IEnumMap } from './utils';
export declare type HTTPMode = 'loose' | 'strict';
export declare enum ERROR {
    OK = 0,
    INTERNAL = 1,
    STRICT = 2,
    LF_EXPECTED = 3,
    UNEXPECTED_CONTENT_LENGTH = 4,
    CLOSED_CONNECTION = 5,
    INVALID_METHOD = 6,
    INVALID_URL = 7,
    INVALID_CONSTANT = 8,
    INVALID_VERSION = 9,
    INVALID_HEADER_TOKEN = 10,
    INVALID_CONTENT_LENGTH = 11,
    INVALID_CHUNK_SIZE = 12,
    INVALID_STATUS = 13,
    INVALID_EOF_STATE = 14,
    INVALID_TRANSFER_ENCODING = 15,
    CB_MESSAGE_BEGIN = 16,
    CB_HEADERS_COMPLETE = 17,
    CB_MESSAGE_COMPLETE = 18,
    CB_CHUNK_HEADER = 19,
    CB_CHUNK_COMPLETE = 20,
    PAUSED = 21,
    PAUSED_UPGRADE = 22,
    PAUSED_H2_UPGRADE = 23,
    USER = 24
}
export declare enum TYPE {
    BOTH = 0,
    REQUEST = 1,
    RESPONSE = 2
}
export declare enum FLAGS {
    CONNECTION_KEEP_ALIVE = 1,
    CONNECTION_CLOSE = 2,
    CONNECTION_UPGRADE = 4,
    CHUNKED = 8,
    UPGRADE = 16,
    CONTENT_LENGTH = 32,
    SKIPBODY = 64,
    TRAILING = 128,
    TRANSFER_ENCODING = 512
}
export declare enum LENIENT_FLAGS {
    HEADERS = 1,
    CHUNKED_LENGTH = 2,
    KEEP_ALIVE = 4
}
export declare enum METHODS {
    DELETE = 0,
    GET = 1,
    HEAD = 2,
    POST = 3,
    PUT = 4,
    CONNECT = 5,
    OPTIONS = 6,
    TRACE = 7,
    COPY = 8,
    LOCK = 9,
    MKCOL = 10,
    MOVE = 11,
    PROPFIND = 12,
    PROPPATCH = 13,
    SEARCH = 14,
    UNLOCK = 15,
    BIND = 16,
    REBIND = 17,
    UNBIND = 18,
    ACL = 19,
    REPORT = 20,
    MKACTIVITY = 21,
    CHECKOUT = 22,
    MERGE = 23,
    'M-SEARCH' = 24,
    NOTIFY = 25,
    SUBSCRIBE = 26,
    UNSUBSCRIBE = 27,
    PATCH = 28,
    PURGE = 29,
    MKCALENDAR = 30,
    LINK = 31,
    UNLINK = 32,
    SOURCE = 33,
    PRI = 34,
    DESCRIBE = 35,
    ANNOUNCE = 36,
    SETUP = 37,
    PLAY = 38,
    PAUSE = 39,
    TEARDOWN = 40,
    GET_PARAMETER = 41,
    SET_PARAMETER = 42,
    REDIRECT = 43,
    RECORD = 44,
    FLUSH = 45
}
export declare const METHODS_HTTP: METHODS[];
export declare const METHODS_ICE: METHODS[];
export declare const METHODS_RTSP: METHODS[];
export declare const METHOD_MAP: IEnumMap;
export declare const H_METHOD_MAP: IEnumMap;
export declare enum FINISH {
    SAFE = 0,
    SAFE_WITH_CB = 1,
    UNSAFE = 2
}
export declare type CharList = Array<string | number>;
export type IntDict = Record<string, number>;
export declare const ERROR: IntDict;
export declare const TYPE: IntDict;
export declare const FLAGS: IntDict;
export declare const LENIENT_FLAGS: IntDict;
export declare const METHODS: IntDict;
export declare const STATUSES: IntDict;
export declare const FINISH: IntDict;
export declare const HEADER_STATE: IntDict;
export declare const METHODS_HTTP: number[];
export declare const METHODS_ICE: number[];
export declare const METHODS_RTSP: number[];
export declare const METHOD_MAP: IntDict;
export declare const H_METHOD_MAP: {
    [k: string]: number;
};
export declare const STATUSES_HTTP: number[];
export type CharList = Array<string | number>;
export declare const ALPHA: CharList;
export declare const NUM_MAP: {
    0: number;
@@ -148,13 +57,13 @@ export declare const NUM: CharList;
export declare const ALPHANUM: CharList;
export declare const MARK: CharList;
export declare const USERINFO_CHARS: CharList;
export declare const STRICT_URL_CHAR: CharList;
export declare const URL_CHAR: CharList;
export declare const HEX: CharList;
export declare const STRICT_TOKEN: CharList;
export declare const TOKEN: CharList;
export declare const HEADER_CHARS: CharList;
export declare const CONNECTION_TOKEN_CHARS: CharList;
export declare const QUOTED_STRING: CharList;
export declare const HTAB_SP_VCHAR_OBS_TEXT: CharList;
export declare const MAJOR: {
    0: number;
    1: number;
@@ -179,21 +88,10 @@ export declare const MINOR: {
    8: number;
    9: number;
};
export declare enum HEADER_STATE {
    GENERAL = 0,
    CONNECTION = 1,
    CONTENT_LENGTH = 2,
    TRANSFER_ENCODING = 3,
    UPGRADE = 4,
    CONNECTION_KEEP_ALIVE = 5,
    CONNECTION_CLOSE = 6,
    CONNECTION_UPGRADE = 7,
    TRANSFER_ENCODING_CHUNKED = 8
}
export declare const SPECIAL_HEADERS: {
    connection: HEADER_STATE;
    'content-length': HEADER_STATE;
    'proxy-connection': HEADER_STATE;
    'transfer-encoding': HEADER_STATE;
    upgrade: HEADER_STATE;
    connection: number;
    'content-length': number;
    'proxy-connection': number;
    'transfer-encoding': number;
    upgrade: number;
};
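The typings change mirrors the compiled output below: the TypeScript enums (ERROR, METHODS, HEADER_STATE, ...) become plain IntDict (Record<string, number>) constants, so consumers index them by name rather than relying on enum reverse mappings. A consuming sketch in plain JS:

// Sketch: the llhttp constants are now plain name -> number maps.
const { METHODS, ERROR } = require('undici/lib/llhttp/constants')

console.log(METHODS.GET)   // 1
console.log(ERROR.PAUSED)  // 21
// Reverse lookups (e.g. METHODS[1] === 'GET') no longer exist; build your
// own inverse map if you need one.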
604 node_modules/undici/lib/llhttp/constants.js generated vendored
@@ -1,192 +1,416 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SPECIAL_HEADERS = exports.HEADER_STATE = exports.MINOR = exports.MAJOR = exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS = exports.TOKEN = exports.STRICT_TOKEN = exports.HEX = exports.URL_CHAR = exports.STRICT_URL_CHAR = exports.USERINFO_CHARS = exports.MARK = exports.ALPHANUM = exports.NUM = exports.HEX_MAP = exports.NUM_MAP = exports.ALPHA = exports.FINISH = exports.H_METHOD_MAP = exports.METHOD_MAP = exports.METHODS_RTSP = exports.METHODS_ICE = exports.METHODS_HTTP = exports.METHODS = exports.LENIENT_FLAGS = exports.FLAGS = exports.TYPE = exports.ERROR = void 0;
exports.SPECIAL_HEADERS = exports.MINOR = exports.MAJOR = exports.HTAB_SP_VCHAR_OBS_TEXT = exports.QUOTED_STRING = exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS = exports.TOKEN = exports.HEX = exports.URL_CHAR = exports.USERINFO_CHARS = exports.MARK = exports.ALPHANUM = exports.NUM = exports.HEX_MAP = exports.NUM_MAP = exports.ALPHA = exports.STATUSES_HTTP = exports.H_METHOD_MAP = exports.METHOD_MAP = exports.METHODS_RTSP = exports.METHODS_ICE = exports.METHODS_HTTP = exports.HEADER_STATE = exports.FINISH = exports.STATUSES = exports.METHODS = exports.LENIENT_FLAGS = exports.FLAGS = exports.TYPE = exports.ERROR = void 0;
const utils_1 = require("./utils");
// C headers
var ERROR;
(function (ERROR) {
    ERROR[ERROR["OK"] = 0] = "OK";
    ERROR[ERROR["INTERNAL"] = 1] = "INTERNAL";
    ERROR[ERROR["STRICT"] = 2] = "STRICT";
    ERROR[ERROR["LF_EXPECTED"] = 3] = "LF_EXPECTED";
    ERROR[ERROR["UNEXPECTED_CONTENT_LENGTH"] = 4] = "UNEXPECTED_CONTENT_LENGTH";
    ERROR[ERROR["CLOSED_CONNECTION"] = 5] = "CLOSED_CONNECTION";
    ERROR[ERROR["INVALID_METHOD"] = 6] = "INVALID_METHOD";
    ERROR[ERROR["INVALID_URL"] = 7] = "INVALID_URL";
    ERROR[ERROR["INVALID_CONSTANT"] = 8] = "INVALID_CONSTANT";
    ERROR[ERROR["INVALID_VERSION"] = 9] = "INVALID_VERSION";
    ERROR[ERROR["INVALID_HEADER_TOKEN"] = 10] = "INVALID_HEADER_TOKEN";
    ERROR[ERROR["INVALID_CONTENT_LENGTH"] = 11] = "INVALID_CONTENT_LENGTH";
    ERROR[ERROR["INVALID_CHUNK_SIZE"] = 12] = "INVALID_CHUNK_SIZE";
    ERROR[ERROR["INVALID_STATUS"] = 13] = "INVALID_STATUS";
    ERROR[ERROR["INVALID_EOF_STATE"] = 14] = "INVALID_EOF_STATE";
    ERROR[ERROR["INVALID_TRANSFER_ENCODING"] = 15] = "INVALID_TRANSFER_ENCODING";
    ERROR[ERROR["CB_MESSAGE_BEGIN"] = 16] = "CB_MESSAGE_BEGIN";
    ERROR[ERROR["CB_HEADERS_COMPLETE"] = 17] = "CB_HEADERS_COMPLETE";
    ERROR[ERROR["CB_MESSAGE_COMPLETE"] = 18] = "CB_MESSAGE_COMPLETE";
    ERROR[ERROR["CB_CHUNK_HEADER"] = 19] = "CB_CHUNK_HEADER";
    ERROR[ERROR["CB_CHUNK_COMPLETE"] = 20] = "CB_CHUNK_COMPLETE";
    ERROR[ERROR["PAUSED"] = 21] = "PAUSED";
    ERROR[ERROR["PAUSED_UPGRADE"] = 22] = "PAUSED_UPGRADE";
    ERROR[ERROR["PAUSED_H2_UPGRADE"] = 23] = "PAUSED_H2_UPGRADE";
    ERROR[ERROR["USER"] = 24] = "USER";
})(ERROR = exports.ERROR || (exports.ERROR = {}));
var TYPE;
(function (TYPE) {
    TYPE[TYPE["BOTH"] = 0] = "BOTH";
    TYPE[TYPE["REQUEST"] = 1] = "REQUEST";
    TYPE[TYPE["RESPONSE"] = 2] = "RESPONSE";
})(TYPE = exports.TYPE || (exports.TYPE = {}));
var FLAGS;
(function (FLAGS) {
    FLAGS[FLAGS["CONNECTION_KEEP_ALIVE"] = 1] = "CONNECTION_KEEP_ALIVE";
    FLAGS[FLAGS["CONNECTION_CLOSE"] = 2] = "CONNECTION_CLOSE";
    FLAGS[FLAGS["CONNECTION_UPGRADE"] = 4] = "CONNECTION_UPGRADE";
    FLAGS[FLAGS["CHUNKED"] = 8] = "CHUNKED";
    FLAGS[FLAGS["UPGRADE"] = 16] = "UPGRADE";
    FLAGS[FLAGS["CONTENT_LENGTH"] = 32] = "CONTENT_LENGTH";
    FLAGS[FLAGS["SKIPBODY"] = 64] = "SKIPBODY";
    FLAGS[FLAGS["TRAILING"] = 128] = "TRAILING";
// Enums
exports.ERROR = {
    OK: 0,
    INTERNAL: 1,
    STRICT: 2,
    CR_EXPECTED: 25,
    LF_EXPECTED: 3,
    UNEXPECTED_CONTENT_LENGTH: 4,
    UNEXPECTED_SPACE: 30,
    CLOSED_CONNECTION: 5,
    INVALID_METHOD: 6,
    INVALID_URL: 7,
    INVALID_CONSTANT: 8,
    INVALID_VERSION: 9,
    INVALID_HEADER_TOKEN: 10,
    INVALID_CONTENT_LENGTH: 11,
    INVALID_CHUNK_SIZE: 12,
    INVALID_STATUS: 13,
    INVALID_EOF_STATE: 14,
    INVALID_TRANSFER_ENCODING: 15,
    CB_MESSAGE_BEGIN: 16,
    CB_HEADERS_COMPLETE: 17,
    CB_MESSAGE_COMPLETE: 18,
    CB_CHUNK_HEADER: 19,
    CB_CHUNK_COMPLETE: 20,
    PAUSED: 21,
    PAUSED_UPGRADE: 22,
    PAUSED_H2_UPGRADE: 23,
    USER: 24,
    CB_URL_COMPLETE: 26,
    CB_STATUS_COMPLETE: 27,
    CB_METHOD_COMPLETE: 32,
    CB_VERSION_COMPLETE: 33,
    CB_HEADER_FIELD_COMPLETE: 28,
    CB_HEADER_VALUE_COMPLETE: 29,
    CB_CHUNK_EXTENSION_NAME_COMPLETE: 34,
    CB_CHUNK_EXTENSION_VALUE_COMPLETE: 35,
    CB_RESET: 31,
};
exports.TYPE = {
    BOTH: 0, // default
    REQUEST: 1,
    RESPONSE: 2,
};
exports.FLAGS = {
    CONNECTION_KEEP_ALIVE: 1 << 0,
    CONNECTION_CLOSE: 1 << 1,
    CONNECTION_UPGRADE: 1 << 2,
    CHUNKED: 1 << 3,
    UPGRADE: 1 << 4,
    CONTENT_LENGTH: 1 << 5,
    SKIPBODY: 1 << 6,
    TRAILING: 1 << 7,
    // 1 << 8 is unused
    FLAGS[FLAGS["TRANSFER_ENCODING"] = 512] = "TRANSFER_ENCODING";
})(FLAGS = exports.FLAGS || (exports.FLAGS = {}));
var LENIENT_FLAGS;
(function (LENIENT_FLAGS) {
    LENIENT_FLAGS[LENIENT_FLAGS["HEADERS"] = 1] = "HEADERS";
    LENIENT_FLAGS[LENIENT_FLAGS["CHUNKED_LENGTH"] = 2] = "CHUNKED_LENGTH";
    LENIENT_FLAGS[LENIENT_FLAGS["KEEP_ALIVE"] = 4] = "KEEP_ALIVE";
})(LENIENT_FLAGS = exports.LENIENT_FLAGS || (exports.LENIENT_FLAGS = {}));
var METHODS;
(function (METHODS) {
    METHODS[METHODS["DELETE"] = 0] = "DELETE";
    METHODS[METHODS["GET"] = 1] = "GET";
    METHODS[METHODS["HEAD"] = 2] = "HEAD";
    METHODS[METHODS["POST"] = 3] = "POST";
    METHODS[METHODS["PUT"] = 4] = "PUT";
    TRANSFER_ENCODING: 1 << 9,
};
exports.LENIENT_FLAGS = {
    HEADERS: 1 << 0,
    CHUNKED_LENGTH: 1 << 1,
    KEEP_ALIVE: 1 << 2,
    TRANSFER_ENCODING: 1 << 3,
    VERSION: 1 << 4,
    DATA_AFTER_CLOSE: 1 << 5,
    OPTIONAL_LF_AFTER_CR: 1 << 6,
    OPTIONAL_CRLF_AFTER_CHUNK: 1 << 7,
    OPTIONAL_CR_BEFORE_LF: 1 << 8,
    SPACES_AFTER_CHUNK_SIZE: 1 << 9,
};
exports.METHODS = {
    'DELETE': 0,
    'GET': 1,
    'HEAD': 2,
    'POST': 3,
    'PUT': 4,
    /* pathological */
    METHODS[METHODS["CONNECT"] = 5] = "CONNECT";
    METHODS[METHODS["OPTIONS"] = 6] = "OPTIONS";
    METHODS[METHODS["TRACE"] = 7] = "TRACE";
    'CONNECT': 5,
    'OPTIONS': 6,
    'TRACE': 7,
    /* WebDAV */
    METHODS[METHODS["COPY"] = 8] = "COPY";
    METHODS[METHODS["LOCK"] = 9] = "LOCK";
    METHODS[METHODS["MKCOL"] = 10] = "MKCOL";
    METHODS[METHODS["MOVE"] = 11] = "MOVE";
    METHODS[METHODS["PROPFIND"] = 12] = "PROPFIND";
    METHODS[METHODS["PROPPATCH"] = 13] = "PROPPATCH";
    METHODS[METHODS["SEARCH"] = 14] = "SEARCH";
    METHODS[METHODS["UNLOCK"] = 15] = "UNLOCK";
    METHODS[METHODS["BIND"] = 16] = "BIND";
    METHODS[METHODS["REBIND"] = 17] = "REBIND";
    METHODS[METHODS["UNBIND"] = 18] = "UNBIND";
    METHODS[METHODS["ACL"] = 19] = "ACL";
    'COPY': 8,
    'LOCK': 9,
    'MKCOL': 10,
    'MOVE': 11,
    'PROPFIND': 12,
    'PROPPATCH': 13,
    'SEARCH': 14,
    'UNLOCK': 15,
    'BIND': 16,
    'REBIND': 17,
    'UNBIND': 18,
    'ACL': 19,
    /* subversion */
    METHODS[METHODS["REPORT"] = 20] = "REPORT";
    METHODS[METHODS["MKACTIVITY"] = 21] = "MKACTIVITY";
    METHODS[METHODS["CHECKOUT"] = 22] = "CHECKOUT";
    METHODS[METHODS["MERGE"] = 23] = "MERGE";
    'REPORT': 20,
    'MKACTIVITY': 21,
    'CHECKOUT': 22,
    'MERGE': 23,
    /* upnp */
    METHODS[METHODS["M-SEARCH"] = 24] = "M-SEARCH";
    METHODS[METHODS["NOTIFY"] = 25] = "NOTIFY";
    METHODS[METHODS["SUBSCRIBE"] = 26] = "SUBSCRIBE";
    METHODS[METHODS["UNSUBSCRIBE"] = 27] = "UNSUBSCRIBE";
    'M-SEARCH': 24,
    'NOTIFY': 25,
    'SUBSCRIBE': 26,
    'UNSUBSCRIBE': 27,
    /* RFC-5789 */
    METHODS[METHODS["PATCH"] = 28] = "PATCH";
    METHODS[METHODS["PURGE"] = 29] = "PURGE";
    'PATCH': 28,
    'PURGE': 29,
    /* CalDAV */
    METHODS[METHODS["MKCALENDAR"] = 30] = "MKCALENDAR";
    'MKCALENDAR': 30,
    /* RFC-2068, section 19.6.1.2 */
    METHODS[METHODS["LINK"] = 31] = "LINK";
    METHODS[METHODS["UNLINK"] = 32] = "UNLINK";
    'LINK': 31,
    'UNLINK': 32,
    /* icecast */
    METHODS[METHODS["SOURCE"] = 33] = "SOURCE";
    'SOURCE': 33,
    /* RFC-7540, section 11.6 */
    METHODS[METHODS["PRI"] = 34] = "PRI";
    'PRI': 34,
    /* RFC-2326 RTSP */
    METHODS[METHODS["DESCRIBE"] = 35] = "DESCRIBE";
    METHODS[METHODS["ANNOUNCE"] = 36] = "ANNOUNCE";
    METHODS[METHODS["SETUP"] = 37] = "SETUP";
    METHODS[METHODS["PLAY"] = 38] = "PLAY";
    METHODS[METHODS["PAUSE"] = 39] = "PAUSE";
    METHODS[METHODS["TEARDOWN"] = 40] = "TEARDOWN";
    METHODS[METHODS["GET_PARAMETER"] = 41] = "GET_PARAMETER";
    METHODS[METHODS["SET_PARAMETER"] = 42] = "SET_PARAMETER";
    METHODS[METHODS["REDIRECT"] = 43] = "REDIRECT";
    METHODS[METHODS["RECORD"] = 44] = "RECORD";
    'DESCRIBE': 35,
    'ANNOUNCE': 36,
    'SETUP': 37,
    'PLAY': 38,
    'PAUSE': 39,
    'TEARDOWN': 40,
    'GET_PARAMETER': 41,
    'SET_PARAMETER': 42,
    'REDIRECT': 43,
    'RECORD': 44,
    /* RAOP */
    METHODS[METHODS["FLUSH"] = 45] = "FLUSH";
})(METHODS = exports.METHODS || (exports.METHODS = {}));
    'FLUSH': 45,
    /* DRAFT https://www.ietf.org/archive/id/draft-ietf-httpbis-safe-method-w-body-02.html */
    'QUERY': 46,
};
exports.STATUSES = {
    CONTINUE: 100,
    SWITCHING_PROTOCOLS: 101,
    PROCESSING: 102,
    EARLY_HINTS: 103,
    RESPONSE_IS_STALE: 110, // Unofficial
    REVALIDATION_FAILED: 111, // Unofficial
    DISCONNECTED_OPERATION: 112, // Unofficial
    HEURISTIC_EXPIRATION: 113, // Unofficial
    MISCELLANEOUS_WARNING: 199, // Unofficial
    OK: 200,
    CREATED: 201,
    ACCEPTED: 202,
    NON_AUTHORITATIVE_INFORMATION: 203,
    NO_CONTENT: 204,
    RESET_CONTENT: 205,
    PARTIAL_CONTENT: 206,
    MULTI_STATUS: 207,
    ALREADY_REPORTED: 208,
    TRANSFORMATION_APPLIED: 214, // Unofficial
    IM_USED: 226,
    MISCELLANEOUS_PERSISTENT_WARNING: 299, // Unofficial
    MULTIPLE_CHOICES: 300,
    MOVED_PERMANENTLY: 301,
    FOUND: 302,
    SEE_OTHER: 303,
    NOT_MODIFIED: 304,
    USE_PROXY: 305,
    SWITCH_PROXY: 306, // No longer used
    TEMPORARY_REDIRECT: 307,
    PERMANENT_REDIRECT: 308,
    BAD_REQUEST: 400,
    UNAUTHORIZED: 401,
    PAYMENT_REQUIRED: 402,
    FORBIDDEN: 403,
    NOT_FOUND: 404,
    METHOD_NOT_ALLOWED: 405,
    NOT_ACCEPTABLE: 406,
    PROXY_AUTHENTICATION_REQUIRED: 407,
    REQUEST_TIMEOUT: 408,
    CONFLICT: 409,
    GONE: 410,
    LENGTH_REQUIRED: 411,
    PRECONDITION_FAILED: 412,
    PAYLOAD_TOO_LARGE: 413,
    URI_TOO_LONG: 414,
    UNSUPPORTED_MEDIA_TYPE: 415,
    RANGE_NOT_SATISFIABLE: 416,
    EXPECTATION_FAILED: 417,
    IM_A_TEAPOT: 418,
    PAGE_EXPIRED: 419, // Unofficial
    ENHANCE_YOUR_CALM: 420, // Unofficial
    MISDIRECTED_REQUEST: 421,
    UNPROCESSABLE_ENTITY: 422,
    LOCKED: 423,
    FAILED_DEPENDENCY: 424,
    TOO_EARLY: 425,
    UPGRADE_REQUIRED: 426,
    PRECONDITION_REQUIRED: 428,
    TOO_MANY_REQUESTS: 429,
    REQUEST_HEADER_FIELDS_TOO_LARGE_UNOFFICIAL: 430, // Unofficial
    REQUEST_HEADER_FIELDS_TOO_LARGE: 431,
    LOGIN_TIMEOUT: 440, // Unofficial
    NO_RESPONSE: 444, // Unofficial
    RETRY_WITH: 449, // Unofficial
    BLOCKED_BY_PARENTAL_CONTROL: 450, // Unofficial
    UNAVAILABLE_FOR_LEGAL_REASONS: 451,
    CLIENT_CLOSED_LOAD_BALANCED_REQUEST: 460, // Unofficial
    INVALID_X_FORWARDED_FOR: 463, // Unofficial
    REQUEST_HEADER_TOO_LARGE: 494, // Unofficial
    SSL_CERTIFICATE_ERROR: 495, // Unofficial
    SSL_CERTIFICATE_REQUIRED: 496, // Unofficial
    HTTP_REQUEST_SENT_TO_HTTPS_PORT: 497, // Unofficial
    INVALID_TOKEN: 498, // Unofficial
    CLIENT_CLOSED_REQUEST: 499, // Unofficial
    INTERNAL_SERVER_ERROR: 500,
    NOT_IMPLEMENTED: 501,
    BAD_GATEWAY: 502,
    SERVICE_UNAVAILABLE: 503,
    GATEWAY_TIMEOUT: 504,
    HTTP_VERSION_NOT_SUPPORTED: 505,
    VARIANT_ALSO_NEGOTIATES: 506,
    INSUFFICIENT_STORAGE: 507,
    LOOP_DETECTED: 508,
    BANDWIDTH_LIMIT_EXCEEDED: 509,
    NOT_EXTENDED: 510,
    NETWORK_AUTHENTICATION_REQUIRED: 511,
    WEB_SERVER_UNKNOWN_ERROR: 520, // Unofficial
    WEB_SERVER_IS_DOWN: 521, // Unofficial
    CONNECTION_TIMEOUT: 522, // Unofficial
    ORIGIN_IS_UNREACHABLE: 523, // Unofficial
    TIMEOUT_OCCURED: 524, // Unofficial
    SSL_HANDSHAKE_FAILED: 525, // Unofficial
    INVALID_SSL_CERTIFICATE: 526, // Unofficial
    RAILGUN_ERROR: 527, // Unofficial
    SITE_IS_OVERLOADED: 529, // Unofficial
    SITE_IS_FROZEN: 530, // Unofficial
    IDENTITY_PROVIDER_AUTHENTICATION_ERROR: 561, // Unofficial
    NETWORK_READ_TIMEOUT: 598, // Unofficial
    NETWORK_CONNECT_TIMEOUT: 599, // Unofficial
};
exports.FINISH = {
    SAFE: 0,
    SAFE_WITH_CB: 1,
    UNSAFE: 2,
};
exports.HEADER_STATE = {
    GENERAL: 0,
    CONNECTION: 1,
    CONTENT_LENGTH: 2,
    TRANSFER_ENCODING: 3,
    UPGRADE: 4,
    CONNECTION_KEEP_ALIVE: 5,
    CONNECTION_CLOSE: 6,
    CONNECTION_UPGRADE: 7,
    TRANSFER_ENCODING_CHUNKED: 8,
};
// C headers
exports.METHODS_HTTP = [
    METHODS.DELETE,
    METHODS.GET,
    METHODS.HEAD,
    METHODS.POST,
    METHODS.PUT,
    METHODS.CONNECT,
    METHODS.OPTIONS,
    METHODS.TRACE,
    METHODS.COPY,
    METHODS.LOCK,
    METHODS.MKCOL,
    METHODS.MOVE,
    METHODS.PROPFIND,
    METHODS.PROPPATCH,
    METHODS.SEARCH,
    METHODS.UNLOCK,
    METHODS.BIND,
    METHODS.REBIND,
    METHODS.UNBIND,
    METHODS.ACL,
    METHODS.REPORT,
    METHODS.MKACTIVITY,
    METHODS.CHECKOUT,
    METHODS.MERGE,
    METHODS['M-SEARCH'],
    METHODS.NOTIFY,
    METHODS.SUBSCRIBE,
    METHODS.UNSUBSCRIBE,
    METHODS.PATCH,
    METHODS.PURGE,
    METHODS.MKCALENDAR,
    METHODS.LINK,
    METHODS.UNLINK,
    METHODS.PRI,
    exports.METHODS.DELETE,
    exports.METHODS.GET,
    exports.METHODS.HEAD,
    exports.METHODS.POST,
    exports.METHODS.PUT,
    exports.METHODS.CONNECT,
    exports.METHODS.OPTIONS,
    exports.METHODS.TRACE,
    exports.METHODS.COPY,
    exports.METHODS.LOCK,
    exports.METHODS.MKCOL,
    exports.METHODS.MOVE,
    exports.METHODS.PROPFIND,
    exports.METHODS.PROPPATCH,
    exports.METHODS.SEARCH,
    exports.METHODS.UNLOCK,
    exports.METHODS.BIND,
    exports.METHODS.REBIND,
    exports.METHODS.UNBIND,
    exports.METHODS.ACL,
    exports.METHODS.REPORT,
    exports.METHODS.MKACTIVITY,
    exports.METHODS.CHECKOUT,
    exports.METHODS.MERGE,
    exports.METHODS['M-SEARCH'],
    exports.METHODS.NOTIFY,
    exports.METHODS.SUBSCRIBE,
    exports.METHODS.UNSUBSCRIBE,
    exports.METHODS.PATCH,
    exports.METHODS.PURGE,
    exports.METHODS.MKCALENDAR,
    exports.METHODS.LINK,
    exports.METHODS.UNLINK,
    exports.METHODS.PRI,
    // TODO(indutny): should we allow it with HTTP?
    METHODS.SOURCE,
    exports.METHODS.SOURCE,
    exports.METHODS.QUERY,
];
exports.METHODS_ICE = [
    METHODS.SOURCE,
    exports.METHODS.SOURCE,
];
exports.METHODS_RTSP = [
    METHODS.OPTIONS,
    METHODS.DESCRIBE,
    METHODS.ANNOUNCE,
    METHODS.SETUP,
    METHODS.PLAY,
    METHODS.PAUSE,
    METHODS.TEARDOWN,
    METHODS.GET_PARAMETER,
    METHODS.SET_PARAMETER,
    METHODS.REDIRECT,
    METHODS.RECORD,
    METHODS.FLUSH,
    exports.METHODS.OPTIONS,
    exports.METHODS.DESCRIBE,
    exports.METHODS.ANNOUNCE,
    exports.METHODS.SETUP,
    exports.METHODS.PLAY,
    exports.METHODS.PAUSE,
    exports.METHODS.TEARDOWN,
    exports.METHODS.GET_PARAMETER,
    exports.METHODS.SET_PARAMETER,
    exports.METHODS.REDIRECT,
    exports.METHODS.RECORD,
    exports.METHODS.FLUSH,
    // For AirPlay
    METHODS.GET,
    METHODS.POST,
    exports.METHODS.GET,
    exports.METHODS.POST,
];
exports.METHOD_MAP = (0, utils_1.enumToMap)(exports.METHODS);
|
||||
exports.H_METHOD_MAP = Object.fromEntries(Object.entries(exports.METHODS).filter(([k]) => k.startsWith('H')));
|
||||
exports.STATUSES_HTTP = [
|
||||
exports.STATUSES.CONTINUE,
|
||||
exports.STATUSES.SWITCHING_PROTOCOLS,
|
||||
exports.STATUSES.PROCESSING,
|
||||
exports.STATUSES.EARLY_HINTS,
|
||||
exports.STATUSES.RESPONSE_IS_STALE,
|
||||
exports.STATUSES.REVALIDATION_FAILED,
|
||||
exports.STATUSES.DISCONNECTED_OPERATION,
|
||||
exports.STATUSES.HEURISTIC_EXPIRATION,
|
||||
exports.STATUSES.MISCELLANEOUS_WARNING,
|
||||
exports.STATUSES.OK,
|
||||
exports.STATUSES.CREATED,
|
||||
exports.STATUSES.ACCEPTED,
|
||||
exports.STATUSES.NON_AUTHORITATIVE_INFORMATION,
|
||||
exports.STATUSES.NO_CONTENT,
|
||||
exports.STATUSES.RESET_CONTENT,
|
||||
exports.STATUSES.PARTIAL_CONTENT,
|
||||
exports.STATUSES.MULTI_STATUS,
|
||||
exports.STATUSES.ALREADY_REPORTED,
|
||||
exports.STATUSES.TRANSFORMATION_APPLIED,
|
||||
exports.STATUSES.IM_USED,
|
||||
exports.STATUSES.MISCELLANEOUS_PERSISTENT_WARNING,
|
||||
exports.STATUSES.MULTIPLE_CHOICES,
|
||||
exports.STATUSES.MOVED_PERMANENTLY,
|
||||
exports.STATUSES.FOUND,
|
||||
exports.STATUSES.SEE_OTHER,
|
||||
exports.STATUSES.NOT_MODIFIED,
|
||||
exports.STATUSES.USE_PROXY,
|
||||
exports.STATUSES.SWITCH_PROXY,
|
||||
exports.STATUSES.TEMPORARY_REDIRECT,
|
||||
exports.STATUSES.PERMANENT_REDIRECT,
|
||||
exports.STATUSES.BAD_REQUEST,
|
||||
exports.STATUSES.UNAUTHORIZED,
|
||||
exports.STATUSES.PAYMENT_REQUIRED,
|
||||
exports.STATUSES.FORBIDDEN,
|
||||
exports.STATUSES.NOT_FOUND,
|
||||
exports.STATUSES.METHOD_NOT_ALLOWED,
|
||||
exports.STATUSES.NOT_ACCEPTABLE,
|
||||
exports.STATUSES.PROXY_AUTHENTICATION_REQUIRED,
|
||||
exports.STATUSES.REQUEST_TIMEOUT,
|
||||
exports.STATUSES.CONFLICT,
|
||||
exports.STATUSES.GONE,
|
||||
exports.STATUSES.LENGTH_REQUIRED,
|
||||
exports.STATUSES.PRECONDITION_FAILED,
|
||||
exports.STATUSES.PAYLOAD_TOO_LARGE,
|
||||
exports.STATUSES.URI_TOO_LONG,
|
||||
exports.STATUSES.UNSUPPORTED_MEDIA_TYPE,
|
||||
exports.STATUSES.RANGE_NOT_SATISFIABLE,
|
||||
exports.STATUSES.EXPECTATION_FAILED,
|
||||
exports.STATUSES.IM_A_TEAPOT,
|
||||
exports.STATUSES.PAGE_EXPIRED,
|
||||
exports.STATUSES.ENHANCE_YOUR_CALM,
|
||||
exports.STATUSES.MISDIRECTED_REQUEST,
|
||||
exports.STATUSES.UNPROCESSABLE_ENTITY,
|
||||
exports.STATUSES.LOCKED,
|
||||
exports.STATUSES.FAILED_DEPENDENCY,
|
||||
exports.STATUSES.TOO_EARLY,
|
||||
exports.STATUSES.UPGRADE_REQUIRED,
|
||||
exports.STATUSES.PRECONDITION_REQUIRED,
|
||||
exports.STATUSES.TOO_MANY_REQUESTS,
|
||||
exports.STATUSES.REQUEST_HEADER_FIELDS_TOO_LARGE_UNOFFICIAL,
|
||||
exports.STATUSES.REQUEST_HEADER_FIELDS_TOO_LARGE,
|
||||
exports.STATUSES.LOGIN_TIMEOUT,
|
||||
exports.STATUSES.NO_RESPONSE,
|
||||
exports.STATUSES.RETRY_WITH,
|
||||
exports.STATUSES.BLOCKED_BY_PARENTAL_CONTROL,
|
||||
exports.STATUSES.UNAVAILABLE_FOR_LEGAL_REASONS,
|
||||
exports.STATUSES.CLIENT_CLOSED_LOAD_BALANCED_REQUEST,
|
||||
exports.STATUSES.INVALID_X_FORWARDED_FOR,
|
||||
exports.STATUSES.REQUEST_HEADER_TOO_LARGE,
|
||||
exports.STATUSES.SSL_CERTIFICATE_ERROR,
|
||||
exports.STATUSES.SSL_CERTIFICATE_REQUIRED,
|
||||
exports.STATUSES.HTTP_REQUEST_SENT_TO_HTTPS_PORT,
|
||||
exports.STATUSES.INVALID_TOKEN,
|
||||
exports.STATUSES.CLIENT_CLOSED_REQUEST,
|
||||
exports.STATUSES.INTERNAL_SERVER_ERROR,
|
||||
exports.STATUSES.NOT_IMPLEMENTED,
|
||||
exports.STATUSES.BAD_GATEWAY,
|
||||
exports.STATUSES.SERVICE_UNAVAILABLE,
|
||||
exports.STATUSES.GATEWAY_TIMEOUT,
|
||||
exports.STATUSES.HTTP_VERSION_NOT_SUPPORTED,
|
||||
exports.STATUSES.VARIANT_ALSO_NEGOTIATES,
|
||||
exports.STATUSES.INSUFFICIENT_STORAGE,
|
||||
exports.STATUSES.LOOP_DETECTED,
|
||||
exports.STATUSES.BANDWIDTH_LIMIT_EXCEEDED,
|
||||
exports.STATUSES.NOT_EXTENDED,
|
||||
exports.STATUSES.NETWORK_AUTHENTICATION_REQUIRED,
|
||||
exports.STATUSES.WEB_SERVER_UNKNOWN_ERROR,
|
||||
exports.STATUSES.WEB_SERVER_IS_DOWN,
|
||||
exports.STATUSES.CONNECTION_TIMEOUT,
|
||||
exports.STATUSES.ORIGIN_IS_UNREACHABLE,
|
||||
exports.STATUSES.TIMEOUT_OCCURED,
|
||||
exports.STATUSES.SSL_HANDSHAKE_FAILED,
|
||||
exports.STATUSES.INVALID_SSL_CERTIFICATE,
|
||||
exports.STATUSES.RAILGUN_ERROR,
|
||||
exports.STATUSES.SITE_IS_OVERLOADED,
|
||||
exports.STATUSES.SITE_IS_FROZEN,
|
||||
exports.STATUSES.IDENTITY_PROVIDER_AUTHENTICATION_ERROR,
|
||||
exports.STATUSES.NETWORK_READ_TIMEOUT,
|
||||
exports.STATUSES.NETWORK_CONNECT_TIMEOUT,
|
||||
];
|
||||
exports.METHOD_MAP = utils_1.enumToMap(METHODS);
|
||||
exports.H_METHOD_MAP = {};
|
||||
Object.keys(exports.METHOD_MAP).forEach((key) => {
|
||||
if (/^H/.test(key)) {
|
||||
exports.H_METHOD_MAP[key] = exports.METHOD_MAP[key];
|
||||
}
|
||||
});
|
||||
var FINISH;
|
||||
(function (FINISH) {
|
||||
FINISH[FINISH["SAFE"] = 0] = "SAFE";
|
||||
FINISH[FINISH["SAFE_WITH_CB"] = 1] = "SAFE_WITH_CB";
|
||||
FINISH[FINISH["UNSAFE"] = 2] = "UNSAFE";
|
||||
})(FINISH = exports.FINISH || (exports.FINISH = {}));
|
||||
exports.ALPHA = [];
|
||||
for (let i = 'A'.charCodeAt(0); i <= 'Z'.charCodeAt(0); i++) {
|
||||
// Upper case
|
||||
@ -213,7 +437,7 @@ exports.USERINFO_CHARS = exports.ALPHANUM
|
||||
.concat(exports.MARK)
|
||||
.concat(['%', ';', ':', '&', '=', '+', '$', ',']);
|
||||
// TODO(indutny): use RFC
|
||||
exports.STRICT_URL_CHAR = [
|
||||
exports.URL_CHAR = [
|
||||
'!', '"', '$', '%', '&', '\'',
|
||||
'(', ')', '*', '+', ',', '-', '.', '/',
|
||||
':', ';', '<', '=', '>',
|
||||
@ -221,12 +445,6 @@ exports.STRICT_URL_CHAR = [
|
||||
'`',
|
||||
'{', '|', '}', '~',
|
||||
].concat(exports.ALPHANUM);
|
||||
exports.URL_CHAR = exports.STRICT_URL_CHAR
|
||||
.concat(['\t', '\f']);
|
||||
// All characters with 0x80 bit set to 1
|
||||
for (let i = 0x80; i <= 0xff; i++) {
|
||||
exports.URL_CHAR.push(i);
|
||||
}
|
||||
exports.HEX = exports.NUM.concat(['a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', 'D', 'E', 'F']);
|
||||
/* Tokens as defined by rfc 2616. Also lowercases them.
|
||||
* token = 1*<any CHAR except CTLs or separators>
|
||||
@ -235,13 +453,12 @@ exports.HEX = exports.NUM.concat(['a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', '
|
||||
* | "/" | "[" | "]" | "?" | "="
|
||||
* | "{" | "}" | SP | HT
|
||||
*/
|
||||
exports.STRICT_TOKEN = [
|
||||
exports.TOKEN = [
|
||||
'!', '#', '$', '%', '&', '\'',
|
||||
'*', '+', '-', '.',
|
||||
'^', '_', '`',
|
||||
'|', '~',
|
||||
].concat(exports.ALPHANUM);
|
||||
exports.TOKEN = exports.STRICT_TOKEN.concat([' ']);
|
||||
/*
|
||||
* Verify that a char is a valid visible (printable) US-ASCII
|
||||
* character or %x80-FF
|
||||
@ -254,25 +471,28 @@ for (let i = 32; i <= 255; i++) {
|
||||
}
|
||||
// ',' = \x44
|
||||
exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS.filter((c) => c !== 44);
|
||||
exports.QUOTED_STRING = ['\t', ' '];
|
||||
for (let i = 0x21; i <= 0xff; i++) {
|
||||
if (i !== 0x22 && i !== 0x5c) { // All characters in ASCII except \ and "
|
||||
exports.QUOTED_STRING.push(i);
|
||||
}
|
||||
}
|
||||
exports.HTAB_SP_VCHAR_OBS_TEXT = ['\t', ' '];
|
||||
// VCHAR: https://tools.ietf.org/html/rfc5234#appendix-B.1
|
||||
for (let i = 0x21; i <= 0x7E; i++) {
|
||||
exports.HTAB_SP_VCHAR_OBS_TEXT.push(i);
|
||||
}
|
||||
// OBS_TEXT: https://datatracker.ietf.org/doc/html/rfc9110#name-collected-abnf
|
||||
for (let i = 0x80; i <= 0xff; i++) {
|
||||
exports.HTAB_SP_VCHAR_OBS_TEXT.push(i);
|
||||
}
|
||||
exports.MAJOR = exports.NUM_MAP;
|
||||
exports.MINOR = exports.MAJOR;
|
||||
var HEADER_STATE;
|
||||
(function (HEADER_STATE) {
|
||||
HEADER_STATE[HEADER_STATE["GENERAL"] = 0] = "GENERAL";
|
||||
HEADER_STATE[HEADER_STATE["CONNECTION"] = 1] = "CONNECTION";
|
||||
HEADER_STATE[HEADER_STATE["CONTENT_LENGTH"] = 2] = "CONTENT_LENGTH";
|
||||
HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING"] = 3] = "TRANSFER_ENCODING";
|
||||
HEADER_STATE[HEADER_STATE["UPGRADE"] = 4] = "UPGRADE";
|
||||
HEADER_STATE[HEADER_STATE["CONNECTION_KEEP_ALIVE"] = 5] = "CONNECTION_KEEP_ALIVE";
|
||||
HEADER_STATE[HEADER_STATE["CONNECTION_CLOSE"] = 6] = "CONNECTION_CLOSE";
|
||||
HEADER_STATE[HEADER_STATE["CONNECTION_UPGRADE"] = 7] = "CONNECTION_UPGRADE";
|
||||
HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING_CHUNKED"] = 8] = "TRANSFER_ENCODING_CHUNKED";
|
||||
})(HEADER_STATE = exports.HEADER_STATE || (exports.HEADER_STATE = {}));
|
||||
exports.SPECIAL_HEADERS = {
|
||||
'connection': HEADER_STATE.CONNECTION,
|
||||
'content-length': HEADER_STATE.CONTENT_LENGTH,
|
||||
'proxy-connection': HEADER_STATE.CONNECTION,
|
||||
'transfer-encoding': HEADER_STATE.TRANSFER_ENCODING,
|
||||
'upgrade': HEADER_STATE.UPGRADE,
|
||||
'connection': exports.HEADER_STATE.CONNECTION,
|
||||
'content-length': exports.HEADER_STATE.CONTENT_LENGTH,
|
||||
'proxy-connection': exports.HEADER_STATE.CONNECTION,
|
||||
'transfer-encoding': exports.HEADER_STATE.TRANSFER_ENCODING,
|
||||
'upgrade': exports.HEADER_STATE.UPGRADE,
|
||||
};
|
||||
//# sourceMappingURL=constants.js.map
|
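A quick illustration of how the generated tables above are consumed (a minimal sketch, not part of the diff; the require path assumes the vendored layout shown in this commit):

'use strict'

// Look up llhttp's numeric ids from the generated tables.
const constants = require('./node_modules/undici/lib/llhttp/constants.js')

// METHOD_MAP maps method names to llhttp's numeric method ids;
// H_METHOD_MAP keeps only the methods starting with 'H', used when the
// parser has seen an initial 'H' and must tell HEAD apart from an
// HTTP/x.y response line.
console.log(constants.METHOD_MAP.GET)            // -> 1
console.log(Object.keys(constants.H_METHOD_MAP)) // -> [ 'HEAD' ]

// STATUSES gives symbolic names for the status codes listed above.
console.log(constants.STATUSES.IM_A_TEAPOT)      // -> 418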
2
node_modules/undici/lib/llhttp/constants.js.map
generated
vendored
File diff suppressed because one or more lines are too long
16
node_modules/undici/lib/llhttp/llhttp-wasm.js
generated
vendored
File diff suppressed because one or more lines are too long
16
node_modules/undici/lib/llhttp/llhttp_simd-wasm.js
generated
vendored
File diff suppressed because one or more lines are too long
6
node_modules/undici/lib/llhttp/utils.d.ts
generated
vendored
@@ -1,4 +1,2 @@
export interface IEnumMap {
    [key: string]: number;
}
export declare function enumToMap(obj: any): IEnumMap;
import { IntDict } from './constants';
export declare function enumToMap(obj: IntDict, filter?: ReadonlyArray<number>, exceptions?: ReadonlyArray<number>): IntDict;
18
node_modules/undici/lib/llhttp/utils.js
generated
vendored
@@ -1,15 +1,15 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.enumToMap = void 0;
function enumToMap(obj) {
    const res = {};
    Object.keys(obj).forEach((key) => {
        const value = obj[key];
        if (typeof value === 'number') {
            res[key] = value;
        }
    });
    return res;
function enumToMap(obj, filter = [], exceptions = []) {
    var _a, _b;
    const emptyFilter = ((_a = filter === null || filter === void 0 ? void 0 : filter.length) !== null && _a !== void 0 ? _a : 0) === 0;
    const emptyExceptions = ((_b = exceptions === null || exceptions === void 0 ? void 0 : exceptions.length) !== null && _b !== void 0 ? _b : 0) === 0;
    return Object.fromEntries(Object.entries(obj).filter(([, value]) => {
        return (typeof value === 'number' &&
            (emptyFilter || filter.includes(value)) &&
            (emptyExceptions || !exceptions.includes(value)));
    }));
}
exports.enumToMap = enumToMap;
//# sourceMappingURL=utils.js.map
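A minimal usage sketch of the new enumToMap signature from the hunk above (not part of the diff; the require path assumes the vendored layout):

'use strict'

const { enumToMap } = require('./node_modules/undici/lib/llhttp/utils.js')

// A toy enum: only numeric values are kept by enumToMap.
const METHODS = { DELETE: 0, GET: 1, HEAD: 2, POST: 3 }

console.log(enumToMap(METHODS))          // all four entries
console.log(enumToMap(METHODS, [1, 2]))  // filter keeps only GET and HEAD
console.log(enumToMap(METHODS, [], [0])) // exceptions drops DELETE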
2
node_modules/undici/lib/llhttp/utils.js.map
generated
vendored
@@ -1 +1 @@
{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../src/llhttp/utils.ts"],"names":[],"mappings":";;;AAIA,SAAgB,SAAS,CAAC,GAAQ;IAChC,MAAM,GAAG,GAAa,EAAE,CAAC;IAEzB,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,GAAG,EAAE,EAAE;QAC/B,MAAM,KAAK,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;QACvB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;SAClB;IACH,CAAC,CAAC,CAAC;IAEH,OAAO,GAAG,CAAC;AACb,CAAC;AAXD,8BAWC"}
{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../src/llhttp/utils.ts"],"names":[],"mappings":";;;AAEA,SAAgB,SAAS,CACvB,GAAY,EACZ,SAAgC,EAAE,EAClC,aAAoC,EAAE;;IAEtC,MAAM,WAAW,GAAG,CAAC,MAAA,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,MAAM,mCAAI,CAAC,CAAC,KAAK,CAAC,CAAC;IAChD,MAAM,eAAe,GAAG,CAAC,MAAA,UAAU,aAAV,UAAU,uBAAV,UAAU,CAAE,MAAM,mCAAI,CAAC,CAAC,KAAK,CAAC,CAAC;IAExD,OAAO,MAAM,CAAC,WAAW,CAAC,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAE,AAAD,EAAG,KAAK,CAAE,EAAE,EAAE;QACnE,OAAO,CACL,OAAO,KAAK,KAAK,QAAQ;YACzB,CAAC,WAAW,IAAI,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;YACvC,CAAC,eAAe,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CACjD,CAAC;IACJ,CAAC,CAAC,CAAC,CAAC;AACN,CAAC;AAfD,8BAeC"}
105
node_modules/undici/lib/mock/mock-agent.js
generated
vendored
@@ -1,7 +1,7 @@
'use strict'

const { kClients } = require('../core/symbols')
const Agent = require('../agent')
const Agent = require('../dispatcher/agent')
const {
  kAgent,
  kMockAgentSet,
@@ -11,42 +11,44 @@ const {
  kNetConnect,
  kGetNetConnect,
  kOptions,
  kFactory
  kFactory,
  kMockAgentRegisterCallHistory,
  kMockAgentIsCallHistoryEnabled,
  kMockAgentAddCallHistoryLog,
  kMockAgentMockCallHistoryInstance,
  kMockCallHistoryAddLog
} = require('./mock-symbols')
const MockClient = require('./mock-client')
const MockPool = require('./mock-pool')
const { matchValue, buildMockOptions } = require('./mock-utils')
const { matchValue, buildAndValidateMockOptions } = require('./mock-utils')
const { InvalidArgumentError, UndiciError } = require('../core/errors')
const Dispatcher = require('../dispatcher')
const Pluralizer = require('./pluralizer')
const Dispatcher = require('../dispatcher/dispatcher')
const PendingInterceptorsFormatter = require('./pending-interceptors-formatter')

class FakeWeakRef {
  constructor (value) {
    this.value = value
  }

  deref () {
    return this.value
  }
}
const { MockCallHistory } = require('./mock-call-history')

class MockAgent extends Dispatcher {
  constructor (opts) {
    super(opts)

    const mockOptions = buildAndValidateMockOptions(opts)

    this[kNetConnect] = true
    this[kIsMockActive] = true
    this[kMockAgentIsCallHistoryEnabled] = mockOptions?.enableCallHistory ?? false

    // Instantiate Agent and encapsulate
    if ((opts && opts.agent && typeof opts.agent.dispatch !== 'function')) {
    if (opts?.agent && typeof opts.agent.dispatch !== 'function') {
      throw new InvalidArgumentError('Argument opts.agent must implement Agent')
    }
    const agent = opts && opts.agent ? opts.agent : new Agent(opts)
    const agent = opts?.agent ? opts.agent : new Agent(opts)
    this[kAgent] = agent

    this[kClients] = agent[kClients]
    this[kOptions] = buildMockOptions(opts)
    this[kOptions] = mockOptions

    if (this[kMockAgentIsCallHistoryEnabled]) {
      this[kMockAgentRegisterCallHistory]()
    }
  }

  get (origin) {
@@ -62,10 +64,14 @@ class MockAgent extends Dispatcher {
  dispatch (opts, handler) {
    // Call MockAgent.get to perform additional setup before dispatching as normal
    this.get(opts.origin)

    this[kMockAgentAddCallHistoryLog](opts)

    return this[kAgent].dispatch(opts, handler)
  }

  async close () {
    this.clearCallHistory()
    await this[kAgent].close()
    this[kClients].clear()
  }
@@ -96,14 +102,52 @@ class MockAgent extends Dispatcher {
    this[kNetConnect] = false
  }

  enableCallHistory () {
    this[kMockAgentIsCallHistoryEnabled] = true

    return this
  }

  disableCallHistory () {
    this[kMockAgentIsCallHistoryEnabled] = false

    return this
  }

  getCallHistory () {
    return this[kMockAgentMockCallHistoryInstance]
  }

  clearCallHistory () {
    if (this[kMockAgentMockCallHistoryInstance] !== undefined) {
      this[kMockAgentMockCallHistoryInstance].clear()
    }
  }

  // This is required to bypass issues caused by using global symbols - see:
  // https://github.com/nodejs/undici/issues/1447
  get isMockActive () {
    return this[kIsMockActive]
  }

  [kMockAgentRegisterCallHistory] () {
    if (this[kMockAgentMockCallHistoryInstance] === undefined) {
      this[kMockAgentMockCallHistoryInstance] = new MockCallHistory()
    }
  }

  [kMockAgentAddCallHistoryLog] (opts) {
    if (this[kMockAgentIsCallHistoryEnabled]) {
      // additional setup when enableCallHistory class method is used after mockAgent instantiation
      this[kMockAgentRegisterCallHistory]()

      // add call history log on every call (intercepted or not)
      this[kMockAgentMockCallHistoryInstance][kMockCallHistoryAddLog](opts)
    }
  }

  [kMockAgentSet] (origin, dispatcher) {
    this[kClients].set(origin, new FakeWeakRef(dispatcher))
    this[kClients].set(origin, dispatcher)
  }

  [kFactory] (origin) {
@@ -115,9 +159,9 @@ class MockAgent extends Dispatcher {

  [kMockAgentGet] (origin) {
    // First check if we can immediately find it
    const ref = this[kClients].get(origin)
    if (ref) {
      return ref.deref()
    const client = this[kClients].get(origin)
    if (client) {
      return client
    }

    // If the origin is not a string create a dummy parent pool and return to user
@@ -128,8 +172,7 @@ class MockAgent extends Dispatcher {
    }

    // If we match, create a pool and assign the same dispatches
    for (const [keyMatcher, nonExplicitRef] of Array.from(this[kClients])) {
      const nonExplicitDispatcher = nonExplicitRef.deref()
    for (const [keyMatcher, nonExplicitDispatcher] of Array.from(this[kClients])) {
      if (nonExplicitDispatcher && typeof keyMatcher !== 'string' && matchValue(keyMatcher, origin)) {
        const dispatcher = this[kFactory](origin)
        this[kMockAgentSet](origin, dispatcher)
@@ -147,7 +190,7 @@ class MockAgent extends Dispatcher {
    const mockAgentClients = this[kClients]

    return Array.from(mockAgentClients.entries())
      .flatMap(([origin, scope]) => scope.deref()[kDispatches].map(dispatch => ({ ...dispatch, origin })))
      .flatMap(([origin, scope]) => scope[kDispatches].map(dispatch => ({ ...dispatch, origin })))
      .filter(({ pending }) => pending)
  }

@@ -158,13 +201,11 @@ class MockAgent extends Dispatcher {
      return
    }

    const pluralizer = new Pluralizer('interceptor', 'interceptors').pluralize(pending.length)

    throw new UndiciError(`
${pluralizer.count} ${pluralizer.noun} ${pluralizer.is} pending:

${pendingInterceptorsFormatter.format(pending)}
`.trim())
    throw new UndiciError(
      pending.length === 1
        ? `1 interceptor is pending:\n\n${pendingInterceptorsFormatter.format(pending)}`.trim()
        : `${pending.length} interceptors are pending:\n\n${pendingInterceptorsFormatter.format(pending)}`.trim()
    )
  }
}
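A hedged usage sketch of the call-history feature added above (the enableCallHistory option and getCallHistory() come from this diff; setGlobalDispatcher and global fetch are assumed available, Node 18+):

'use strict'

const { MockAgent, setGlobalDispatcher } = require('undici')

const mockAgent = new MockAgent({ enableCallHistory: true })
setGlobalDispatcher(mockAgent)

mockAgent.get('https://example.com').intercept({ path: '/' }).reply(200, 'ok')

fetch('https://example.com/').then(() => {
  // every dispatched request (intercepted or not) is logged
  console.log(mockAgent.getCallHistory().lastCall().fullUrl)
})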
248
node_modules/undici/lib/mock/mock-call-history.js
generated
vendored
Normal file
@@ -0,0 +1,248 @@
'use strict'

const { kMockCallHistoryAddLog } = require('./mock-symbols')
const { InvalidArgumentError } = require('../core/errors')

function handleFilterCallsWithOptions (criteria, options, handler, store) {
  switch (options.operator) {
    case 'OR':
      store.push(...handler(criteria))

      return store
    case 'AND':
      return handler.call({ logs: store }, criteria)
    default:
      // guard -- should never happens because buildAndValidateFilterCallsOptions is called before
      throw new InvalidArgumentError('options.operator must to be a case insensitive string equal to \'OR\' or \'AND\'')
  }
}

function buildAndValidateFilterCallsOptions (options = {}) {
  const finalOptions = {}

  if ('operator' in options) {
    if (typeof options.operator !== 'string' || (options.operator.toUpperCase() !== 'OR' && options.operator.toUpperCase() !== 'AND')) {
      throw new InvalidArgumentError('options.operator must to be a case insensitive string equal to \'OR\' or \'AND\'')
    }

    return {
      ...finalOptions,
      operator: options.operator.toUpperCase()
    }
  }

  return finalOptions
}

function makeFilterCalls (parameterName) {
  return (parameterValue) => {
    if (typeof parameterValue === 'string' || parameterValue == null) {
      return this.logs.filter((log) => {
        return log[parameterName] === parameterValue
      })
    }
    if (parameterValue instanceof RegExp) {
      return this.logs.filter((log) => {
        return parameterValue.test(log[parameterName])
      })
    }

    throw new InvalidArgumentError(`${parameterName} parameter should be one of string, regexp, undefined or null`)
  }
}
function computeUrlWithMaybeSearchParameters (requestInit) {
  // path can contains query url parameters
  // or query can contains query url parameters
  try {
    const url = new URL(requestInit.path, requestInit.origin)

    // requestInit.path contains query url parameters
    // requestInit.query is then undefined
    if (url.search.length !== 0) {
      return url
    }

    // requestInit.query can be populated here
    url.search = new URLSearchParams(requestInit.query).toString()

    return url
  } catch (error) {
    throw new InvalidArgumentError('An error occurred when computing MockCallHistoryLog.url', { cause: error })
  }
}

class MockCallHistoryLog {
  constructor (requestInit = {}) {
    this.body = requestInit.body
    this.headers = requestInit.headers
    this.method = requestInit.method

    const url = computeUrlWithMaybeSearchParameters(requestInit)

    this.fullUrl = url.toString()
    this.origin = url.origin
    this.path = url.pathname
    this.searchParams = Object.fromEntries(url.searchParams)
    this.protocol = url.protocol
    this.host = url.host
    this.port = url.port
    this.hash = url.hash
  }

  toMap () {
    return new Map([
      ['protocol', this.protocol],
      ['host', this.host],
      ['port', this.port],
      ['origin', this.origin],
      ['path', this.path],
      ['hash', this.hash],
      ['searchParams', this.searchParams],
      ['fullUrl', this.fullUrl],
      ['method', this.method],
      ['body', this.body],
      ['headers', this.headers]]
    )
  }

  toString () {
    const options = { betweenKeyValueSeparator: '->', betweenPairSeparator: '|' }
    let result = ''

    this.toMap().forEach((value, key) => {
      if (typeof value === 'string' || value === undefined || value === null) {
        result = `${result}${key}${options.betweenKeyValueSeparator}${value}${options.betweenPairSeparator}`
      }
      if ((typeof value === 'object' && value !== null) || Array.isArray(value)) {
        result = `${result}${key}${options.betweenKeyValueSeparator}${JSON.stringify(value)}${options.betweenPairSeparator}`
      }
      // maybe miss something for non Record / Array headers and searchParams here
    })

    // delete last betweenPairSeparator
    return result.slice(0, -1)
  }
}

class MockCallHistory {
  logs = []

  calls () {
    return this.logs
  }

  firstCall () {
    return this.logs.at(0)
  }

  lastCall () {
    return this.logs.at(-1)
  }

  nthCall (number) {
    if (typeof number !== 'number') {
      throw new InvalidArgumentError('nthCall must be called with a number')
    }
    if (!Number.isInteger(number)) {
      throw new InvalidArgumentError('nthCall must be called with an integer')
    }
    if (Math.sign(number) !== 1) {
      throw new InvalidArgumentError('nthCall must be called with a positive value. use firstCall or lastCall instead')
    }

    // non zero based index. this is more human readable
    return this.logs.at(number - 1)
  }

  filterCalls (criteria, options) {
    // perf
    if (this.logs.length === 0) {
      return this.logs
    }
    if (typeof criteria === 'function') {
      return this.logs.filter(criteria)
    }
    if (criteria instanceof RegExp) {
      return this.logs.filter((log) => {
        return criteria.test(log.toString())
      })
    }
    if (typeof criteria === 'object' && criteria !== null) {
      // no criteria - returning all logs
      if (Object.keys(criteria).length === 0) {
        return this.logs
      }

      const finalOptions = { operator: 'OR', ...buildAndValidateFilterCallsOptions(options) }

      let maybeDuplicatedLogsFiltered = []
      if ('protocol' in criteria) {
        maybeDuplicatedLogsFiltered = handleFilterCallsWithOptions(criteria.protocol, finalOptions, this.filterCallsByProtocol, maybeDuplicatedLogsFiltered)
      }
      if ('host' in criteria) {
        maybeDuplicatedLogsFiltered = handleFilterCallsWithOptions(criteria.host, finalOptions, this.filterCallsByHost, maybeDuplicatedLogsFiltered)
      }
      if ('port' in criteria) {
        maybeDuplicatedLogsFiltered = handleFilterCallsWithOptions(criteria.port, finalOptions, this.filterCallsByPort, maybeDuplicatedLogsFiltered)
      }
      if ('origin' in criteria) {
        maybeDuplicatedLogsFiltered = handleFilterCallsWithOptions(criteria.origin, finalOptions, this.filterCallsByOrigin, maybeDuplicatedLogsFiltered)
      }
      if ('path' in criteria) {
        maybeDuplicatedLogsFiltered = handleFilterCallsWithOptions(criteria.path, finalOptions, this.filterCallsByPath, maybeDuplicatedLogsFiltered)
      }
      if ('hash' in criteria) {
        maybeDuplicatedLogsFiltered = handleFilterCallsWithOptions(criteria.hash, finalOptions, this.filterCallsByHash, maybeDuplicatedLogsFiltered)
      }
      if ('fullUrl' in criteria) {
        maybeDuplicatedLogsFiltered = handleFilterCallsWithOptions(criteria.fullUrl, finalOptions, this.filterCallsByFullUrl, maybeDuplicatedLogsFiltered)
      }
      if ('method' in criteria) {
        maybeDuplicatedLogsFiltered = handleFilterCallsWithOptions(criteria.method, finalOptions, this.filterCallsByMethod, maybeDuplicatedLogsFiltered)
      }

      const uniqLogsFiltered = [...new Set(maybeDuplicatedLogsFiltered)]

      return uniqLogsFiltered
    }

    throw new InvalidArgumentError('criteria parameter should be one of function, regexp, or object')
  }

  filterCallsByProtocol = makeFilterCalls.call(this, 'protocol')

  filterCallsByHost = makeFilterCalls.call(this, 'host')

  filterCallsByPort = makeFilterCalls.call(this, 'port')

  filterCallsByOrigin = makeFilterCalls.call(this, 'origin')

  filterCallsByPath = makeFilterCalls.call(this, 'path')

  filterCallsByHash = makeFilterCalls.call(this, 'hash')

  filterCallsByFullUrl = makeFilterCalls.call(this, 'fullUrl')

  filterCallsByMethod = makeFilterCalls.call(this, 'method')

  clear () {
    this.logs = []
  }

  [kMockCallHistoryAddLog] (requestInit) {
    const log = new MockCallHistoryLog(requestInit)

    this.logs.push(log)

    return log
  }

  * [Symbol.iterator] () {
    for (const log of this.calls()) {
      yield log
    }
  }
}

module.exports.MockCallHistory = MockCallHistory
module.exports.MockCallHistoryLog = MockCallHistoryLog
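A sketch of how the filtering API above composes; `history` is assumed to be the instance returned by mockAgent.getCallHistory():

'use strict'

function summariseCalls (history) {
  // criteria can be a plain object; with the default OR operator a log
  // matches if any listed property matches
  const loose = history.filterCalls({ host: /example/, method: 'GET' })

  // 'AND' requires every criterion to match
  const strict = history.filterCalls({ host: /example/, method: 'GET' }, { operator: 'AND' })

  // a bare RegExp is tested against log.toString(), which joins pairs
  // like 'method->GET' with '|'
  const gets = history.filterCalls(/method->GET/)

  return { loose, strict, gets }
}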
17
node_modules/undici/lib/mock/mock-client.js
generated
vendored
@@ -1,7 +1,7 @@
'use strict'

const { promisify } = require('util')
const Client = require('../client')
const { promisify } = require('node:util')
const Client = require('../dispatcher/client')
const { buildMockDispatch } = require('./mock-utils')
const {
  kDispatches,
@@ -10,7 +10,8 @@ const {
  kOriginalClose,
  kOrigin,
  kOriginalDispatch,
  kConnected
  kConnected,
  kIgnoreTrailingSlash
} = require('./mock-symbols')
const { MockInterceptor } = require('./mock-interceptor')
const Symbols = require('../core/symbols')
@@ -21,14 +22,15 @@ const { InvalidArgumentError } = require('../core/errors')
 */
class MockClient extends Client {
  constructor (origin, opts) {
    super(origin, opts)

    if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') {
      throw new InvalidArgumentError('Argument opts.agent must implement Agent')
    }

    super(origin, opts)

    this[kMockAgent] = opts.agent
    this[kOrigin] = origin
    this[kIgnoreTrailingSlash] = opts.ignoreTrailingSlash ?? false
    this[kDispatches] = []
    this[kConnected] = 1
    this[kOriginalDispatch] = this.dispatch
@@ -46,7 +48,10 @@ class MockClient extends Client {
   * Sets up the base interceptor for mocking replies from undici.
   */
  intercept (opts) {
    return new MockInterceptor(opts, this[kDispatches])
    return new MockInterceptor(
      opts && { ignoreTrailingSlash: this[kIgnoreTrailingSlash], ...opts },
      this[kDispatches]
    )
  }

  async [kClose] () {
4
node_modules/undici/lib/mock/mock-errors.js
generated
vendored
@@ -2,10 +2,12 @@

const { UndiciError } = require('../core/errors')

/**
 * The request does not match any registered mock dispatches.
 */
class MockNotMatchedError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, MockNotMatchedError)
    this.name = 'MockNotMatchedError'
    this.message = message || 'The request does not match any registered mock dispatches'
    this.code = 'UND_MOCK_ERR_MOCK_NOT_MATCHED'
51
node_modules/undici/lib/mock/mock-interceptor.js
generated
vendored
@@ -7,10 +7,11 @@ const {
  kDefaultHeaders,
  kDefaultTrailers,
  kContentLength,
  kMockDispatch
  kMockDispatch,
  kIgnoreTrailingSlash
} = require('./mock-symbols')
const { InvalidArgumentError } = require('../core/errors')
const { buildURL } = require('../core/util')
const { serializePathWithQuery } = require('../core/util')

/**
 * Defines the scope API for an interceptor reply
@@ -72,9 +73,9 @@ class MockInterceptor {
    // fragments to servers when they retrieve a document,
    if (typeof opts.path === 'string') {
      if (opts.query) {
        opts.path = buildURL(opts.path, opts.query)
        opts.path = serializePathWithQuery(opts.path, opts.query)
      } else {
        // Matches https://github.com/nodejs/undici/blob/main/lib/fetch/index.js#L1811
        // Matches https://github.com/nodejs/undici/blob/main/lib/web/fetch/index.js#L1811
        const parsedURL = new URL(opts.path, 'data://')
        opts.path = parsedURL.pathname + parsedURL.search
      }
@@ -85,12 +86,13 @@ class MockInterceptor {

    this[kDispatchKey] = buildKey(opts)
    this[kDispatches] = mockDispatches
    this[kIgnoreTrailingSlash] = opts.ignoreTrailingSlash ?? false
    this[kDefaultHeaders] = {}
    this[kDefaultTrailers] = {}
    this[kContentLength] = false
  }

  createMockScopeDispatchData (statusCode, data, responseOptions = {}) {
  createMockScopeDispatchData ({ statusCode, data, responseOptions }) {
    const responseData = getResponseData(data)
    const contentLength = this[kContentLength] ? { 'content-length': responseData.length } : {}
    const headers = { ...this[kDefaultHeaders], ...contentLength, ...responseOptions.headers }
@@ -99,14 +101,11 @@ class MockInterceptor {
    return { statusCode, data, headers, trailers }
  }

  validateReplyParameters (statusCode, data, responseOptions) {
    if (typeof statusCode === 'undefined') {
  validateReplyParameters (replyParameters) {
    if (typeof replyParameters.statusCode === 'undefined') {
      throw new InvalidArgumentError('statusCode must be defined')
    }
    if (typeof data === 'undefined') {
      throw new InvalidArgumentError('data must be defined')
    }
    if (typeof responseOptions !== 'object') {
    if (typeof replyParameters.responseOptions !== 'object' || replyParameters.responseOptions === null) {
      throw new InvalidArgumentError('responseOptions must be an object')
    }
  }
@@ -114,33 +113,33 @@ class MockInterceptor {
  /**
   * Mock an undici request with a defined reply.
   */
  reply (replyData) {
  reply (replyOptionsCallbackOrStatusCode) {
    // Values of reply aren't available right now as they
    // can only be available when the reply callback is invoked.
    if (typeof replyData === 'function') {
    if (typeof replyOptionsCallbackOrStatusCode === 'function') {
      // We'll first wrap the provided callback in another function,
      // this function will properly resolve the data from the callback
      // when invoked.
      const wrappedDefaultsCallback = (opts) => {
        // Our reply options callback contains the parameter for statusCode, data and options.
        const resolvedData = replyData(opts)
        const resolvedData = replyOptionsCallbackOrStatusCode(opts)

        // Check if it is in the right format
        if (typeof resolvedData !== 'object') {
        if (typeof resolvedData !== 'object' || resolvedData === null) {
          throw new InvalidArgumentError('reply options callback must return an object')
        }

        const { statusCode, data = '', responseOptions = {} } = resolvedData
        this.validateReplyParameters(statusCode, data, responseOptions)
        const replyParameters = { data: '', responseOptions: {}, ...resolvedData }
        this.validateReplyParameters(replyParameters)
        // Since the values can be obtained immediately we return them
        // from this higher order function that will be resolved later.
        return {
          ...this.createMockScopeDispatchData(statusCode, data, responseOptions)
          ...this.createMockScopeDispatchData(replyParameters)
        }
      }

      // Add usual dispatch data, but this time set the data parameter to function that will eventually provide data.
      const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], wrappedDefaultsCallback)
      const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], wrappedDefaultsCallback, { ignoreTrailingSlash: this[kIgnoreTrailingSlash] })
      return new MockScope(newMockDispatch)
    }

@@ -148,12 +147,16 @@ class MockInterceptor {
    // we should have 1-3 parameters. So we spread the arguments of
    // this function to obtain the parameters, since replyData will always
    // just be the statusCode.
    const [statusCode, data = '', responseOptions = {}] = [...arguments]
    this.validateReplyParameters(statusCode, data, responseOptions)
    const replyParameters = {
      statusCode: replyOptionsCallbackOrStatusCode,
      data: arguments[1] === undefined ? '' : arguments[1],
      responseOptions: arguments[2] === undefined ? {} : arguments[2]
    }
    this.validateReplyParameters(replyParameters)

    // Send in-already provided data like usual
    const dispatchData = this.createMockScopeDispatchData(statusCode, data, responseOptions)
    const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], dispatchData)
    const dispatchData = this.createMockScopeDispatchData(replyParameters)
    const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], dispatchData, { ignoreTrailingSlash: this[kIgnoreTrailingSlash] })
    return new MockScope(newMockDispatch)
  }

@@ -165,7 +168,7 @@ class MockInterceptor {
      throw new InvalidArgumentError('error must be defined')
    }

    const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], { error })
    const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], { error }, { ignoreTrailingSlash: this[kIgnoreTrailingSlash] })
    return new MockScope(newMockDispatch)
  }
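The two reply() forms handled above, as a usage sketch; `pool` is assumed to come from mockAgent.get(origin):

'use strict'

function setupMocks (pool) {
  // positional form: statusCode, data, responseOptions
  pool.intercept({ path: '/static', method: 'GET' })
    .reply(200, { hello: 'world' }, { headers: { 'content-type': 'application/json' } })

  // callback form: must return an object with at least { statusCode };
  // data and responseOptions default to '' and {} as in the hunk above
  pool.intercept({ path: '/echo', method: 'POST' })
    .reply((opts) => ({ statusCode: 201, data: opts.body }))
}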
17
node_modules/undici/lib/mock/mock-pool.js
generated
vendored
@@ -1,7 +1,7 @@
'use strict'

const { promisify } = require('util')
const Pool = require('../pool')
const { promisify } = require('node:util')
const Pool = require('../dispatcher/pool')
const { buildMockDispatch } = require('./mock-utils')
const {
  kDispatches,
@@ -10,7 +10,8 @@ const {
  kOriginalClose,
  kOrigin,
  kOriginalDispatch,
  kConnected
  kConnected,
  kIgnoreTrailingSlash
} = require('./mock-symbols')
const { MockInterceptor } = require('./mock-interceptor')
const Symbols = require('../core/symbols')
@@ -21,14 +22,15 @@ const { InvalidArgumentError } = require('../core/errors')
 */
class MockPool extends Pool {
  constructor (origin, opts) {
    super(origin, opts)

    if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') {
      throw new InvalidArgumentError('Argument opts.agent must implement Agent')
    }

    super(origin, opts)

    this[kMockAgent] = opts.agent
    this[kOrigin] = origin
    this[kIgnoreTrailingSlash] = opts.ignoreTrailingSlash ?? false
    this[kDispatches] = []
    this[kConnected] = 1
    this[kOriginalDispatch] = this.dispatch
@@ -46,7 +48,10 @@ class MockPool extends Pool {
   * Sets up the base interceptor for mocking replies from undici.
   */
  intercept (opts) {
    return new MockInterceptor(opts, this[kDispatches])
    return new MockInterceptor(
      opts && { ignoreTrailingSlash: this[kIgnoreTrailingSlash], ...opts },
      this[kDispatches]
    )
  }

  async [kClose] () {
9
node_modules/undici/lib/mock/mock-symbols.js
generated
vendored
@@ -15,9 +15,16 @@ module.exports = {
  kMockDispatch: Symbol('mock dispatch'),
  kClose: Symbol('close'),
  kOriginalClose: Symbol('original agent close'),
  kOriginalDispatch: Symbol('original dispatch'),
  kOrigin: Symbol('origin'),
  kIsMockActive: Symbol('is mock active'),
  kNetConnect: Symbol('net connect'),
  kGetNetConnect: Symbol('get net connect'),
  kConnected: Symbol('connected')
  kConnected: Symbol('connected'),
  kIgnoreTrailingSlash: Symbol('ignore trailing slash'),
  kMockAgentMockCallHistoryInstance: Symbol('mock agent mock call history name'),
  kMockAgentRegisterCallHistory: Symbol('mock agent register mock call history'),
  kMockAgentAddCallHistoryLog: Symbol('mock agent add call history log'),
  kMockAgentIsCallHistoryEnabled: Symbol('mock agent is call history enabled'),
  kMockCallHistoryAddLog: Symbol('mock call history add log')
}
96
node_modules/undici/lib/mock/mock-utils.js
generated
vendored
@@ -8,13 +8,14 @@ const {
  kOrigin,
  kGetNetConnect
} = require('./mock-symbols')
const { buildURL, nop } = require('../core/util')
const { STATUS_CODES } = require('http')
const { serializePathWithQuery } = require('../core/util')
const { STATUS_CODES } = require('node:http')
const {
  types: {
    isPromise
  }
} = require('util')
} = require('node:util')
const { InvalidArgumentError } = require('../core/errors')

function matchValue (match, value) {
  if (typeof match === 'string') {
@@ -96,7 +97,7 @@ function safeUrl (path) {
    return path
  }

  const pathSegments = path.split('?')
  const pathSegments = path.split('?', 3)

  if (pathSegments.length !== 2) {
    return path
@@ -118,19 +119,33 @@ function matchKey (mockDispatch, { path, method, body, headers }) {
function getResponseData (data) {
  if (Buffer.isBuffer(data)) {
    return data
  } else if (data instanceof Uint8Array) {
    return data
  } else if (data instanceof ArrayBuffer) {
    return data
  } else if (typeof data === 'object') {
    return JSON.stringify(data)
  } else {
  } else if (data) {
    return data.toString()
  } else {
    return ''
  }
}

function getMockDispatch (mockDispatches, key) {
  const basePath = key.query ? buildURL(key.path, key.query) : key.path
  const basePath = key.query ? serializePathWithQuery(key.path, key.query) : key.path
  const resolvedPath = typeof basePath === 'string' ? safeUrl(basePath) : basePath

  const resolvedPathWithoutTrailingSlash = removeTrailingSlash(resolvedPath)

  // Match path
  let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path }) => matchValue(safeUrl(path), resolvedPath))
  let matchedMockDispatches = mockDispatches
    .filter(({ consumed }) => !consumed)
    .filter(({ path, ignoreTrailingSlash }) => {
      return ignoreTrailingSlash
        ? matchValue(removeTrailingSlash(safeUrl(path)), resolvedPathWithoutTrailingSlash)
        : matchValue(safeUrl(path), resolvedPath)
    })
  if (matchedMockDispatches.length === 0) {
    throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`)
  }
@@ -138,26 +153,27 @@ function getMockDispatch (mockDispatches, key) {
  // Match method
  matchedMockDispatches = matchedMockDispatches.filter(({ method }) => matchValue(method, key.method))
  if (matchedMockDispatches.length === 0) {
    throw new MockNotMatchedError(`Mock dispatch not matched for method '${key.method}'`)
    throw new MockNotMatchedError(`Mock dispatch not matched for method '${key.method}' on path '${resolvedPath}'`)
  }

  // Match body
  matchedMockDispatches = matchedMockDispatches.filter(({ body }) => typeof body !== 'undefined' ? matchValue(body, key.body) : true)
  if (matchedMockDispatches.length === 0) {
    throw new MockNotMatchedError(`Mock dispatch not matched for body '${key.body}'`)
    throw new MockNotMatchedError(`Mock dispatch not matched for body '${key.body}' on path '${resolvedPath}'`)
  }

  // Match headers
  matchedMockDispatches = matchedMockDispatches.filter((mockDispatch) => matchHeaders(mockDispatch, key.headers))
  if (matchedMockDispatches.length === 0) {
    throw new MockNotMatchedError(`Mock dispatch not matched for headers '${typeof key.headers === 'object' ? JSON.stringify(key.headers) : key.headers}'`)
    const headers = typeof key.headers === 'object' ? JSON.stringify(key.headers) : key.headers
    throw new MockNotMatchedError(`Mock dispatch not matched for headers '${headers}' on path '${resolvedPath}'`)
  }

  return matchedMockDispatches[0]
}

function addMockDispatch (mockDispatches, key, data) {
  const baseData = { timesInvoked: 0, times: 1, persist: false, consumed: false }
function addMockDispatch (mockDispatches, key, data, opts) {
  const baseData = { timesInvoked: 0, times: 1, persist: false, consumed: false, ...opts }
  const replyData = typeof data === 'function' ? { callback: data } : { ...data }
  const newMockDispatch = { ...baseData, ...key, pending: true, data: { error: null, ...replyData } }
  mockDispatches.push(newMockDispatch)
@@ -176,8 +192,24 @@ function deleteMockDispatch (mockDispatches, key) {
  }
}

/**
 * @param {string} path Path to remove trailing slash from
 */
function removeTrailingSlash (path) {
  while (path.endsWith('/')) {
    path = path.slice(0, -1)
  }

  if (path.length === 0) {
    path = '/'
  }

  return path
}

function buildKey (opts) {
  const { path, method, body, headers, query } = opts

  return {
    path,
    method,
@@ -188,11 +220,21 @@ function buildKey (opts) {
}

function generateKeyValues (data) {
  return Object.entries(data).reduce((keyValuePairs, [key, value]) => [
    ...keyValuePairs,
    Buffer.from(`${key}`),
    Array.isArray(value) ? value.map(x => Buffer.from(`${x}`)) : Buffer.from(`${value}`)
  ], [])
  const keys = Object.keys(data)
  const result = []
  for (let i = 0; i < keys.length; ++i) {
    const key = keys[i]
    const value = data[key]
    const name = Buffer.from(`${key}`)
    if (Array.isArray(value)) {
      for (let j = 0; j < value.length; ++j) {
        result.push(name, Buffer.from(`${value[j]}`))
      }
    } else {
      result.push(name, Buffer.from(`${value}`))
    }
  }
  return result
}

/**
@@ -274,10 +316,10 @@ function mockDispatch (opts, handler) {
  const responseHeaders = generateKeyValues(headers)
  const responseTrailers = generateKeyValues(trailers)

  handler.abort = nop
  handler.onHeaders(statusCode, responseHeaders, resume, getStatusText(statusCode))
  handler.onData(Buffer.from(responseData))
  handler.onComplete(responseTrailers)
  handler.onConnect?.(err => handler.onError(err), null)
  handler.onHeaders?.(statusCode, responseHeaders, resume, getStatusText(statusCode))
  handler.onData?.(Buffer.from(responseData))
  handler.onComplete?.(responseTrailers)
  deleteMockDispatch(mockDispatches, key)
}

@@ -326,9 +368,14 @@ function checkNetConnect (netConnect, origin) {
  return false
}

function buildMockOptions (opts) {
function buildAndValidateMockOptions (opts) {
  if (opts) {
    const { agent, ...mockOptions } = opts

    if ('enableCallHistory' in mockOptions && typeof mockOptions.enableCallHistory !== 'boolean') {
      throw new InvalidArgumentError('options.enableCallHistory must to be a boolean')
    }

    return mockOptions
  }
}
@@ -346,6 +393,7 @@ module.exports = {
  mockDispatch,
  buildMockDispatch,
  checkNetConnect,
  buildMockOptions,
  getHeaderByName
  buildAndValidateMockOptions,
  getHeaderByName,
  buildHeadersFromArray
}
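Behaviour of the trailing-slash normalisation added above, restated as a standalone sketch (removeTrailingSlash is private to mock-utils, so this is a re-statement, not an import):

'use strict'

function removeTrailingSlash (path) {
  while (path.endsWith('/')) {
    path = path.slice(0, -1)
  }
  return path.length === 0 ? '/' : path
}

console.log(removeTrailingSlash('/foo///')) // '/foo'
console.log(removeTrailingSlash('///'))     // '/'
// With ignoreTrailingSlash set on a dispatch, '/foo' and '/foo/' are both
// compared through this normalisation and therefore match the same mock.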
9
node_modules/undici/lib/mock/pending-interceptors-formatter.js
generated
vendored
@@ -1,7 +1,10 @@
'use strict'

const { Transform } = require('stream')
const { Console } = require('console')
const { Transform } = require('node:stream')
const { Console } = require('node:console')

const PERSISTENT = process.versions.icu ? '✅' : 'Y '
const NOT_PERSISTENT = process.versions.icu ? '❌' : 'N '

/**
 * Gets the output of `console.table(…)` as a string.
@@ -29,7 +32,7 @@ module.exports = class PendingInterceptorsFormatter {
      Origin: origin,
      Path: path,
      'Status code': statusCode,
      Persistent: persist ? '✅' : '❌',
      Persistent: persist ? PERSISTENT : NOT_PERSISTENT,
      Invocations: timesInvoked,
      Remaining: persist ? Infinity : times - timesInvoked
    }))
29
node_modules/undici/lib/mock/pluralizer.js
generated
vendored
@@ -1,29 +0,0 @@
'use strict'

const singulars = {
  pronoun: 'it',
  is: 'is',
  was: 'was',
  this: 'this'
}

const plurals = {
  pronoun: 'they',
  is: 'are',
  was: 'were',
  this: 'these'
}

module.exports = class Pluralizer {
  constructor (singular, plural) {
    this.singular = singular
    this.plural = plural
  }

  pluralize (count) {
    const one = count === 1
    const keys = one ? singulars : plurals
    const noun = one ? this.singular : this.plural
    return { ...keys, count, noun }
  }
}
97
node_modules/undici/lib/timers.js
generated
vendored
@@ -1,97 +0,0 @@
'use strict'

let fastNow = Date.now()
let fastNowTimeout

const fastTimers = []

function onTimeout () {
  fastNow = Date.now()

  let len = fastTimers.length
  let idx = 0
  while (idx < len) {
    const timer = fastTimers[idx]

    if (timer.state === 0) {
      timer.state = fastNow + timer.delay
    } else if (timer.state > 0 && fastNow >= timer.state) {
      timer.state = -1
      timer.callback(timer.opaque)
    }

    if (timer.state === -1) {
      timer.state = -2
      if (idx !== len - 1) {
        fastTimers[idx] = fastTimers.pop()
      } else {
        fastTimers.pop()
      }
      len -= 1
    } else {
      idx += 1
    }
  }

  if (fastTimers.length > 0) {
    refreshTimeout()
  }
}

function refreshTimeout () {
  if (fastNowTimeout && fastNowTimeout.refresh) {
    fastNowTimeout.refresh()
  } else {
    clearTimeout(fastNowTimeout)
    fastNowTimeout = setTimeout(onTimeout, 1e3)
    if (fastNowTimeout.unref) {
      fastNowTimeout.unref()
    }
  }
}

class Timeout {
  constructor (callback, delay, opaque) {
    this.callback = callback
    this.delay = delay
    this.opaque = opaque

    // -2 not in timer list
    // -1 in timer list but inactive
    // 0 in timer list waiting for time
    // > 0 in timer list waiting for time to expire
    this.state = -2

    this.refresh()
  }

  refresh () {
    if (this.state === -2) {
      fastTimers.push(this)
      if (!fastNowTimeout || fastTimers.length === 1) {
        refreshTimeout()
      }
    }

    this.state = 0
  }

  clear () {
    this.state = -1
  }
}

module.exports = {
  setTimeout (callback, delay, opaque) {
    return delay < 1e3
      ? setTimeout(callback, delay, opaque)
      : new Timeout(callback, delay, opaque)
  },
  clearTimeout (timeout) {
    if (timeout instanceof Timeout) {
      timeout.clear()
    } else {
      clearTimeout(timeout)
    }
  }
}
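The removed module's scheduling rule, restated as a tiny hypothetical sketch (not an undici export): sub-second delays went to the native timer, while longer ones shared a single unref'd 1 Hz sweep and could fire up to ~1 s late in exchange for far fewer native timers.

'use strict'

// Which mechanism the old module would pick for a given delay.
function pickTimerStrategy (delay) {
  return delay < 1e3 ? 'native setTimeout' : 'shared 1s sweep list'
}

console.log(pickTimerStrategy(100))  // 'native setTimeout'
console.log(pickTimerStrategy(5000)) // 'shared 1s sweep list'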
370
node_modules/undici/lib/util/cache.js
generated
vendored
Normal file
@ -0,0 +1,370 @@
|
||||
'use strict'
|
||||
|
||||
const {
|
||||
safeHTTPMethods
|
||||
} = require('../core/util')
|
||||
|
||||
/**
|
||||
* @param {import('../../types/dispatcher.d.ts').default.DispatchOptions} opts
|
||||
*/
|
||||
function makeCacheKey (opts) {
|
||||
if (!opts.origin) {
|
||||
throw new Error('opts.origin is undefined')
|
||||
}
|
||||
|
||||
const headers = normaliseHeaders(opts)
|
||||
|
||||
return {
|
||||
origin: opts.origin.toString(),
|
||||
method: opts.method,
|
||||
path: opts.path,
|
||||
headers
|
||||
}
|
||||
}
|
||||
/**
 * @param {import('../../types/dispatcher.d.ts').default.DispatchOptions} opts
 * @return {Record<string, string[] | string>}
 */
function normaliseHeaders (opts) {
  let headers
  if (opts.headers == null) {
    headers = {}
  } else if (typeof opts.headers[Symbol.iterator] === 'function') {
    headers = {}
    for (const x of opts.headers) {
      if (!Array.isArray(x)) {
        throw new Error('opts.headers is not a valid header map')
      }
      const [key, val] = x
      if (typeof key !== 'string' || typeof val !== 'string') {
        throw new Error('opts.headers is not a valid header map')
      }
      headers[key.toLowerCase()] = val
    }
  } else if (typeof opts.headers === 'object') {
    headers = {}

    for (const key of Object.keys(opts.headers)) {
      headers[key.toLowerCase()] = opts.headers[key]
    }
  } else {
    throw new Error('opts.headers is not an object')
  }

  return headers
}

/**
 * @param {any} key
 */
function assertCacheKey (key) {
  if (typeof key !== 'object') {
    throw new TypeError(`expected key to be object, got ${typeof key}`)
  }

  for (const property of ['origin', 'method', 'path']) {
    if (typeof key[property] !== 'string') {
      throw new TypeError(`expected key.${property} to be string, got ${typeof key[property]}`)
    }
  }

  if (key.headers !== undefined && typeof key.headers !== 'object') {
    throw new TypeError(`expected key.headers to be object, got ${typeof key.headers}`)
  }
}

/**
 * @param {any} value
 */
function assertCacheValue (value) {
  if (typeof value !== 'object') {
    throw new TypeError(`expected value to be object, got ${typeof value}`)
  }

  for (const property of ['statusCode', 'cachedAt', 'staleAt', 'deleteAt']) {
    if (typeof value[property] !== 'number') {
      throw new TypeError(`expected value.${property} to be number, got ${typeof value[property]}`)
    }
  }

  if (typeof value.statusMessage !== 'string') {
    throw new TypeError(`expected value.statusMessage to be string, got ${typeof value.statusMessage}`)
  }

  if (value.headers != null && typeof value.headers !== 'object') {
    throw new TypeError(`expected value.headers to be object, got ${typeof value.headers}`)
  }

  if (value.vary !== undefined && typeof value.vary !== 'object') {
    throw new TypeError(`expected value.vary to be object, got ${typeof value.vary}`)
  }

  if (value.etag !== undefined && typeof value.etag !== 'string') {
    throw new TypeError(`expected value.etag to be string, got ${typeof value.etag}`)
  }
}

/**
 * @see https://www.rfc-editor.org/rfc/rfc9111.html#name-cache-control
 * @see https://www.iana.org/assignments/http-cache-directives/http-cache-directives.xhtml
 *
 * @param {string | string[]} header
 * @returns {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives}
 */
function parseCacheControlHeader (header) {
  /**
   * @type {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives}
   */
  const output = {}

  let directives
  if (Array.isArray(header)) {
    directives = []

    for (const directive of header) {
      directives.push(...directive.split(','))
    }
  } else {
    directives = header.split(',')
  }

  for (let i = 0; i < directives.length; i++) {
    const directive = directives[i].toLowerCase()
    const keyValueDelimiter = directive.indexOf('=')

    let key
    let value
    if (keyValueDelimiter !== -1) {
      key = directive.substring(0, keyValueDelimiter).trimStart()
      value = directive.substring(keyValueDelimiter + 1)
    } else {
      key = directive.trim()
    }

    switch (key) {
      case 'min-fresh':
      case 'max-stale':
      case 'max-age':
      case 's-maxage':
      case 'stale-while-revalidate':
      case 'stale-if-error': {
        if (value === undefined || value[0] === ' ') {
          continue
        }

        if (
          value.length >= 2 &&
          value[0] === '"' &&
          value[value.length - 1] === '"'
        ) {
          value = value.substring(1, value.length - 1)
        }

        const parsedValue = parseInt(value, 10)
        // eslint-disable-next-line no-self-compare
        if (parsedValue !== parsedValue) {
          // parsedValue !== parsedValue is a NaN check
          continue
        }

        if (key === 'max-age' && key in output && output[key] >= parsedValue) {
          continue
        }

        output[key] = parsedValue

        break
      }
      case 'private':
      case 'no-cache': {
        if (value) {
          // The private and no-cache directives can be unqualified (aka just
          // `private` or `no-cache`) or qualified (w/ a value). When they're
          // qualified, it's a list of headers like `no-cache=header1`,
          // `no-cache="header1"`, or `no-cache="header1, header2"`
          // If we're given multiple headers, the comma messes us up since
          // we split the full header by commas. So, let's loop through the
          // remaining parts in front of us until we find one that ends in a
          // quote. We can then just splice all of the parts in between the
          // starting quote and the ending quote out of the directives array
          // and continue parsing like normal.
          // https://www.rfc-editor.org/rfc/rfc9111.html#name-no-cache-2
          if (value[0] === '"') {
            // Something like `no-cache="some-header"` OR `no-cache="some-header, another-header"`.

            // Add the first header on and cut off the leading quote
            const headers = [value.substring(1)]

            let foundEndingQuote = value[value.length - 1] === '"'
            if (!foundEndingQuote) {
              // Something like `no-cache="some-header, another-header"`
              // This can still be something invalid, e.g. `no-cache="some-header, ...`
              for (let j = i + 1; j < directives.length; j++) {
                const nextPart = directives[j]
                const nextPartLength = nextPart.length

                headers.push(nextPart.trim())

                if (nextPartLength !== 0 && nextPart[nextPartLength - 1] === '"') {
                  foundEndingQuote = true
                  break
                }
              }
            }

            if (foundEndingQuote) {
              let lastHeader = headers[headers.length - 1]
              if (lastHeader[lastHeader.length - 1] === '"') {
                lastHeader = lastHeader.substring(0, lastHeader.length - 1)
                headers[headers.length - 1] = lastHeader
              }

              if (key in output) {
                output[key] = output[key].concat(headers)
              } else {
                output[key] = headers
              }
            }
          } else {
            // Something like `no-cache=some-header`
            if (key in output) {
              output[key] = output[key].concat(value)
            } else {
              output[key] = [value]
            }
          }

          break
        }
      }
      // eslint-disable-next-line no-fallthrough
      case 'public':
      case 'no-store':
      case 'must-revalidate':
      case 'proxy-revalidate':
      case 'immutable':
      case 'no-transform':
      case 'must-understand':
      case 'only-if-cached':
        if (value) {
          // These are qualified (something like `public=...`) when they aren't
          // allowed to be, skip
          continue
        }

        output[key] = true
        break
      default:
        // Ignore unknown directives as per https://www.rfc-editor.org/rfc/rfc9111.html#section-5.2.3-1
        continue
    }
  }

  return output
}
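A behavior sketch grounded in the parser above (require path illustrative):

const { parseCacheControlHeader } = require('./cache')

// Numeric directives are parsed to integers, unqualified boolean
// directives become `true`, and unknown directives are dropped.
parseCacheControlHeader('public, max-age=604800, stale-while-revalidate=86400')
// => { public: true, 'max-age': 604800, 'stale-while-revalidate': 86400 }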
/**
 * @param {string | string[]} varyHeader Vary header from the server
 * @param {Record<string, string | string[]>} headers Request headers
 * @returns {Record<string, string | string[]>}
 */
function parseVaryHeader (varyHeader, headers) {
  if (typeof varyHeader === 'string' && varyHeader.includes('*')) {
    return headers
  }

  const output = /** @type {Record<string, string | string[] | null>} */ ({})

  const varyingHeaders = typeof varyHeader === 'string'
    ? varyHeader.split(',')
    : varyHeader

  for (const header of varyingHeaders) {
    const trimmedHeader = header.trim().toLowerCase()

    output[trimmedHeader] = headers[trimmedHeader] ?? null
  }

  return output
}
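A usage sketch (require path illustrative):

const { parseVaryHeader } = require('./cache')

// Picks the varying request headers out of the original request so a cached
// response can later be matched against them. Headers the request did not
// send are recorded as null; `Vary: *` returns the full header set unchanged.
parseVaryHeader('Accept-Encoding, User-Agent', {
  'accept-encoding': 'gzip',
  accept: 'text/html'
})
// => { 'accept-encoding': 'gzip', 'user-agent': null }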
/**
 * Note: this deviates from the spec a little. Empty etags ("", W/"") are
 * valid; however, including them in cached responses serves little to no
 * purpose.
 *
 * @see https://www.rfc-editor.org/rfc/rfc9110.html#name-etag
 *
 * @param {string} etag
 * @returns {boolean}
 */
function isEtagUsable (etag) {
  if (etag.length <= 2) {
    // The shortest an etag can be is two chars (just ""). This is where we
    // deviate from the spec, which requires a minimum of 3 chars.
    return false
  }

  if (etag[0] === '"' && etag[etag.length - 1] === '"') {
    // ETag: ""asd123"" or ETag: "W/"asd123"" is kinda undefined behavior in
    // the spec; some servers will accept these while others don't.
    // ETag: "asd123"
    return !(etag[1] === '"' || etag.startsWith('"W/'))
  }

  if (etag.startsWith('W/"') && etag[etag.length - 1] === '"') {
    // ETag: W/"" is also where we deviate from the spec and require a minimum
    // of 3 chars.
    // ETag: for W/"", W/"asd123"
    return etag.length !== 4
  }

  // Anything else
  return false
}
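A few concrete cases, following the checks above (require path illustrative):

const { isEtagUsable } = require('./cache')

isEtagUsable('"asd123"')   // true  - strong etag
isEtagUsable('W/"asd123"') // true  - weak etag
isEtagUsable('""')         // false - empty; deviates from the spec on purpose
isEtagUsable('asd123')     // false - missing quotes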
/**
 * @param {unknown} store
 * @returns {asserts store is import('../../types/cache-interceptor.d.ts').default.CacheStore}
 */
function assertCacheStore (store, name = 'CacheStore') {
  if (typeof store !== 'object' || store === null) {
    throw new TypeError(`expected type of ${name} to be a CacheStore, got ${store === null ? 'null' : typeof store}`)
  }

  for (const fn of ['get', 'createWriteStream', 'delete']) {
    if (typeof store[fn] !== 'function') {
      throw new TypeError(`${name} needs to have a \`${fn}()\` function`)
    }
  }
}

/**
 * @param {unknown} methods
 * @returns {asserts methods is import('../../types/cache-interceptor.d.ts').default.CacheMethods[]}
 */
function assertCacheMethods (methods, name = 'CacheMethods') {
  if (!Array.isArray(methods)) {
    throw new TypeError(`expected type of ${name} to be an array, got ${methods === null ? 'null' : typeof methods}`)
  }

  if (methods.length === 0) {
    throw new TypeError(`${name} needs to have at least one method`)
  }

  for (const method of methods) {
    if (!safeHTTPMethods.includes(method)) {
      throw new TypeError(`element of ${name}-array needs to be one of following values: ${safeHTTPMethods.join(', ')}, got ${method}`)
    }
  }
}

module.exports = {
  makeCacheKey,
  normaliseHeaders,
  assertCacheKey,
  assertCacheValue,
  parseCacheControlHeader,
  parseVaryHeader,
  isEtagUsable,
  assertCacheMethods,
  assertCacheStore
}
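assertCacheStore only checks the shape of the store. A minimal in-memory store that passes the assertion might look like the sketch below; the class and its internals are illustrative, not undici's own implementation:

const { assertCacheStore } = require('./cache')

class MemoryStore {
  #data = new Map()
  get (key) { return this.#data.get(JSON.stringify(key)) }
  createWriteStream (key, value) { /* return a Writable that stores the body */ }
  delete (key) { this.#data.delete(JSON.stringify(key)) }
}

assertCacheStore(new MemoryStore(), 'MemoryStore') // does not throw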
259
node_modules/undici/lib/util/date.js
generated
vendored
Normal file
259
node_modules/undici/lib/util/date.js
generated
vendored
Normal file
@ -0,0 +1,259 @@
'use strict'

const IMF_DAYS = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']
const IMF_SPACES = [4, 7, 11, 16, 25]
const IMF_MONTHS = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec']
const IMF_COLONS = [19, 22]

const ASCTIME_SPACES = [3, 7, 10, 19]

const RFC850_DAYS = ['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday']

/**
 * @see https://www.rfc-editor.org/rfc/rfc9110.html#name-date-time-formats
 *
 * @param {string} date
 * @param {Date} [now]
 * @returns {Date | undefined}
 */
function parseHttpDate (date, now) {
  // Sun, 06 Nov 1994 08:49:37 GMT  ; IMF-fixdate
  // Sun Nov  6 08:49:37 1994       ; ANSI C's asctime() format
  // Sunday, 06-Nov-94 08:49:37 GMT ; obsolete RFC 850 format

  date = date.toLowerCase()

  switch (date[3]) {
    case ',': return parseImfDate(date)
    case ' ': return parseAscTimeDate(date)
    default: return parseRfc850Date(date, now)
  }
}
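All three accepted formats parse to the same UTC instant; anything else yields undefined. A sketch (require path illustrative):

const { parseHttpDate } = require('./date')

parseHttpDate('Sun, 06 Nov 1994 08:49:37 GMT')  // IMF-fixdate
parseHttpDate('Sun Nov  6 08:49:37 1994')       // asctime()
parseHttpDate('Sunday, 06-Nov-94 08:49:37 GMT') // obsolete RFC 850
// each => 1994-11-06T08:49:37.000Z
parseHttpDate('06/11/1994')                     // => undefined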
/**
 * @see https://httpwg.org/specs/rfc9110.html#preferred.date.format
 *
 * @param {string} date
 * @returns {Date | undefined}
 */
function parseImfDate (date) {
  if (date.length !== 29) {
    return undefined
  }

  if (!date.endsWith('gmt')) {
    // Unsupported timezone
    return undefined
  }

  for (const spaceInx of IMF_SPACES) {
    if (date[spaceInx] !== ' ') {
      return undefined
    }
  }

  for (const colonIdx of IMF_COLONS) {
    if (date[colonIdx] !== ':') {
      return undefined
    }
  }

  const dayName = date.substring(0, 3)
  if (!IMF_DAYS.includes(dayName)) {
    return undefined
  }

  const dayString = date.substring(5, 7)
  const day = Number.parseInt(dayString)
  if (isNaN(day) || (day < 10 && dayString[0] !== '0')) {
    // Not a number, or it's less than 10 and didn't start with a 0
    return undefined
  }

  const month = date.substring(8, 11)
  const monthIdx = IMF_MONTHS.indexOf(month)
  if (monthIdx === -1) {
    return undefined
  }

  const year = Number.parseInt(date.substring(12, 16))
  if (isNaN(year)) {
    return undefined
  }

  const hourString = date.substring(17, 19)
  const hour = Number.parseInt(hourString)
  if (isNaN(hour) || (hour < 10 && hourString[0] !== '0')) {
    return undefined
  }

  const minuteString = date.substring(20, 22)
  const minute = Number.parseInt(minuteString)
  if (isNaN(minute) || (minute < 10 && minuteString[0] !== '0')) {
    return undefined
  }

  const secondString = date.substring(23, 25)
  const second = Number.parseInt(secondString)
  if (isNaN(second) || (second < 10 && secondString[0] !== '0')) {
    return undefined
  }

  return new Date(Date.UTC(year, monthIdx, day, hour, minute, second))
}

/**
 * @see https://httpwg.org/specs/rfc9110.html#obsolete.date.formats
 *
 * @param {string} date
 * @returns {Date | undefined}
 */
function parseAscTimeDate (date) {
  // This is assumed to be in UTC

  if (date.length !== 24) {
    return undefined
  }

  for (const spaceIdx of ASCTIME_SPACES) {
    if (date[spaceIdx] !== ' ') {
      return undefined
    }
  }

  const dayName = date.substring(0, 3)
  if (!IMF_DAYS.includes(dayName)) {
    return undefined
  }

  const month = date.substring(4, 7)
  const monthIdx = IMF_MONTHS.indexOf(month)
  if (monthIdx === -1) {
    return undefined
  }

  const dayString = date.substring(8, 10)
  const day = Number.parseInt(dayString)
  if (isNaN(day) || (day < 10 && dayString[0] !== ' ')) {
    return undefined
  }

  const hourString = date.substring(11, 13)
  const hour = Number.parseInt(hourString)
  if (isNaN(hour) || (hour < 10 && hourString[0] !== '0')) {
    return undefined
  }

  const minuteString = date.substring(14, 16)
  const minute = Number.parseInt(minuteString)
  if (isNaN(minute) || (minute < 10 && minuteString[0] !== '0')) {
    return undefined
  }

  const secondString = date.substring(17, 19)
  const second = Number.parseInt(secondString)
  if (isNaN(second) || (second < 10 && secondString[0] !== '0')) {
    return undefined
  }

  const year = Number.parseInt(date.substring(20, 24))
  if (isNaN(year)) {
    return undefined
  }

  return new Date(Date.UTC(year, monthIdx, day, hour, minute, second))
}

/**
 * @see https://httpwg.org/specs/rfc9110.html#obsolete.date.formats
 *
 * @param {string} date
 * @param {Date} [now]
 * @returns {Date | undefined}
 */
function parseRfc850Date (date, now = new Date()) {
  if (!date.endsWith('gmt')) {
    // Unsupported timezone
    return undefined
  }

  const commaIndex = date.indexOf(',')
  if (commaIndex === -1) {
    return undefined
  }

  if ((date.length - commaIndex - 1) !== 23) {
    return undefined
  }

  const dayName = date.substring(0, commaIndex)
  if (!RFC850_DAYS.includes(dayName)) {
    return undefined
  }

  if (
    date[commaIndex + 1] !== ' ' ||
    date[commaIndex + 4] !== '-' ||
    date[commaIndex + 8] !== '-' ||
    date[commaIndex + 11] !== ' ' ||
    date[commaIndex + 14] !== ':' ||
    date[commaIndex + 17] !== ':' ||
    date[commaIndex + 20] !== ' '
  ) {
    return undefined
  }

  const dayString = date.substring(commaIndex + 2, commaIndex + 4)
  const day = Number.parseInt(dayString)
  if (isNaN(day) || (day < 10 && dayString[0] !== '0')) {
    // Not a number, or it's less than 10 and didn't start with a 0
    return undefined
  }

  const month = date.substring(commaIndex + 5, commaIndex + 8)
  const monthIdx = IMF_MONTHS.indexOf(month)
  if (monthIdx === -1) {
    return undefined
  }

  // At this point year holds only the two-digit year (e.g. 94)
  let year = Number.parseInt(date.substring(commaIndex + 9, commaIndex + 11))
  if (isNaN(year)) {
    return undefined
  }

  const currentYear = now.getUTCFullYear()
  const currentDecade = currentYear % 100
  const currentCentury = Math.floor(currentYear / 100)

  if (year > currentDecade && year - currentDecade >= 50) {
    // More than 50 years in the future, assume the previous century
    year += (currentCentury - 1) * 100
  } else {
    year += currentCentury * 100
  }

  const hourString = date.substring(commaIndex + 12, commaIndex + 14)
  const hour = Number.parseInt(hourString)
  if (isNaN(hour) || (hour < 10 && hourString[0] !== '0')) {
    return undefined
  }

  const minuteString = date.substring(commaIndex + 15, commaIndex + 17)
  const minute = Number.parseInt(minuteString)
  if (isNaN(minute) || (minute < 10 && minuteString[0] !== '0')) {
    return undefined
  }

  const secondString = date.substring(commaIndex + 18, commaIndex + 20)
  const second = Number.parseInt(secondString)
  if (isNaN(second) || (second < 10 && secondString[0] !== '0')) {
    return undefined
  }

  return new Date(Date.UTC(year, monthIdx, day, hour, minute, second))
}

module.exports = {
  parseHttpDate
}
423
node_modules/undici/lib/util/timers.js
generated
vendored
Normal file
423
node_modules/undici/lib/util/timers.js
generated
vendored
Normal file
@ -0,0 +1,423 @@
'use strict'

/**
 * This module offers an optimized timer implementation designed for scenarios
 * where high precision is not critical.
 *
 * The timer achieves faster performance by using a low-resolution approach,
 * with an accuracy target of within 500ms. This makes it particularly useful
 * for timers with delays of 1 second or more, where exact timing is less
 * crucial.
 *
 * It's important to note that Node.js timers are inherently imprecise, as
 * delays can occur due to the event loop being blocked by other operations.
 * Consequently, timers may trigger later than their scheduled time.
 */

/**
 * The fastNow variable contains the internal fast timer clock value.
 *
 * @type {number}
 */
let fastNow = 0

/**
 * RESOLUTION_MS represents the target resolution time in milliseconds.
 *
 * @type {number}
 * @default 1000
 */
const RESOLUTION_MS = 1e3

/**
 * TICK_MS defines the desired interval in milliseconds between each tick.
 * The target value is set to half the resolution time, minus 1 ms, to account
 * for potential event loop overhead.
 *
 * @type {number}
 * @default 499
 */
const TICK_MS = (RESOLUTION_MS >> 1) - 1

/**
 * fastNowTimeout is a Node.js timer used to manage and process
 * the FastTimers stored in the `fastTimers` array.
 *
 * @type {NodeJS.Timeout}
 */
let fastNowTimeout

/**
 * The kFastTimer symbol is used to identify FastTimer instances.
 *
 * @type {Symbol}
 */
const kFastTimer = Symbol('kFastTimer')

/**
 * The fastTimers array contains all active FastTimers.
 *
 * @type {FastTimer[]}
 */
const fastTimers = []

/**
 * These constants represent the various states of a FastTimer.
 */

/**
 * The `NOT_IN_LIST` constant indicates that the FastTimer is not included
 * in the `fastTimers` array. Timers with this status will not be processed
 * during the next tick by the `onTick` function.
 *
 * A FastTimer can be re-added to the `fastTimers` array by invoking the
 * `refresh` method on the FastTimer instance.
 *
 * @type {-2}
 */
const NOT_IN_LIST = -2

/**
 * The `TO_BE_CLEARED` constant indicates that the FastTimer is scheduled
 * for removal from the `fastTimers` array. A FastTimer in this state will
 * be removed in the next tick by the `onTick` function and will no longer
 * be processed.
 *
 * This status is also set when the `clear` method is called on the FastTimer instance.
 *
 * @type {-1}
 */
const TO_BE_CLEARED = -1

/**
 * The `PENDING` constant signifies that the FastTimer is awaiting processing
 * in the next tick by the `onTick` function. Timers with this status will have
 * their `_idleStart` value set and their status updated to `ACTIVE` in the next tick.
 *
 * @type {0}
 */
const PENDING = 0

/**
 * The `ACTIVE` constant indicates that the FastTimer is active and waiting
 * for its timer to expire. During the next tick, the `onTick` function will
 * check if the timer has expired, and if so, it will execute the associated callback.
 *
 * @type {1}
 */
const ACTIVE = 1

/**
 * The onTick function processes the fastTimers array.
 *
 * @returns {void}
 */
function onTick () {
  /**
   * Increment the fastNow value by the TICK_MS value, regardless of the
   * actual time that has passed since the last tick. This approach ensures
   * independence from the system clock and from delays caused by a blocked
   * event loop.
   *
   * @type {number}
   */
  fastNow += TICK_MS

  /**
   * The `idx` variable is used to iterate over the `fastTimers` array.
   * Expired timers are removed by replacing them with the last element in the array.
   * Consequently, `idx` is only incremented when the current element is not removed.
   *
   * @type {number}
   */
  let idx = 0

  /**
   * The len variable will contain the length of the fastTimers array
   * and will be decremented when a FastTimer should be removed from the
   * fastTimers array.
   *
   * @type {number}
   */
  let len = fastTimers.length

  while (idx < len) {
    /**
     * @type {FastTimer}
     */
    const timer = fastTimers[idx]

    // If the timer is in the PENDING state, activate it so it can expire on
    // a later tick.
    if (timer._state === PENDING) {
      // Set the _idleStart value to the fastNow value minus the TICK_MS value
      // to account for the time the timer was in the PENDING state.
      timer._idleStart = fastNow - TICK_MS
      timer._state = ACTIVE
    } else if (
      timer._state === ACTIVE &&
      fastNow >= timer._idleStart + timer._idleTimeout
    ) {
      timer._state = TO_BE_CLEARED
      timer._idleStart = -1
      timer._onTimeout(timer._timerArg)
    }

    if (timer._state === TO_BE_CLEARED) {
      timer._state = NOT_IN_LIST

      // Move the last element to the current index and decrement len if it is
      // not the only element in the array.
      if (--len !== 0) {
        fastTimers[idx] = fastTimers[len]
      }
    } else {
      ++idx
    }
  }

  // Set the length of the fastTimers array to the new length, thus removing
  // the excess FastTimer elements from the array.
  fastTimers.length = len

  // If there are still active FastTimers in the array, refresh the Timer.
  // If there are no active FastTimers, the timer will be refreshed again
  // when a new FastTimer is instantiated.
  if (fastTimers.length !== 0) {
    refreshTimeout()
  }
}

function refreshTimeout () {
  // If the fastNowTimeout is already set, refresh it.
  if (fastNowTimeout) {
    fastNowTimeout.refresh()
  } else {
    // fastNowTimeout is not instantiated yet, create a new Timer.
    clearTimeout(fastNowTimeout)
    fastNowTimeout = setTimeout(onTick, TICK_MS)

    // If the Timer has an unref method, call it to allow the process to exit if
    // there are no other active handles.
    if (fastNowTimeout.unref) {
      fastNowTimeout.unref()
    }
  }
}

/**
 * The `FastTimer` class is a data structure designed to store and manage
 * timer information.
 */
class FastTimer {
  [kFastTimer] = true

  /**
   * The state of the timer, which can be one of the following:
   * - NOT_IN_LIST (-2)
   * - TO_BE_CLEARED (-1)
   * - PENDING (0)
   * - ACTIVE (1)
   *
   * @type {-2|-1|0|1}
   * @private
   */
  _state = NOT_IN_LIST

  /**
   * The number of milliseconds to wait before calling the callback.
   *
   * @type {number}
   * @private
   */
  _idleTimeout = -1

  /**
   * The time in milliseconds when the timer was started. This value is used to
   * calculate when the timer should expire.
   *
   * @type {number}
   * @default -1
   * @private
   */
  _idleStart = -1

  /**
   * The function to be executed when the timer expires.
   * @type {Function}
   * @private
   */
  _onTimeout

  /**
   * The argument to be passed to the callback when the timer expires.
   *
   * @type {*}
   * @private
   */
  _timerArg

  /**
   * @constructor
   * @param {Function} callback A function to be executed after the timer
   * expires.
   * @param {number} delay The time, in milliseconds, that the timer should
   * wait before the specified function or code is executed.
   * @param {*} arg
   */
  constructor (callback, delay, arg) {
    this._onTimeout = callback
    this._idleTimeout = delay
    this._timerArg = arg

    this.refresh()
  }

  /**
   * Sets the timer's start time to the current time, and reschedules the timer
   * to call its callback at the previously specified duration adjusted to the
   * current time.
   * Using this on a timer that has already called its callback will reactivate
   * the timer.
   *
   * @returns {void}
   */
  refresh () {
    // In the special case that the timer is not in the list of active timers,
    // add it back to the array to be processed in the next tick by the onTick
    // function.
    if (this._state === NOT_IN_LIST) {
      fastTimers.push(this)
    }

    // If the timer is the only active timer, refresh the fastNowTimeout for
    // better resolution.
    if (!fastNowTimeout || fastTimers.length === 1) {
      refreshTimeout()
    }

    // Setting the state to PENDING will cause the timer to be reset in the
    // next tick by the onTick function.
    this._state = PENDING
  }

  /**
   * The `clear` method cancels the timer, preventing it from executing.
   *
   * @returns {void}
   * @private
   */
  clear () {
    // Set the state to TO_BE_CLEARED to mark the timer for removal in the next
    // tick by the onTick function.
    this._state = TO_BE_CLEARED

    // Reset the _idleStart value to -1 to indicate that the timer is no longer
    // active.
    this._idleStart = -1
  }
}

/**
 * This module exports a setTimeout and clearTimeout function that can be
 * used as a drop-in replacement for the native functions.
 */
module.exports = {
  /**
   * The setTimeout() method sets a timer which executes a function once the
   * timer expires.
   * @param {Function} callback A function to be executed after the timer
   * expires.
   * @param {number} delay The time, in milliseconds, that the timer should
   * wait before the specified function or code is executed.
   * @param {*} [arg] An optional argument to be passed to the callback function
   * when the timer expires.
   * @returns {NodeJS.Timeout|FastTimer}
   */
  setTimeout (callback, delay, arg) {
    // If the delay is less than or equal to the RESOLUTION_MS value, return a
    // native Node.js Timer instance.
    return delay <= RESOLUTION_MS
      ? setTimeout(callback, delay, arg)
      : new FastTimer(callback, delay, arg)
  },
  /**
   * The clearTimeout method cancels an instantiated Timer previously created
   * by calling setTimeout.
   *
   * @param {NodeJS.Timeout|FastTimer} timeout
   */
  clearTimeout (timeout) {
    // If the timeout is a FastTimer, call its own clear method.
    if (timeout[kFastTimer]) {
      /**
       * @type {FastTimer}
       */
      timeout.clear()
    } else {
      // Otherwise it is an instance of a native NodeJS.Timeout, so call the
      // Node.js native clearTimeout function.
      clearTimeout(timeout)
    }
  },
  /**
   * The setFastTimeout() method sets a fastTimer which executes a function once
   * the timer expires.
   * @param {Function} callback A function to be executed after the timer
   * expires.
   * @param {number} delay The time, in milliseconds, that the timer should
   * wait before the specified function or code is executed.
   * @param {*} [arg] An optional argument to be passed to the callback function
   * when the timer expires.
   * @returns {FastTimer}
   */
  setFastTimeout (callback, delay, arg) {
    return new FastTimer(callback, delay, arg)
  },
  /**
   * The clearFastTimeout method cancels an instantiated FastTimer previously
   * created by calling setFastTimeout.
   *
   * @param {FastTimer} timeout
   */
  clearFastTimeout (timeout) {
    timeout.clear()
  },
  /**
   * The now method returns the value of the internal fast timer clock.
   *
   * @returns {number}
   */
  now () {
    return fastNow
  },
  /**
   * Trigger the onTick function to process the fastTimers array.
   * Exported for testing purposes only.
   * Marking as deprecated to discourage any use outside of testing.
   * @deprecated
   * @param {number} [delay=0] The delay in milliseconds to add to the now value.
   */
  tick (delay = 0) {
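    // Advance the internal clock to just past one resolution window, then
    // run two ticks: the first promotes PENDING timers to ACTIVE, the
    // second fires any ACTIVE timers that have since expired.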
    fastNow += delay - RESOLUTION_MS + 1

    onTick()
    onTick()
  },
  /**
   * Reset FastTimers.
   * Exported for testing purposes only.
   * Marking as deprecated to discourage any use outside of testing.
   * @deprecated
   */
  reset () {
    fastNow = 0
    fastTimers.length = 0
    clearTimeout(fastNowTimeout)
    fastNowTimeout = null
  },
  /**
   * Exporting for testing purposes only.
   * Marking as deprecated to discourage any use outside of testing.
   * @deprecated
   */
  kFastTimer
}
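For orientation, a usage sketch of the exported helpers (the require path is illustrative):

const timers = require('./util/timers')

// Delays above RESOLUTION_MS (1s) are handled by the coarse FastTimer;
// shorter delays fall back to the native setTimeout.
const handle = timers.setTimeout(() => console.log('fired'), 5000, null)
timers.clearTimeout(handle) // dispatches on kFastTimer internally

// Force the low-resolution path regardless of the delay:
const fast = timers.setFastTimeout(() => console.log('fast'), 500, null)
timers.clearFastTimeout(fast)

console.log(timers.now()) // current value of the internal coarse clock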
262
node_modules/undici/lib/cache/cache.js → node_modules/undici/lib/web/cache/cache.js
generated
vendored
262
node_modules/undici/lib/cache/cache.js → node_modules/undici/lib/web/cache/cache.js
generated
vendored
@ -1,17 +1,14 @@
'use strict'

const { kConstruct } = require('./symbols')
const { urlEquals, fieldValues: getFieldValues } = require('./util')
const { kEnumerableProperty, isDisturbed } = require('../core/util')
const { kHeadersList } = require('../core/symbols')
const { kConstruct } = require('../../core/symbols')
const { urlEquals, getFieldValues } = require('./util')
const { kEnumerableProperty, isDisturbed } = require('../../core/util')
const { webidl } = require('../fetch/webidl')
const { Response, cloneResponse } = require('../fetch/response')
const { Request } = require('../fetch/request')
const { kState, kHeaders, kGuard, kRealm } = require('../fetch/symbols')
const { cloneResponse, fromInnerResponse, getResponseState } = require('../fetch/response')
const { Request, fromInnerRequest, getRequestState } = require('../fetch/request')
const { fetching } = require('../fetch/index')
const { urlIsHttpHttpsScheme, createDeferredPromise, readAllBytes } = require('../fetch/util')
const assert = require('assert')
const { getGlobalDispatcher } = require('../global')
const assert = require('node:assert')

/**
 * @see https://w3c.github.io/ServiceWorker/#dfn-cache-batch-operation
@ -39,17 +36,20 @@ class Cache {
      webidl.illegalConstructor()
    }

    webidl.util.markAsUncloneable(this)
    this.#relevantRequestResponseList = arguments[1]
  }

  async match (request, options = {}) {
    webidl.brandCheck(this, Cache)
    webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.match' })

    request = webidl.converters.RequestInfo(request)
    options = webidl.converters.CacheQueryOptions(options)
    const prefix = 'Cache.match'
    webidl.argumentLengthCheck(arguments, 1, prefix)

    const p = await this.matchAll(request, options)
    request = webidl.converters.RequestInfo(request, prefix, 'request')
    options = webidl.converters.CacheQueryOptions(options, prefix, 'options')

    const p = this.#internalMatchAll(request, options, 1)

    if (p.length === 0) {
      return
@ -61,76 +61,20 @@ class Cache {
  async matchAll (request = undefined, options = {}) {
    webidl.brandCheck(this, Cache)

    if (request !== undefined) request = webidl.converters.RequestInfo(request)
    options = webidl.converters.CacheQueryOptions(options)
    const prefix = 'Cache.matchAll'
    if (request !== undefined) request = webidl.converters.RequestInfo(request, prefix, 'request')
    options = webidl.converters.CacheQueryOptions(options, prefix, 'options')

    // 1.
    let r = null

    // 2.
    if (request !== undefined) {
      if (request instanceof Request) {
        // 2.1.1
        r = request[kState]

        // 2.1.2
        if (r.method !== 'GET' && !options.ignoreMethod) {
          return []
        }
      } else if (typeof request === 'string') {
        // 2.2.1
        r = new Request(request)[kState]
      }
    }

    // 5.
    // 5.1
    const responses = []

    // 5.2
    if (request === undefined) {
      // 5.2.1
      for (const requestResponse of this.#relevantRequestResponseList) {
        responses.push(requestResponse[1])
      }
    } else { // 5.3
      // 5.3.1
      const requestResponses = this.#queryCache(r, options)

      // 5.3.2
      for (const requestResponse of requestResponses) {
        responses.push(requestResponse[1])
      }
    }

    // 5.4
    // We don't implement CORS so we don't need to loop over the responses, yay!

    // 5.5.1
    const responseList = []

    // 5.5.2
    for (const response of responses) {
      // 5.5.2.1
      const responseObject = new Response(response.body?.source ?? null)
      const body = responseObject[kState].body
      responseObject[kState] = response
      responseObject[kState].body = body
      responseObject[kHeaders][kHeadersList] = response.headersList
      responseObject[kHeaders][kGuard] = 'immutable'

      responseList.push(responseObject)
    }

    // 6.
    return Object.freeze(responseList)
    return this.#internalMatchAll(request, options)
  }

  async add (request) {
    webidl.brandCheck(this, Cache)
    webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.add' })

    request = webidl.converters.RequestInfo(request)
    const prefix = 'Cache.add'
    webidl.argumentLengthCheck(arguments, 1, prefix)

    request = webidl.converters.RequestInfo(request, prefix, 'request')

    // 1.
    const requests = [request]
@ -144,9 +88,9 @@ class Cache {

  async addAll (requests) {
    webidl.brandCheck(this, Cache)
    webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.addAll' })

    requests = webidl.converters['sequence<RequestInfo>'](requests)
    const prefix = 'Cache.addAll'
    webidl.argumentLengthCheck(arguments, 1, prefix)

    // 1.
    const responsePromises = []
@ -155,18 +99,28 @@ class Cache {
    const requestList = []

    // 3.
    for (const request of requests) {
    for (let request of requests) {
      if (request === undefined) {
        throw webidl.errors.conversionFailed({
          prefix,
          argument: 'Argument 1',
          types: ['undefined is not allowed']
        })
      }

      request = webidl.converters.RequestInfo(request)

      if (typeof request === 'string') {
        continue
      }

      // 3.1
      const r = request[kState]
      const r = getRequestState(request)

      // 3.2
      if (!urlIsHttpHttpsScheme(r.url) || r.method !== 'GET') {
        throw webidl.errors.exception({
          header: 'Cache.addAll',
          header: prefix,
          message: 'Expected http/s scheme when method is not GET.'
        })
      }
@ -179,12 +133,12 @@ class Cache {
    // 5.
    for (const request of requests) {
      // 5.1
      const r = new Request(request)[kState]
      const r = getRequestState(new Request(request))

      // 5.2
      if (!urlIsHttpHttpsScheme(r.url)) {
        throw webidl.errors.exception({
          header: 'Cache.addAll',
          header: prefix,
          message: 'Expected http/s scheme.'
        })
      }
@ -202,7 +156,6 @@ class Cache {
      // 5.7
      fetchControllers.push(fetching({
        request: r,
        dispatcher: getGlobalDispatcher(),
        processResponse (response) {
          // 1.
          if (response.type === 'error' || response.status === 206 || response.status < 200 || response.status > 299) {
@ -305,36 +258,38 @@ class Cache {

  async put (request, response) {
    webidl.brandCheck(this, Cache)
    webidl.argumentLengthCheck(arguments, 2, { header: 'Cache.put' })

    request = webidl.converters.RequestInfo(request)
    response = webidl.converters.Response(response)
    const prefix = 'Cache.put'
    webidl.argumentLengthCheck(arguments, 2, prefix)

    request = webidl.converters.RequestInfo(request, prefix, 'request')
    response = webidl.converters.Response(response, prefix, 'response')

    // 1.
    let innerRequest = null

    // 2.
    if (request instanceof Request) {
      innerRequest = request[kState]
    if (webidl.is.Request(request)) {
      innerRequest = getRequestState(request)
    } else { // 3.
      innerRequest = new Request(request)[kState]
      innerRequest = getRequestState(new Request(request))
    }

    // 4.
    if (!urlIsHttpHttpsScheme(innerRequest.url) || innerRequest.method !== 'GET') {
      throw webidl.errors.exception({
        header: 'Cache.put',
        header: prefix,
        message: 'Expected an http/s scheme when method is not GET'
      })
    }

    // 5.
    const innerResponse = response[kState]
    const innerResponse = getResponseState(response)

    // 6.
    if (innerResponse.status === 206) {
      throw webidl.errors.exception({
        header: 'Cache.put',
        header: prefix,
        message: 'Got 206 status'
      })
    }
@ -349,7 +304,7 @@ class Cache {
        // 7.2.1
        if (fieldValue === '*') {
          throw webidl.errors.exception({
            header: 'Cache.put',
            header: prefix,
            message: 'Got * vary field value'
          })
        }
@ -359,7 +314,7 @@ class Cache {
    // 8.
    if (innerResponse.body && (isDisturbed(innerResponse.body.stream) || innerResponse.body.stream.locked)) {
      throw webidl.errors.exception({
        header: 'Cache.put',
        header: prefix,
        message: 'Response body is locked or disturbed'
      })
    }
@ -379,7 +334,7 @@ class Cache {
      const reader = stream.getReader()

      // 11.3
      readAllBytes(reader).then(bodyReadPromise.resolve, bodyReadPromise.reject)
      readAllBytes(reader, bodyReadPromise.resolve, bodyReadPromise.reject)
    } else {
      bodyReadPromise.resolve(undefined)
    }
@ -434,18 +389,20 @@ class Cache {

  async delete (request, options = {}) {
    webidl.brandCheck(this, Cache)
    webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.delete' })

    request = webidl.converters.RequestInfo(request)
    options = webidl.converters.CacheQueryOptions(options)
    const prefix = 'Cache.delete'
    webidl.argumentLengthCheck(arguments, 1, prefix)

    request = webidl.converters.RequestInfo(request, prefix, 'request')
    options = webidl.converters.CacheQueryOptions(options, prefix, 'options')

    /**
     * @type {Request}
     */
    let r = null

    if (request instanceof Request) {
      r = request[kState]
    if (webidl.is.Request(request)) {
      r = getRequestState(request)

      if (r.method !== 'GET' && !options.ignoreMethod) {
        return false
@ -453,7 +410,7 @@ class Cache {
    } else {
      assert(typeof request === 'string')

      r = new Request(request)[kState]
      r = getRequestState(new Request(request))
    }

    /** @type {CacheBatchOperation[]} */
@ -494,13 +451,15 @@ class Cache {
   * @see https://w3c.github.io/ServiceWorker/#dom-cache-keys
   * @param {any} request
   * @param {import('../../types/cache').CacheQueryOptions} options
   * @returns {readonly Request[]}
   * @returns {Promise<readonly Request[]>}
   */
  async keys (request = undefined, options = {}) {
    webidl.brandCheck(this, Cache)

    if (request !== undefined) request = webidl.converters.RequestInfo(request)
    options = webidl.converters.CacheQueryOptions(options)
    const prefix = 'Cache.keys'

    if (request !== undefined) request = webidl.converters.RequestInfo(request, prefix, 'request')
    options = webidl.converters.CacheQueryOptions(options, prefix, 'options')

    // 1.
    let r = null
@ -508,16 +467,16 @@ class Cache {
    // 2.
    if (request !== undefined) {
      // 2.1
      if (request instanceof Request) {
      if (webidl.is.Request(request)) {
        // 2.1.1
        r = request[kState]
        r = getRequestState(request)

        // 2.1.2
        if (r.method !== 'GET' && !options.ignoreMethod) {
          return []
        }
      } else if (typeof request === 'string') { // 2.2
        r = new Request(request)[kState]
        r = getRequestState(new Request(request))
      }
    }

@ -553,12 +512,12 @@ class Cache {

      // 5.4.2
      for (const request of requests) {
        const requestObject = new Request('https://a')
        requestObject[kState] = request
        requestObject[kHeaders][kHeadersList] = request.headersList
        requestObject[kHeaders][kGuard] = 'immutable'
        requestObject[kRealm] = request.client

        const requestObject = fromInnerRequest(
          request,
          undefined,
          new AbortController().signal,
          'immutable'
        )
        // 5.4.2.1
        requestList.push(requestObject)
      }
@ -783,6 +742,68 @@ class Cache {

    return true
  }

  #internalMatchAll (request, options, maxResponses = Infinity) {
    // 1.
    let r = null

    // 2.
    if (request !== undefined) {
      if (webidl.is.Request(request)) {
        // 2.1.1
        r = getRequestState(request)

        // 2.1.2
        if (r.method !== 'GET' && !options.ignoreMethod) {
          return []
        }
      } else if (typeof request === 'string') {
        // 2.2.1
        r = getRequestState(new Request(request))
      }
    }

    // 5.
    // 5.1
    const responses = []

    // 5.2
    if (request === undefined) {
      // 5.2.1
      for (const requestResponse of this.#relevantRequestResponseList) {
        responses.push(requestResponse[1])
      }
    } else { // 5.3
      // 5.3.1
      const requestResponses = this.#queryCache(r, options)

      // 5.3.2
      for (const requestResponse of requestResponses) {
        responses.push(requestResponse[1])
      }
    }

    // 5.4
    // We don't implement CORS so we don't need to loop over the responses, yay!

    // 5.5.1
    const responseList = []

    // 5.5.2
    for (const response of responses) {
      // 5.5.2.1
      const responseObject = fromInnerResponse(response, 'immutable')

      responseList.push(responseObject.clone())

      if (responseList.length >= maxResponses) {
        break
      }
    }

    // 6.
    return Object.freeze(responseList)
  }
}

Object.defineProperties(Cache.prototype, {
@ -803,17 +824,17 @@ const cacheQueryOptionConverters = [
  {
    key: 'ignoreSearch',
    converter: webidl.converters.boolean,
    defaultValue: false
    defaultValue: () => false
  },
  {
    key: 'ignoreMethod',
    converter: webidl.converters.boolean,
    defaultValue: false
    defaultValue: () => false
  },
  {
    key: 'ignoreVary',
    converter: webidl.converters.boolean,
    defaultValue: false
    defaultValue: () => false
  }
]

@ -827,7 +848,10 @@ webidl.converters.MultiCacheQueryOptions = webidl.dictionaryConverter([
  }
])

webidl.converters.Response = webidl.interfaceConverter(Response)
webidl.converters.Response = webidl.interfaceConverter(
  webidl.is.Response,
  'Response'
)

webidl.converters['sequence<RequestInfo>'] = webidl.sequenceConverter(
  webidl.converters.RequestInfo
@ -1,9 +1,9 @@
'use strict'

const { kConstruct } = require('./symbols')
const { Cache } = require('./cache')
const { webidl } = require('../fetch/webidl')
const { kEnumerableProperty } = require('../core/util')
const { kEnumerableProperty } = require('../../core/util')
const { kConstruct } = require('../../core/symbols')

class CacheStorage {
  /**
@ -16,11 +16,13 @@ class CacheStorage {
    if (arguments[0] !== kConstruct) {
      webidl.illegalConstructor()
    }

    webidl.util.markAsUncloneable(this)
  }

  async match (request, options = {}) {
    webidl.brandCheck(this, CacheStorage)
    webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.match' })
    webidl.argumentLengthCheck(arguments, 1, 'CacheStorage.match')

    request = webidl.converters.RequestInfo(request)
    options = webidl.converters.MultiCacheQueryOptions(options)
@ -57,9 +59,11 @@ class CacheStorage {
   */
  async has (cacheName) {
    webidl.brandCheck(this, CacheStorage)
    webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.has' })

    cacheName = webidl.converters.DOMString(cacheName)
    const prefix = 'CacheStorage.has'
    webidl.argumentLengthCheck(arguments, 1, prefix)

    cacheName = webidl.converters.DOMString(cacheName, prefix, 'cacheName')

    // 2.1.1
    // 2.2
@ -73,9 +77,11 @@ class CacheStorage {
   */
  async open (cacheName) {
    webidl.brandCheck(this, CacheStorage)
    webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.open' })

    cacheName = webidl.converters.DOMString(cacheName)
    const prefix = 'CacheStorage.open'
    webidl.argumentLengthCheck(arguments, 1, prefix)

    cacheName = webidl.converters.DOMString(cacheName, prefix, 'cacheName')

    // 2.1
    if (this.#caches.has(cacheName)) {
@ -105,16 +111,18 @@ class CacheStorage {
   */
  async delete (cacheName) {
    webidl.brandCheck(this, CacheStorage)
    webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.delete' })

    cacheName = webidl.converters.DOMString(cacheName)
    const prefix = 'CacheStorage.delete'
    webidl.argumentLengthCheck(arguments, 1, prefix)

    cacheName = webidl.converters.DOMString(cacheName, prefix, 'cacheName')

    return this.#caches.delete(cacheName)
  }

  /**
   * @see https://w3c.github.io/ServiceWorker/#cache-storage-keys
   * @returns {string[]}
   * @returns {Promise<string[]>}
   */
  async keys () {
    webidl.brandCheck(this, CacheStorage)
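Together these classes mirror the ServiceWorker Cache/CacheStorage API. A minimal usage sketch, assuming undici's exported `caches` instance and `Response` class (both assumptions about the public API):

const { caches, Response } = require('undici')

async function demo () {
  const cache = await caches.open('v1')                  // Cache instance
  await cache.put('https://example.com/', new Response('hello'))
  const hit = await cache.match('https://example.com/') // cloned Response
  console.log(await hit.text())                          // 'hello'
  await caches.delete('v1')                              // drop the whole cache
}

demo()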
16
node_modules/undici/lib/cache/util.js → node_modules/undici/lib/web/cache/util.js
generated
vendored
16
node_modules/undici/lib/cache/util.js → node_modules/undici/lib/web/cache/util.js
generated
vendored
@ -1,7 +1,7 @@
'use strict'

const assert = require('assert')
const { URLSerializer } = require('../fetch/dataURL')
const assert = require('node:assert')
const { URLSerializer } = require('../fetch/data-url')
const { isValidHeaderName } = require('../fetch/util')

/**
@ -23,7 +23,7 @@ function urlEquals (A, B, excludeFragment = false) {
 * @see https://github.com/chromium/chromium/blob/694d20d134cb553d8d89e5500b9148012b1ba299/content/browser/cache_storage/cache_storage_cache.cc#L260-L262
 * @param {string} header
 */
function fieldValues (header) {
function getFieldValues (header) {
  assert(header !== null)

  const values = []
@ -31,13 +31,9 @@ function fieldValues (header) {
  for (let value of header.split(',')) {
    value = value.trim()

    if (!value.length) {
      continue
    } else if (!isValidHeaderName(value)) {
      continue
    if (isValidHeaderName(value)) {
      values.push(value)
    }

    values.push(value)
  }

  return values
@ -45,5 +41,5 @@ function fieldValues (header) {

module.exports = {
  urlEquals,
  fieldValues
  getFieldValues
}
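A sketch of the renamed helper's behavior (internal module, require path illustrative):

const { getFieldValues } = require('./util')

// Splits a comma-separated field value into trimmed entries, silently
// dropping empty segments and invalid header names.
getFieldValues('Accept-Encoding, , User-Agent')
// => ['Accept-Encoding', 'User-Agent']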
79
node_modules/undici/lib/cookies/index.js → node_modules/undici/lib/web/cookies/index.js
generated
vendored
79
node_modules/undici/lib/cookies/index.js → node_modules/undici/lib/web/cookies/index.js
generated
vendored
@ -1,22 +1,24 @@
'use strict'

const { parseSetCookie } = require('./parse')
const { stringify, getHeadersList } = require('./util')
const { stringify } = require('./util')
const { webidl } = require('../fetch/webidl')
const { Headers } = require('../fetch/headers')

const brandChecks = webidl.brandCheckMultiple([Headers, globalThis.Headers].filter(Boolean))

/**
 * @typedef {Object} Cookie
 * @property {string} name
 * @property {string} value
 * @property {Date|number|undefined} expires
 * @property {number|undefined} maxAge
 * @property {string|undefined} domain
 * @property {string|undefined} path
 * @property {boolean|undefined} secure
 * @property {boolean|undefined} httpOnly
 * @property {'Strict'|'Lax'|'None'} sameSite
 * @property {string[]} unparsed
 * @property {Date|number} [expires]
 * @property {number} [maxAge]
 * @property {string} [domain]
 * @property {string} [path]
 * @property {boolean} [secure]
 * @property {boolean} [httpOnly]
 * @property {'Strict'|'Lax'|'None'} [sameSite]
 * @property {string[]} [unparsed]
 */

/**
@ -24,11 +26,13 @@ const { Headers } = require('../fetch/headers')
 * @returns {Record<string, string>}
 */
function getCookies (headers) {
  webidl.argumentLengthCheck(arguments, 1, { header: 'getCookies' })
  webidl.argumentLengthCheck(arguments, 1, 'getCookies')

  webidl.brandCheck(headers, Headers, { strict: false })
  brandChecks(headers)

  const cookie = headers.get('cookie')

  /** @type {Record<string, string>} */
  const out = {}

  if (!cookie) {
@ -51,11 +55,12 @@ function getCookies (headers) {
 * @returns {void}
 */
function deleteCookie (headers, name, attributes) {
  webidl.argumentLengthCheck(arguments, 2, { header: 'deleteCookie' })
  brandChecks(headers)

  webidl.brandCheck(headers, Headers, { strict: false })
  const prefix = 'deleteCookie'
  webidl.argumentLengthCheck(arguments, 2, prefix)

  name = webidl.converters.DOMString(name)
  name = webidl.converters.DOMString(name, prefix, 'name')
  attributes = webidl.converters.DeleteCookieAttributes(attributes)

  // Matches behavior of
@ -73,18 +78,27 @@ function deleteCookie (headers, name, attributes) {
 * @returns {Cookie[]}
 */
function getSetCookies (headers) {
  webidl.argumentLengthCheck(arguments, 1, { header: 'getSetCookies' })
  webidl.argumentLengthCheck(arguments, 1, 'getSetCookies')

  webidl.brandCheck(headers, Headers, { strict: false })
  brandChecks(headers)

  const cookies = getHeadersList(headers).cookies
  const cookies = headers.getSetCookie()

  if (!cookies) {
    return []
  }

  // In older versions of undici, cookies is a list of name:value.
  return cookies.map((pair) => parseSetCookie(Array.isArray(pair) ? pair[1] : pair))
  return cookies.map((pair) => parseSetCookie(pair))
}

/**
 * Parses a cookie string
 * @param {string} cookie
 */
function parseCookie (cookie) {
  cookie = webidl.converters.DOMString(cookie)

  return parseSetCookie(cookie)
}

/**
@ -93,16 +107,16 @@ function getSetCookies (headers) {
 * @returns {void}
 */
function setCookie (headers, cookie) {
  webidl.argumentLengthCheck(arguments, 2, { header: 'setCookie' })
  webidl.argumentLengthCheck(arguments, 2, 'setCookie')

  webidl.brandCheck(headers, Headers, { strict: false })
  brandChecks(headers)

  cookie = webidl.converters.Cookie(cookie)

  const str = stringify(cookie)

  if (str) {
    headers.append('Set-Cookie', stringify(cookie))
    headers.append('set-cookie', str, true)
  }
}

@ -110,12 +124,12 @@ webidl.converters.DeleteCookieAttributes = webidl.dictionaryConverter([
  {
    converter: webidl.nullableConverter(webidl.converters.DOMString),
    key: 'path',
    defaultValue: null
    defaultValue: () => null
  },
  {
    converter: webidl.nullableConverter(webidl.converters.DOMString),
    key: 'domain',
    defaultValue: null
    defaultValue: () => null
  }
])

@ -137,32 +151,32 @@ webidl.converters.Cookie = webidl.dictionaryConverter([
      return new Date(value)
    }),
    key: 'expires',
    defaultValue: null
    defaultValue: () => null
  },
  {
    converter: webidl.nullableConverter(webidl.converters['long long']),
    key: 'maxAge',
    defaultValue: null
    defaultValue: () => null
  },
  {
    converter: webidl.nullableConverter(webidl.converters.DOMString),
    key: 'domain',
    defaultValue: null
    defaultValue: () => null
  },
  {
    converter: webidl.nullableConverter(webidl.converters.DOMString),
    key: 'path',
    defaultValue: null
    defaultValue: () => null
  },
  {
    converter: webidl.nullableConverter(webidl.converters.boolean),
    key: 'secure',
    defaultValue: null
    defaultValue: () => null
  },
  {
    converter: webidl.nullableConverter(webidl.converters.boolean),
    key: 'httpOnly',
    defaultValue: null
    defaultValue: () => null
  },
  {
    converter: webidl.converters.USVString,
@ -172,7 +186,7 @@ webidl.converters.Cookie = webidl.dictionaryConverter([
  {
    converter: webidl.sequenceConverter(webidl.converters.DOMString),
    key: 'unparsed',
    defaultValue: []
    defaultValue: () => new Array(0)
  }
])

@ -180,5 +194,6 @@ module.exports = {
  getCookies,
  deleteCookie,
  getSetCookies,
  setCookie
  setCookie,
  parseCookie
}
15
node_modules/undici/lib/cookies/parse.js → node_modules/undici/lib/web/cookies/parse.js
generated
vendored
15
node_modules/undici/lib/cookies/parse.js → node_modules/undici/lib/web/cookies/parse.js
generated
vendored
@ -2,14 +2,15 @@
|
||||
|
||||
const { maxNameValuePairSize, maxAttributeValueSize } = require('./constants')
|
||||
const { isCTLExcludingHtab } = require('./util')
|
||||
const { collectASequenceOfCodePointsFast } = require('../fetch/dataURL')
|
||||
const assert = require('assert')
|
||||
const { collectASequenceOfCodePointsFast } = require('../fetch/data-url')
|
||||
const assert = require('node:assert')
|
||||
const { unescape } = require('node:querystring')
|
||||
|
||||
/**
|
||||
* @description Parses the field-value attributes of a set-cookie header string.
|
||||
* @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4
|
||||
* @param {string} header
|
||||
* @returns if the header is invalid, null will be returned
|
||||
* @returns {import('./index').Cookie|null} if the header is invalid, null will be returned
|
||||
*/
|
||||
function parseSetCookie (header) {
|
||||
// 1. If the set-cookie-string contains a %x00-08 / %x0A-1F / %x7F
|
||||
@ -76,8 +77,12 @@ function parseSetCookie (header) {
|
||||
|
||||
// 6. The cookie-name is the name string, and the cookie-value is the
|
||||
// value string.
|
||||
// https://datatracker.ietf.org/doc/html/rfc6265
|
||||
// To maximize compatibility with user agents, servers that wish to
|
||||
// store arbitrary data in a cookie-value SHOULD encode that data, for
|
||||
// example, using Base64 [RFC4648].
|
||||
return {
|
||||
name, value, ...parseUnparsedAttributes(unparsedAttributes)
|
||||
name, value: unescape(value), ...parseUnparsedAttributes(unparsedAttributes)
|
||||
}
|
||||
}
|
||||
|
||||
@ -85,7 +90,7 @@ function parseSetCookie (header) {
|
||||
* Parses the remaining attributes of a set-cookie header
|
||||
* @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4
|
||||
* @param {string} unparsedAttributes
|
||||
* @param {[Object.<string, unknown>]={}} cookieAttributeList
|
||||
* @param {Object.<string, unknown>} [cookieAttributeList={}]
|
||||
*/
|
||||
function parseUnparsedAttributes (unparsedAttributes, cookieAttributeList = {}) {
|
||||
// 1. If the unparsed-attributes string is empty, skip the rest of
|
161
node_modules/undici/lib/cookies/util.js → node_modules/undici/lib/web/cookies/util.js
generated
vendored
161
node_modules/undici/lib/cookies/util.js → node_modules/undici/lib/web/cookies/util.js
generated
vendored
@ -1,24 +1,22 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('assert')
|
||||
const { kHeadersList } = require('../core/symbols')
|
||||
|
||||
/**
|
||||
* @param {string} value
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function isCTLExcludingHtab (value) {
|
||||
if (value.length === 0) {
|
||||
return false
|
||||
}
|
||||
|
||||
for (const char of value) {
|
||||
const code = char.charCodeAt(0)
|
||||
for (let i = 0; i < value.length; ++i) {
|
||||
const code = value.charCodeAt(i)
|
||||
|
||||
if (
|
||||
(code >= 0x00 || code <= 0x08) ||
|
||||
(code >= 0x0A || code <= 0x1F) ||
|
||||
(code >= 0x00 && code <= 0x08) ||
|
||||
(code >= 0x0A && code <= 0x1F) ||
|
||||
code === 0x7F
|
||||
) {
|
||||
return false
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
@ -31,28 +29,29 @@ function isCTLExcludingHtab (value) {
|
||||
* @param {string} name
|
||||
*/
|
||||
function validateCookieName (name) {
|
||||
for (const char of name) {
|
||||
const code = char.charCodeAt(0)
|
||||
for (let i = 0; i < name.length; ++i) {
|
||||
const code = name.charCodeAt(i)
|
||||
|
||||
if (
|
||||
(code <= 0x20 || code > 0x7F) ||
|
||||
char === '(' ||
|
||||
char === ')' ||
|
||||
char === '>' ||
|
||||
char === '<' ||
|
||||
char === '@' ||
|
||||
char === ',' ||
|
||||
char === ';' ||
|
||||
char === ':' ||
|
||||
char === '\\' ||
|
||||
char === '"' ||
|
||||
char === '/' ||
|
||||
char === '[' ||
|
||||
char === ']' ||
|
||||
char === '?' ||
|
||||
char === '=' ||
|
||||
char === '{' ||
|
||||
char === '}'
|
||||
code < 0x21 || // exclude CTLs (0-31), SP and HT
|
||||
code > 0x7E || // exclude non-ascii and DEL
|
||||
code === 0x22 || // "
|
||||
code === 0x28 || // (
|
||||
code === 0x29 || // )
|
||||
code === 0x3C || // <
|
||||
code === 0x3E || // >
|
||||
code === 0x40 || // @
|
||||
code === 0x2C || // ,
|
||||
code === 0x3B || // ;
|
||||
code === 0x3A || // :
|
||||
code === 0x5C || // \
|
||||
code === 0x2F || // /
|
||||
code === 0x5B || // [
|
||||
code === 0x5D || // ]
|
||||
code === 0x3F || // ?
|
||||
code === 0x3D || // =
|
||||
code === 0x7B || // {
|
||||
code === 0x7D // }
|
||||
) {
|
||||
throw new Error('Invalid cookie name')
|
||||
}
|
||||
@ -68,18 +67,30 @@ function validateCookieName (name) {
|
||||
* @param {string} value
|
||||
*/
|
||||
function validateCookieValue (value) {
|
||||
for (const char of value) {
|
||||
const code = char.charCodeAt(0)
|
||||
let len = value.length
|
||||
let i = 0
|
||||
|
||||
// if the value is wrapped in DQUOTE
|
||||
if (value[0] === '"') {
|
||||
if (len === 1 || value[len - 1] !== '"') {
|
||||
throw new Error('Invalid cookie value')
|
||||
}
|
||||
--len
|
||||
++i
|
||||
}
|
||||
|
||||
while (i < len) {
|
||||
const code = value.charCodeAt(i++)
|
||||
|
||||
if (
|
||||
code < 0x21 || // exclude CTLs (0-31)
|
||||
code === 0x22 ||
|
||||
code === 0x2C ||
|
||||
code === 0x3B ||
|
||||
code === 0x5C ||
|
||||
code > 0x7E // non-ascii
|
||||
code > 0x7E || // non-ascii and DEL (127)
|
||||
code === 0x22 || // "
|
||||
code === 0x2C || // ,
|
||||
code === 0x3B || // ;
|
||||
code === 0x5C // \
|
||||
) {
|
||||
throw new Error('Invalid header value')
|
||||
throw new Error('Invalid cookie value')
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -89,10 +100,14 @@ function validateCookieValue (value) {
|
||||
* @param {string} path
|
||||
*/
|
||||
function validateCookiePath (path) {
|
||||
for (const char of path) {
|
||||
const code = char.charCodeAt(0)
|
||||
for (let i = 0; i < path.length; ++i) {
|
||||
const code = path.charCodeAt(i)
|
||||
|
||||
if (code < 0x21 || char === ';') {
|
||||
if (
|
||||
code < 0x20 || // exclude CTLs (0-31)
|
||||
code === 0x7F || // DEL
|
||||
code === 0x3B // ;
|
||||
) {
|
||||
throw new Error('Invalid cookie path')
|
||||
}
|
||||
}
|
||||
@ -113,6 +128,18 @@ function validateCookieDomain (domain) {
|
||||
}
|
||||
}
|
||||
|
||||
const IMFDays = [
|
||||
'Sun', 'Mon', 'Tue', 'Wed',
|
||||
'Thu', 'Fri', 'Sat'
|
||||
]
|
||||
|
||||
const IMFMonths = [
|
||||
'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
|
||||
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'
|
||||
]
|
||||
|
||||
const IMFPaddedNumbers = Array(61).fill(0).map((_, i) => i.toString().padStart(2, '0'))
|
||||
|
||||
/**
|
||||
* @see https://www.rfc-editor.org/rfc/rfc7231#section-7.1.1.1
|
||||
* @param {number|Date} date
|
||||
@ -159,25 +186,7 @@ function toIMFDate (date) {
|
||||
date = new Date(date)
|
||||
}
|
||||
|
||||
const days = [
|
||||
'Sun', 'Mon', 'Tue', 'Wed',
|
||||
'Thu', 'Fri', 'Sat'
|
||||
]
|
||||
|
||||
const months = [
|
||||
'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
|
||||
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'
|
||||
]
|
||||
|
||||
const dayName = days[date.getUTCDay()]
|
||||
const day = date.getUTCDate().toString().padStart(2, '0')
|
||||
const month = months[date.getUTCMonth()]
|
||||
const year = date.getUTCFullYear()
|
||||
const hour = date.getUTCHours().toString().padStart(2, '0')
|
||||
const minute = date.getUTCMinutes().toString().padStart(2, '0')
|
||||
const second = date.getUTCSeconds().toString().padStart(2, '0')
|
||||
|
||||
return `${dayName}, ${day} ${month} ${year} ${hour}:${minute}:${second} GMT`
|
||||
return `${IMFDays[date.getUTCDay()]}, ${IMFPaddedNumbers[date.getUTCDate()]} ${IMFMonths[date.getUTCMonth()]} ${date.getUTCFullYear()} ${IMFPaddedNumbers[date.getUTCHours()]}:${IMFPaddedNumbers[date.getUTCMinutes()]}:${IMFPaddedNumbers[date.getUTCSeconds()]} GMT`
|
||||
}
|
||||
|
||||
/**
|
||||
@ -263,29 +272,11 @@ function stringify (cookie) {
|
||||
return out.join('; ')
|
||||
}
|
||||
|
||||
let kHeadersListNode
|
||||
|
||||
function getHeadersList (headers) {
|
||||
if (headers[kHeadersList]) {
|
||||
return headers[kHeadersList]
|
||||
}
|
||||
|
||||
if (!kHeadersListNode) {
|
||||
kHeadersListNode = Object.getOwnPropertySymbols(headers).find(
|
||||
(symbol) => symbol.description === 'headers list'
|
||||
)
|
||||
|
||||
assert(kHeadersListNode, 'Headers cannot be parsed')
|
||||
}
|
||||
|
||||
const headersList = headers[kHeadersListNode]
|
||||
assert(headersList)
|
||||
|
||||
return headersList
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
isCTLExcludingHtab,
|
||||
stringify,
|
||||
getHeadersList
|
||||
validateCookieName,
|
||||
validateCookiePath,
|
||||
validateCookieValue,
|
||||
toIMFDate,
|
||||
stringify
|
||||
}
|
399
node_modules/undici/lib/web/eventsource/eventsource-stream.js
generated
vendored
Normal file
399
node_modules/undici/lib/web/eventsource/eventsource-stream.js
generated
vendored
Normal file
@ -0,0 +1,399 @@
|
||||
'use strict'
|
||||
const { Transform } = require('node:stream')
|
||||
const { isASCIINumber, isValidLastEventId } = require('./util')
|
||||
|
||||
/**
|
||||
* @type {number[]} BOM
|
||||
*/
|
||||
const BOM = [0xEF, 0xBB, 0xBF]
|
||||
/**
|
||||
* @type {10} LF
|
||||
*/
|
||||
const LF = 0x0A
|
||||
/**
|
||||
* @type {13} CR
|
||||
*/
|
||||
const CR = 0x0D
|
||||
/**
|
||||
* @type {58} COLON
|
||||
*/
|
||||
const COLON = 0x3A
|
||||
/**
|
||||
* @type {32} SPACE
|
||||
*/
|
||||
const SPACE = 0x20
|
||||
|
||||
/**
|
||||
* @typedef {object} EventSourceStreamEvent
|
||||
* @type {object}
|
||||
* @property {string} [event] The event type.
|
||||
* @property {string} [data] The data of the message.
|
||||
* @property {string} [id] A unique ID for the event.
|
||||
* @property {string} [retry] The reconnection time, in milliseconds.
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef eventSourceSettings
|
||||
* @type {object}
|
||||
* @property {string} [lastEventId] The last event ID received from the server.
|
||||
* @property {string} [origin] The origin of the event source.
|
||||
* @property {number} [reconnectionTime] The reconnection time, in milliseconds.
|
||||
*/
|
||||
|
||||
class EventSourceStream extends Transform {
|
||||
/**
|
||||
* @type {eventSourceSettings}
|
||||
*/
|
||||
state
|
||||
|
||||
/**
|
||||
* Leading byte-order-mark check.
|
||||
* @type {boolean}
|
||||
*/
|
||||
checkBOM = true
|
||||
|
||||
/**
|
||||
* @type {boolean}
|
||||
*/
|
||||
crlfCheck = false
|
||||
|
||||
/**
|
||||
* @type {boolean}
|
||||
*/
|
||||
eventEndCheck = false
|
||||
|
||||
/**
|
||||
* @type {Buffer|null}
|
||||
*/
|
||||
buffer = null
|
||||
|
||||
pos = 0
|
||||
|
||||
event = {
|
||||
data: undefined,
|
||||
event: undefined,
|
||||
id: undefined,
|
||||
retry: undefined
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {object} options
|
||||
* @param {boolean} [options.readableObjectMode]
|
||||
* @param {eventSourceSettings} [options.eventSourceSettings]
|
||||
* @param {(chunk: any, encoding?: BufferEncoding | undefined) => boolean} [options.push]
|
||||
*/
|
||||
constructor (options = {}) {
|
||||
// Enable object mode as EventSourceStream emits objects of shape
|
||||
// EventSourceStreamEvent
|
||||
options.readableObjectMode = true
|
||||
|
||||
super(options)
|
||||
|
||||
this.state = options.eventSourceSettings || {}
|
||||
if (options.push) {
|
||||
this.push = options.push
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Buffer} chunk
|
||||
* @param {string} _encoding
|
||||
* @param {Function} callback
|
||||
* @returns {void}
|
||||
*/
|
||||
_transform (chunk, _encoding, callback) {
|
||||
if (chunk.length === 0) {
|
||||
callback()
|
||||
return
|
||||
}
|
||||
|
||||
// Cache the chunk in the buffer, as the data might not be complete while
|
||||
// processing it
|
||||
// TODO: Investigate if there is a more performant way to handle
|
||||
// incoming chunks
|
||||
// see: https://github.com/nodejs/undici/issues/2630
|
||||
if (this.buffer) {
|
||||
this.buffer = Buffer.concat([this.buffer, chunk])
|
||||
} else {
|
||||
this.buffer = chunk
|
||||
}
|
||||
|
||||
// Strip leading byte-order-mark if we opened the stream and started
|
||||
// the processing of the incoming data
|
||||
if (this.checkBOM) {
|
||||
switch (this.buffer.length) {
|
||||
case 1:
|
||||
// Check if the first byte is the same as the first byte of the BOM
|
||||
if (this.buffer[0] === BOM[0]) {
|
||||
// If it is, we need to wait for more data
|
||||
callback()
|
||||
return
|
||||
}
|
||||
// Set the checkBOM flag to false as we don't need to check for the
|
||||
// BOM anymore
|
||||
this.checkBOM = false
|
||||
|
||||
// The buffer only contains one byte so we need to wait for more data
|
||||
callback()
|
||||
return
|
||||
case 2:
|
||||
// Check if the first two bytes are the same as the first two bytes
|
||||
// of the BOM
|
||||
if (
|
||||
this.buffer[0] === BOM[0] &&
|
||||
this.buffer[1] === BOM[1]
|
||||
) {
|
||||
// If it is, we need to wait for more data, because the third byte
|
||||
// is needed to determine if it is the BOM or not
|
||||
callback()
|
||||
return
|
||||
}
|
||||
|
||||
// Set the checkBOM flag to false as we don't need to check for the
|
||||
// BOM anymore
|
||||
this.checkBOM = false
|
||||
break
|
||||
case 3:
|
||||
// Check if the first three bytes are the same as the first three
|
||||
// bytes of the BOM
|
||||
if (
|
||||
this.buffer[0] === BOM[0] &&
|
||||
this.buffer[1] === BOM[1] &&
|
||||
this.buffer[2] === BOM[2]
|
||||
) {
|
||||
// If it is, we can drop the buffered data, as it is only the BOM
|
||||
this.buffer = Buffer.alloc(0)
|
||||
// Set the checkBOM flag to false as we don't need to check for the
|
||||
// BOM anymore
|
||||
this.checkBOM = false
|
||||
|
||||
// Await more data
|
||||
callback()
|
||||
return
|
||||
}
|
||||
// If it is not the BOM, we can start processing the data
|
||||
this.checkBOM = false
|
||||
break
|
||||
default:
|
||||
// The buffer is longer than 3 bytes, so we can drop the BOM if it is
|
||||
// present
|
||||
if (
|
||||
this.buffer[0] === BOM[0] &&
|
||||
this.buffer[1] === BOM[1] &&
|
||||
this.buffer[2] === BOM[2]
|
||||
) {
|
||||
// Remove the BOM from the buffer
|
||||
this.buffer = this.buffer.subarray(3)
|
||||
}
|
||||
|
||||
// Set the checkBOM flag to false as we don't need to check for the
|
||||
this.checkBOM = false
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
while (this.pos < this.buffer.length) {
|
||||
// If the previous line ended with an end-of-line, we need to check
|
||||
// if the next character is also an end-of-line.
|
||||
if (this.eventEndCheck) {
|
||||
// If the the current character is an end-of-line, then the event
|
||||
// is finished and we can process it
|
||||
|
||||
// If the previous line ended with a carriage return, we need to
|
||||
// check if the current character is a line feed and remove it
|
||||
// from the buffer.
|
||||
if (this.crlfCheck) {
|
||||
// If the current character is a line feed, we can remove it
|
||||
// from the buffer and reset the crlfCheck flag
|
||||
if (this.buffer[this.pos] === LF) {
|
||||
this.buffer = this.buffer.subarray(this.pos + 1)
|
||||
this.pos = 0
|
||||
this.crlfCheck = false
|
||||
|
||||
// It is possible that the line feed is not the end of the
|
||||
// event. We need to check if the next character is an
|
||||
// end-of-line character to determine if the event is
|
||||
// finished. We simply continue the loop to check the next
|
||||
// character.
|
||||
|
||||
// As we removed the line feed from the buffer and set the
|
||||
// crlfCheck flag to false, we basically don't make any
|
||||
// distinction between a line feed and a carriage return.
|
||||
continue
|
||||
}
|
||||
this.crlfCheck = false
|
||||
}
|
||||
|
||||
if (this.buffer[this.pos] === LF || this.buffer[this.pos] === CR) {
|
||||
// If the current character is a carriage return, we need to
|
||||
// set the crlfCheck flag to true, as we need to check if the
|
||||
// next character is a line feed so we can remove it from the
|
||||
// buffer
|
||||
if (this.buffer[this.pos] === CR) {
|
||||
this.crlfCheck = true
|
||||
}
|
||||
|
||||
this.buffer = this.buffer.subarray(this.pos + 1)
|
||||
this.pos = 0
|
||||
if (
|
||||
this.event.data !== undefined || this.event.event || this.event.id || this.event.retry) {
|
||||
this.processEvent(this.event)
|
||||
}
|
||||
this.clearEvent()
|
||||
continue
|
||||
}
|
||||
// If the current character is not an end-of-line, then the event
|
||||
// is not finished and we have to reset the eventEndCheck flag
|
||||
this.eventEndCheck = false
|
||||
continue
|
||||
}
|
||||
|
||||
// If the current character is an end-of-line, we can process the
|
||||
// line
|
||||
if (this.buffer[this.pos] === LF || this.buffer[this.pos] === CR) {
|
||||
// If the current character is a carriage return, we need to
|
||||
// set the crlfCheck flag to true, as we need to check if the
|
||||
// next character is a line feed
|
||||
if (this.buffer[this.pos] === CR) {
|
||||
this.crlfCheck = true
|
||||
}
|
||||
|
||||
// In any case, we can process the line as we reached an
|
||||
// end-of-line character
|
||||
this.parseLine(this.buffer.subarray(0, this.pos), this.event)
|
||||
|
||||
// Remove the processed line from the buffer
|
||||
this.buffer = this.buffer.subarray(this.pos + 1)
|
||||
// Reset the position as we removed the processed line from the buffer
|
||||
this.pos = 0
|
||||
// A line was processed and this could be the end of the event. We need
|
||||
// to check if the next line is empty to determine if the event is
|
||||
// finished.
|
||||
this.eventEndCheck = true
|
||||
continue
|
||||
}
|
||||
|
||||
this.pos++
|
||||
}
|
||||
|
||||
callback()
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Buffer} line
|
||||
* @param {EventSourceStreamEvent} event
|
||||
*/
|
||||
parseLine (line, event) {
|
||||
// If the line is empty (a blank line)
|
||||
// Dispatch the event, as defined below.
|
||||
// This will be handled in the _transform method
|
||||
if (line.length === 0) {
|
||||
return
|
||||
}
|
||||
|
||||
// If the line starts with a U+003A COLON character (:)
|
||||
// Ignore the line.
|
||||
const colonPosition = line.indexOf(COLON)
|
||||
if (colonPosition === 0) {
|
||||
return
|
||||
}
|
||||
|
||||
let field = ''
|
||||
let value = ''
|
||||
|
||||
// If the line contains a U+003A COLON character (:)
|
||||
if (colonPosition !== -1) {
|
||||
// Collect the characters on the line before the first U+003A COLON
|
||||
// character (:), and let field be that string.
|
||||
// TODO: Investigate if there is a more performant way to extract the
|
||||
// field
|
||||
// see: https://github.com/nodejs/undici/issues/2630
|
||||
field = line.subarray(0, colonPosition).toString('utf8')
|
||||
|
||||
// Collect the characters on the line after the first U+003A COLON
|
||||
// character (:), and let value be that string.
|
||||
// If value starts with a U+0020 SPACE character, remove it from value.
|
||||
let valueStart = colonPosition + 1
|
||||
if (line[valueStart] === SPACE) {
|
||||
++valueStart
|
||||
}
|
||||
// TODO: Investigate if there is a more performant way to extract the
|
||||
// value
|
||||
// see: https://github.com/nodejs/undici/issues/2630
|
||||
value = line.subarray(valueStart).toString('utf8')
|
||||
|
||||
// Otherwise, the string is not empty but does not contain a U+003A COLON
|
||||
// character (:)
|
||||
} else {
|
||||
// Process the field using the steps described below, using the whole
|
||||
// line as the field name, and the empty string as the field value.
|
||||
field = line.toString('utf8')
|
||||
value = ''
|
||||
}
|
||||
|
||||
// Modify the event with the field name and value. The value is also
|
||||
// decoded as UTF-8
|
||||
switch (field) {
|
||||
case 'data':
|
||||
if (event[field] === undefined) {
|
||||
event[field] = value
|
||||
} else {
|
||||
event[field] += `\n${value}`
|
||||
}
|
||||
break
|
||||
case 'retry':
|
||||
if (isASCIINumber(value)) {
|
||||
event[field] = value
|
||||
}
|
||||
break
|
||||
case 'id':
|
||||
if (isValidLastEventId(value)) {
|
||||
event[field] = value
|
||||
}
|
||||
break
|
||||
case 'event':
|
||||
if (value.length > 0) {
|
||||
event[field] = value
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {EventSourceStreamEvent} event
|
||||
*/
|
||||
processEvent (event) {
|
||||
if (event.retry && isASCIINumber(event.retry)) {
|
||||
this.state.reconnectionTime = parseInt(event.retry, 10)
|
||||
}
|
||||
|
||||
if (event.id && isValidLastEventId(event.id)) {
|
||||
this.state.lastEventId = event.id
|
||||
}
|
||||
|
||||
// only dispatch event, when data is provided
|
||||
if (event.data !== undefined) {
|
||||
this.push({
|
||||
type: event.event || 'message',
|
||||
options: {
|
||||
data: event.data,
|
||||
lastEventId: this.state.lastEventId,
|
||||
origin: this.state.origin
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
clearEvent () {
|
||||
this.event = {
|
||||
data: undefined,
|
||||
event: undefined,
|
||||
id: undefined,
|
||||
retry: undefined
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
EventSourceStream
|
||||
}
|
484
node_modules/undici/lib/web/eventsource/eventsource.js
generated
vendored
Normal file
484
node_modules/undici/lib/web/eventsource/eventsource.js
generated
vendored
Normal file
@ -0,0 +1,484 @@
|
||||
'use strict'
|
||||
|
||||
const { pipeline } = require('node:stream')
|
||||
const { fetching } = require('../fetch')
|
||||
const { makeRequest } = require('../fetch/request')
|
||||
const { webidl } = require('../fetch/webidl')
|
||||
const { EventSourceStream } = require('./eventsource-stream')
|
||||
const { parseMIMEType } = require('../fetch/data-url')
|
||||
const { createFastMessageEvent } = require('../websocket/events')
|
||||
const { isNetworkError } = require('../fetch/response')
|
||||
const { delay } = require('./util')
|
||||
const { kEnumerableProperty } = require('../../core/util')
|
||||
const { environmentSettingsObject } = require('../fetch/util')
|
||||
|
||||
let experimentalWarned = false
|
||||
|
||||
/**
|
||||
* A reconnection time, in milliseconds. This must initially be an implementation-defined value,
|
||||
* probably in the region of a few seconds.
|
||||
*
|
||||
* In Comparison:
|
||||
* - Chrome uses 3000ms.
|
||||
* - Deno uses 5000ms.
|
||||
*
|
||||
* @type {3000}
|
||||
*/
|
||||
const defaultReconnectionTime = 3000
|
||||
|
||||
/**
|
||||
* The readyState attribute represents the state of the connection.
|
||||
* @typedef ReadyState
|
||||
* @type {0|1|2}
|
||||
* @readonly
|
||||
* @see https://html.spec.whatwg.org/multipage/server-sent-events.html#dom-eventsource-readystate-dev
|
||||
*/
|
||||
|
||||
/**
|
||||
* The connection has not yet been established, or it was closed and the user
|
||||
* agent is reconnecting.
|
||||
* @type {0}
|
||||
*/
|
||||
const CONNECTING = 0
|
||||
|
||||
/**
|
||||
* The user agent has an open connection and is dispatching events as it
|
||||
* receives them.
|
||||
* @type {1}
|
||||
*/
|
||||
const OPEN = 1
|
||||
|
||||
/**
|
||||
* The connection is not open, and the user agent is not trying to reconnect.
|
||||
* @type {2}
|
||||
*/
|
||||
const CLOSED = 2
|
||||
|
||||
/**
|
||||
* Requests for the element will have their mode set to "cors" and their credentials mode set to "same-origin".
|
||||
* @type {'anonymous'}
|
||||
*/
|
||||
const ANONYMOUS = 'anonymous'
|
||||
|
||||
/**
|
||||
* Requests for the element will have their mode set to "cors" and their credentials mode set to "include".
|
||||
* @type {'use-credentials'}
|
||||
*/
|
||||
const USE_CREDENTIALS = 'use-credentials'
|
||||
|
||||
/**
|
||||
* The EventSource interface is used to receive server-sent events. It
|
||||
* connects to a server over HTTP and receives events in text/event-stream
|
||||
* format without closing the connection.
|
||||
* @extends {EventTarget}
|
||||
* @see https://html.spec.whatwg.org/multipage/server-sent-events.html#server-sent-events
|
||||
* @api public
|
||||
*/
|
||||
class EventSource extends EventTarget {
|
||||
#events = {
|
||||
open: null,
|
||||
error: null,
|
||||
message: null
|
||||
}
|
||||
|
||||
#url
|
||||
#withCredentials = false
|
||||
|
||||
/**
|
||||
* @type {ReadyState}
|
||||
*/
|
||||
#readyState = CONNECTING
|
||||
|
||||
#request = null
|
||||
#controller = null
|
||||
|
||||
#dispatcher
|
||||
|
||||
/**
|
||||
* @type {import('./eventsource-stream').eventSourceSettings}
|
||||
*/
|
||||
#state
|
||||
|
||||
/**
|
||||
* Creates a new EventSource object.
|
||||
* @param {string} url
|
||||
* @param {EventSourceInit} [eventSourceInitDict={}]
|
||||
* @see https://html.spec.whatwg.org/multipage/server-sent-events.html#the-eventsource-interface
|
||||
*/
|
||||
constructor (url, eventSourceInitDict = {}) {
|
||||
// 1. Let ev be a new EventSource object.
|
||||
super()
|
||||
|
||||
webidl.util.markAsUncloneable(this)
|
||||
|
||||
const prefix = 'EventSource constructor'
|
||||
webidl.argumentLengthCheck(arguments, 1, prefix)
|
||||
|
||||
if (!experimentalWarned) {
|
||||
experimentalWarned = true
|
||||
process.emitWarning('EventSource is experimental, expect them to change at any time.', {
|
||||
code: 'UNDICI-ES'
|
||||
})
|
||||
}
|
||||
|
||||
url = webidl.converters.USVString(url)
|
||||
eventSourceInitDict = webidl.converters.EventSourceInitDict(eventSourceInitDict, prefix, 'eventSourceInitDict')
|
||||
|
||||
this.#dispatcher = eventSourceInitDict.dispatcher
|
||||
this.#state = {
|
||||
lastEventId: '',
|
||||
reconnectionTime: defaultReconnectionTime
|
||||
}
|
||||
|
||||
// 2. Let settings be ev's relevant settings object.
|
||||
// https://html.spec.whatwg.org/multipage/webappapis.html#environment-settings-object
|
||||
const settings = environmentSettingsObject
|
||||
|
||||
let urlRecord
|
||||
|
||||
try {
|
||||
// 3. Let urlRecord be the result of encoding-parsing a URL given url, relative to settings.
|
||||
urlRecord = new URL(url, settings.settingsObject.baseUrl)
|
||||
this.#state.origin = urlRecord.origin
|
||||
} catch (e) {
|
||||
// 4. If urlRecord is failure, then throw a "SyntaxError" DOMException.
|
||||
throw new DOMException(e, 'SyntaxError')
|
||||
}
|
||||
|
||||
// 5. Set ev's url to urlRecord.
|
||||
this.#url = urlRecord.href
|
||||
|
||||
// 6. Let corsAttributeState be Anonymous.
|
||||
let corsAttributeState = ANONYMOUS
|
||||
|
||||
// 7. If the value of eventSourceInitDict's withCredentials member is true,
|
||||
// then set corsAttributeState to Use Credentials and set ev's
|
||||
// withCredentials attribute to true.
|
||||
if (eventSourceInitDict.withCredentials === true) {
|
||||
corsAttributeState = USE_CREDENTIALS
|
||||
this.#withCredentials = true
|
||||
}
|
||||
|
||||
// 8. Let request be the result of creating a potential-CORS request given
|
||||
// urlRecord, the empty string, and corsAttributeState.
|
||||
const initRequest = {
|
||||
redirect: 'follow',
|
||||
keepalive: true,
|
||||
// @see https://html.spec.whatwg.org/multipage/urls-and-fetching.html#cors-settings-attributes
|
||||
mode: 'cors',
|
||||
credentials: corsAttributeState === 'anonymous'
|
||||
? 'same-origin'
|
||||
: 'omit',
|
||||
referrer: 'no-referrer'
|
||||
}
|
||||
|
||||
// 9. Set request's client to settings.
|
||||
initRequest.client = environmentSettingsObject.settingsObject
|
||||
|
||||
// 10. User agents may set (`Accept`, `text/event-stream`) in request's header list.
|
||||
initRequest.headersList = [['accept', { name: 'accept', value: 'text/event-stream' }]]
|
||||
|
||||
// 11. Set request's cache mode to "no-store".
|
||||
initRequest.cache = 'no-store'
|
||||
|
||||
// 12. Set request's initiator type to "other".
|
||||
initRequest.initiator = 'other'
|
||||
|
||||
initRequest.urlList = [new URL(this.#url)]
|
||||
|
||||
// 13. Set ev's request to request.
|
||||
this.#request = makeRequest(initRequest)
|
||||
|
||||
this.#connect()
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the state of this EventSource object's connection. It can have the
|
||||
* values described below.
|
||||
* @returns {ReadyState}
|
||||
* @readonly
|
||||
*/
|
||||
get readyState () {
|
||||
return this.#readyState
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the URL providing the event stream.
|
||||
* @readonly
|
||||
* @returns {string}
|
||||
*/
|
||||
get url () {
|
||||
return this.#url
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a boolean indicating whether the EventSource object was
|
||||
* instantiated with CORS credentials set (true), or not (false, the default).
|
||||
*/
|
||||
get withCredentials () {
|
||||
return this.#withCredentials
|
||||
}
|
||||
|
||||
#connect () {
|
||||
if (this.#readyState === CLOSED) return
|
||||
|
||||
this.#readyState = CONNECTING
|
||||
|
||||
const fetchParams = {
|
||||
request: this.#request,
|
||||
dispatcher: this.#dispatcher
|
||||
}
|
||||
|
||||
// 14. Let processEventSourceEndOfBody given response res be the following step: if res is not a network error, then reestablish the connection.
|
||||
const processEventSourceEndOfBody = (response) => {
|
||||
if (isNetworkError(response)) {
|
||||
this.dispatchEvent(new Event('error'))
|
||||
this.close()
|
||||
}
|
||||
|
||||
this.#reconnect()
|
||||
}
|
||||
|
||||
// 15. Fetch request, with processResponseEndOfBody set to processEventSourceEndOfBody...
|
||||
fetchParams.processResponseEndOfBody = processEventSourceEndOfBody
|
||||
|
||||
// and processResponse set to the following steps given response res:
|
||||
fetchParams.processResponse = (response) => {
|
||||
// 1. If res is an aborted network error, then fail the connection.
|
||||
|
||||
if (isNetworkError(response)) {
|
||||
// 1. When a user agent is to fail the connection, the user agent
|
||||
// must queue a task which, if the readyState attribute is set to a
|
||||
// value other than CLOSED, sets the readyState attribute to CLOSED
|
||||
// and fires an event named error at the EventSource object. Once the
|
||||
// user agent has failed the connection, it does not attempt to
|
||||
// reconnect.
|
||||
if (response.aborted) {
|
||||
this.close()
|
||||
this.dispatchEvent(new Event('error'))
|
||||
return
|
||||
// 2. Otherwise, if res is a network error, then reestablish the
|
||||
// connection, unless the user agent knows that to be futile, in
|
||||
// which case the user agent may fail the connection.
|
||||
} else {
|
||||
this.#reconnect()
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Otherwise, if res's status is not 200, or if res's `Content-Type`
|
||||
// is not `text/event-stream`, then fail the connection.
|
||||
const contentType = response.headersList.get('content-type', true)
|
||||
const mimeType = contentType !== null ? parseMIMEType(contentType) : 'failure'
|
||||
const contentTypeValid = mimeType !== 'failure' && mimeType.essence === 'text/event-stream'
|
||||
if (
|
||||
response.status !== 200 ||
|
||||
contentTypeValid === false
|
||||
) {
|
||||
this.close()
|
||||
this.dispatchEvent(new Event('error'))
|
||||
return
|
||||
}
|
||||
|
||||
// 4. Otherwise, announce the connection and interpret res's body
|
||||
// line by line.
|
||||
|
||||
// When a user agent is to announce the connection, the user agent
|
||||
// must queue a task which, if the readyState attribute is set to a
|
||||
// value other than CLOSED, sets the readyState attribute to OPEN
|
||||
// and fires an event named open at the EventSource object.
|
||||
// @see https://html.spec.whatwg.org/multipage/server-sent-events.html#sse-processing-model
|
||||
this.#readyState = OPEN
|
||||
this.dispatchEvent(new Event('open'))
|
||||
|
||||
// If redirected to a different origin, set the origin to the new origin.
|
||||
this.#state.origin = response.urlList[response.urlList.length - 1].origin
|
||||
|
||||
const eventSourceStream = new EventSourceStream({
|
||||
eventSourceSettings: this.#state,
|
||||
push: (event) => {
|
||||
this.dispatchEvent(createFastMessageEvent(
|
||||
event.type,
|
||||
event.options
|
||||
))
|
||||
}
|
||||
})
|
||||
|
||||
pipeline(response.body.stream,
|
||||
eventSourceStream,
|
||||
(error) => {
|
||||
if (
|
||||
error?.aborted === false
|
||||
) {
|
||||
this.close()
|
||||
this.dispatchEvent(new Event('error'))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
this.#controller = fetching(fetchParams)
|
||||
}
|
||||
|
||||
/**
|
||||
* @see https://html.spec.whatwg.org/multipage/server-sent-events.html#sse-processing-model
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async #reconnect () {
|
||||
// When a user agent is to reestablish the connection, the user agent must
|
||||
// run the following steps. These steps are run in parallel, not as part of
|
||||
// a task. (The tasks that it queues, of course, are run like normal tasks
|
||||
// and not themselves in parallel.)
|
||||
|
||||
// 1. Queue a task to run the following steps:
|
||||
|
||||
// 1. If the readyState attribute is set to CLOSED, abort the task.
|
||||
if (this.#readyState === CLOSED) return
|
||||
|
||||
// 2. Set the readyState attribute to CONNECTING.
|
||||
this.#readyState = CONNECTING
|
||||
|
||||
// 3. Fire an event named error at the EventSource object.
|
||||
this.dispatchEvent(new Event('error'))
|
||||
|
||||
// 2. Wait a delay equal to the reconnection time of the event source.
|
||||
await delay(this.#state.reconnectionTime)
|
||||
|
||||
// 5. Queue a task to run the following steps:
|
||||
|
||||
// 1. If the EventSource object's readyState attribute is not set to
|
||||
// CONNECTING, then return.
|
||||
if (this.#readyState !== CONNECTING) return
|
||||
|
||||
// 2. Let request be the EventSource object's request.
|
||||
// 3. If the EventSource object's last event ID string is not the empty
|
||||
// string, then:
|
||||
// 1. Let lastEventIDValue be the EventSource object's last event ID
|
||||
// string, encoded as UTF-8.
|
||||
// 2. Set (`Last-Event-ID`, lastEventIDValue) in request's header
|
||||
// list.
|
||||
if (this.#state.lastEventId.length) {
|
||||
this.#request.headersList.set('last-event-id', this.#state.lastEventId, true)
|
||||
}
|
||||
|
||||
// 4. Fetch request and process the response obtained in this fashion, if any, as described earlier in this section.
|
||||
this.#connect()
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes the connection, if any, and sets the readyState attribute to
|
||||
* CLOSED.
|
||||
*/
|
||||
close () {
|
||||
webidl.brandCheck(this, EventSource)
|
||||
|
||||
if (this.#readyState === CLOSED) return
|
||||
this.#readyState = CLOSED
|
||||
this.#controller.abort()
|
||||
this.#request = null
|
||||
}
|
||||
|
||||
get onopen () {
|
||||
return this.#events.open
|
||||
}
|
||||
|
||||
set onopen (fn) {
|
||||
if (this.#events.open) {
|
||||
this.removeEventListener('open', this.#events.open)
|
||||
}
|
||||
|
||||
if (typeof fn === 'function') {
|
||||
this.#events.open = fn
|
||||
this.addEventListener('open', fn)
|
||||
} else {
|
||||
this.#events.open = null
|
||||
}
|
||||
}
|
||||
|
||||
get onmessage () {
|
||||
return this.#events.message
|
||||
}
|
||||
|
||||
set onmessage (fn) {
|
||||
if (this.#events.message) {
|
||||
this.removeEventListener('message', this.#events.message)
|
||||
}
|
||||
|
||||
if (typeof fn === 'function') {
|
||||
this.#events.message = fn
|
||||
this.addEventListener('message', fn)
|
||||
} else {
|
||||
this.#events.message = null
|
||||
}
|
||||
}
|
||||
|
||||
get onerror () {
|
||||
return this.#events.error
|
||||
}
|
||||
|
||||
set onerror (fn) {
|
||||
if (this.#events.error) {
|
||||
this.removeEventListener('error', this.#events.error)
|
||||
}
|
||||
|
||||
if (typeof fn === 'function') {
|
||||
this.#events.error = fn
|
||||
this.addEventListener('error', fn)
|
||||
} else {
|
||||
this.#events.error = null
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const constantsPropertyDescriptors = {
|
||||
CONNECTING: {
|
||||
__proto__: null,
|
||||
configurable: false,
|
||||
enumerable: true,
|
||||
value: CONNECTING,
|
||||
writable: false
|
||||
},
|
||||
OPEN: {
|
||||
__proto__: null,
|
||||
configurable: false,
|
||||
enumerable: true,
|
||||
value: OPEN,
|
||||
writable: false
|
||||
},
|
||||
CLOSED: {
|
||||
__proto__: null,
|
||||
configurable: false,
|
||||
enumerable: true,
|
||||
value: CLOSED,
|
||||
writable: false
|
||||
}
|
||||
}
|
||||
|
||||
Object.defineProperties(EventSource, constantsPropertyDescriptors)
|
||||
Object.defineProperties(EventSource.prototype, constantsPropertyDescriptors)
|
||||
|
||||
Object.defineProperties(EventSource.prototype, {
|
||||
close: kEnumerableProperty,
|
||||
onerror: kEnumerableProperty,
|
||||
onmessage: kEnumerableProperty,
|
||||
onopen: kEnumerableProperty,
|
||||
readyState: kEnumerableProperty,
|
||||
url: kEnumerableProperty,
|
||||
withCredentials: kEnumerableProperty
|
||||
})
|
||||
|
||||
webidl.converters.EventSourceInitDict = webidl.dictionaryConverter([
|
||||
{
|
||||
key: 'withCredentials',
|
||||
converter: webidl.converters.boolean,
|
||||
defaultValue: () => false
|
||||
},
|
||||
{
|
||||
key: 'dispatcher', // undici only
|
||||
converter: webidl.converters.any
|
||||
}
|
||||
])
|
||||
|
||||
module.exports = {
|
||||
EventSource,
|
||||
defaultReconnectionTime
|
||||
}
|
37
node_modules/undici/lib/web/eventsource/util.js
generated
vendored
Normal file
37
node_modules/undici/lib/web/eventsource/util.js
generated
vendored
Normal file
@ -0,0 +1,37 @@
|
||||
'use strict'
|
||||
|
||||
/**
|
||||
* Checks if the given value is a valid LastEventId.
|
||||
* @param {string} value
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function isValidLastEventId (value) {
|
||||
// LastEventId should not contain U+0000 NULL
|
||||
return value.indexOf('\u0000') === -1
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the given value is a base 10 digit.
|
||||
* @param {string} value
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function isASCIINumber (value) {
|
||||
if (value.length === 0) return false
|
||||
for (let i = 0; i < value.length; i++) {
|
||||
if (value.charCodeAt(i) < 0x30 || value.charCodeAt(i) > 0x39) return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// https://github.com/nodejs/undici/issues/2664
|
||||
function delay (ms) {
|
||||
return new Promise((resolve) => {
|
||||
setTimeout(resolve, ms).unref()
|
||||
})
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
isValidLastEventId,
|
||||
isASCIINumber,
|
||||
delay
|
||||
}
|
391
node_modules/undici/lib/fetch/body.js → node_modules/undici/lib/web/fetch/body.js
generated
vendored
391
node_modules/undici/lib/fetch/body.js → node_modules/undici/lib/web/fetch/body.js
generated
vendored
@ -1,75 +1,78 @@
|
||||
'use strict'
|
||||
|
||||
const Busboy = require('@fastify/busboy')
|
||||
const util = require('../core/util')
|
||||
const util = require('../../core/util')
|
||||
const {
|
||||
ReadableStreamFrom,
|
||||
isBlobLike,
|
||||
isReadableStreamLike,
|
||||
readableStreamClose,
|
||||
createDeferredPromise,
|
||||
fullyReadBody
|
||||
fullyReadBody,
|
||||
extractMimeType,
|
||||
utf8DecodeBytes
|
||||
} = require('./util')
|
||||
const { FormData } = require('./formdata')
|
||||
const { kState } = require('./symbols')
|
||||
const { FormData, setFormDataState } = require('./formdata')
|
||||
const { webidl } = require('./webidl')
|
||||
const { DOMException, structuredClone } = require('./constants')
|
||||
const { Blob, File: NativeFile } = require('buffer')
|
||||
const { kBodyUsed } = require('../core/symbols')
|
||||
const assert = require('assert')
|
||||
const { isErrored } = require('../core/util')
|
||||
const { isUint8Array, isArrayBuffer } = require('util/types')
|
||||
const { File: UndiciFile } = require('./file')
|
||||
const { parseMIMEType, serializeAMimeType } = require('./dataURL')
|
||||
|
||||
const { Blob } = require('node:buffer')
|
||||
const assert = require('node:assert')
|
||||
const { isErrored, isDisturbed } = require('node:stream')
|
||||
const { isArrayBuffer } = require('node:util/types')
|
||||
const { serializeAMimeType } = require('./data-url')
|
||||
const { multipartFormDataParser } = require('./formdata-parser')
|
||||
let random
|
||||
|
||||
try {
|
||||
const crypto = require('node:crypto')
|
||||
random = (max) => crypto.randomInt(0, max)
|
||||
} catch {
|
||||
random = (max) => Math.floor(Math.random(max))
|
||||
random = (max) => Math.floor(Math.random() * max)
|
||||
}
|
||||
|
||||
let ReadableStream = globalThis.ReadableStream
|
||||
|
||||
/** @type {globalThis['File']} */
|
||||
const File = NativeFile ?? UndiciFile
|
||||
const textEncoder = new TextEncoder()
|
||||
const textDecoder = new TextDecoder()
|
||||
function noop () {}
|
||||
|
||||
const hasFinalizationRegistry = globalThis.FinalizationRegistry && process.version.indexOf('v18') !== 0
|
||||
let streamRegistry
|
||||
|
||||
if (hasFinalizationRegistry) {
|
||||
streamRegistry = new FinalizationRegistry((weakRef) => {
|
||||
const stream = weakRef.deref()
|
||||
if (stream && !stream.locked && !isDisturbed(stream) && !isErrored(stream)) {
|
||||
stream.cancel('Response object has been garbage collected').catch(noop)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// https://fetch.spec.whatwg.org/#concept-bodyinit-extract
|
||||
function extractBody (object, keepalive = false) {
|
||||
if (!ReadableStream) {
|
||||
ReadableStream = require('stream/web').ReadableStream
|
||||
}
|
||||
|
||||
// 1. Let stream be null.
|
||||
let stream = null
|
||||
|
||||
// 2. If object is a ReadableStream object, then set stream to object.
|
||||
if (object instanceof ReadableStream) {
|
||||
if (webidl.is.ReadableStream(object)) {
|
||||
stream = object
|
||||
} else if (isBlobLike(object)) {
|
||||
} else if (webidl.is.Blob(object)) {
|
||||
// 3. Otherwise, if object is a Blob object, set stream to the
|
||||
// result of running object’s get stream.
|
||||
stream = object.stream()
|
||||
} else {
|
||||
// 4. Otherwise, set stream to a new ReadableStream object, and set
|
||||
// up stream.
|
||||
// up stream with byte reading support.
|
||||
stream = new ReadableStream({
|
||||
async pull (controller) {
|
||||
controller.enqueue(
|
||||
typeof source === 'string' ? textEncoder.encode(source) : source
|
||||
)
|
||||
const buffer = typeof source === 'string' ? textEncoder.encode(source) : source
|
||||
|
||||
if (buffer.byteLength) {
|
||||
controller.enqueue(buffer)
|
||||
}
|
||||
|
||||
queueMicrotask(() => readableStreamClose(controller))
|
||||
},
|
||||
start () {},
|
||||
type: undefined
|
||||
type: 'bytes'
|
||||
})
|
||||
}
|
||||
|
||||
// 5. Assert: stream is a ReadableStream object.
|
||||
assert(isReadableStreamLike(stream))
|
||||
assert(webidl.is.ReadableStream(stream))
|
||||
|
||||
// 6. Let action be null.
|
||||
let action = null
|
||||
@ -91,7 +94,7 @@ function extractBody (object, keepalive = false) {
|
||||
|
||||
// Set type to `text/plain;charset=UTF-8`.
|
||||
type = 'text/plain;charset=UTF-8'
|
||||
} else if (object instanceof URLSearchParams) {
|
||||
} else if (webidl.is.URLSearchParams(object)) {
|
||||
// URLSearchParams
|
||||
|
||||
// spec says to run application/x-www-form-urlencoded on body.list
|
||||
@ -114,7 +117,7 @@ function extractBody (object, keepalive = false) {
|
||||
|
||||
// Set source to a copy of the bytes held by object.
|
||||
source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength))
|
||||
} else if (util.isFormDataLike(object)) {
|
||||
} else if (webidl.is.FormData(object)) {
|
||||
const boundary = `----formdata-undici-0${`${random(1e11)}`.padStart(11, '0')}`
|
||||
const prefix = `--${boundary}\r\nContent-Disposition: form-data`
|
||||
|
||||
@ -156,7 +159,10 @@ function extractBody (object, keepalive = false) {
|
||||
}
|
||||
}
|
||||
|
||||
const chunk = textEncoder.encode(`--${boundary}--`)
|
||||
// CRLF is appended to the body to function with legacy servers and match other implementations.
|
||||
// https://github.com/curl/curl/blob/3434c6b46e682452973972e8313613dfa58cd690/lib/mime.c#L1029-L1030
|
||||
// https://github.com/form-data/form-data/issues/63
|
||||
const chunk = textEncoder.encode(`--${boundary}--\r\n`)
|
||||
blobParts.push(chunk)
|
||||
length += chunk.byteLength
|
||||
if (hasUnknownSizeValue) {
|
||||
@ -179,8 +185,8 @@ function extractBody (object, keepalive = false) {
|
||||
// Set type to `multipart/form-data; boundary=`,
|
||||
// followed by the multipart/form-data boundary string generated
|
||||
// by the multipart/form-data encoding algorithm.
|
||||
type = 'multipart/form-data; boundary=' + boundary
|
||||
} else if (isBlobLike(object)) {
|
||||
type = `multipart/form-data; boundary=${boundary}`
|
||||
} else if (webidl.is.Blob(object)) {
|
||||
// Blob
|
||||
|
||||
// Set source to object.
|
||||
@ -208,7 +214,7 @@ function extractBody (object, keepalive = false) {
|
||||
}
|
||||
|
||||
stream =
|
||||
object instanceof ReadableStream ? object : ReadableStreamFrom(object)
|
||||
webidl.is.ReadableStream(object) ? object : ReadableStreamFrom(object)
|
||||
}
|
||||
|
||||
// 11. If source is a byte sequence, then set action to a
|
||||
@ -231,13 +237,17 @@ function extractBody (object, keepalive = false) {
|
||||
// When running action is done, close stream.
|
||||
queueMicrotask(() => {
|
||||
controller.close()
|
||||
controller.byobRequest?.respond(0)
|
||||
})
|
||||
} else {
|
||||
// Whenever one or more bytes are available and stream is not errored,
|
||||
// enqueue a Uint8Array wrapping an ArrayBuffer containing the available
|
||||
// bytes into stream.
|
||||
if (!isErrored(stream)) {
|
||||
controller.enqueue(new Uint8Array(value))
|
||||
const buffer = new Uint8Array(value)
|
||||
if (buffer.byteLength) {
|
||||
controller.enqueue(buffer)
|
||||
}
|
||||
}
|
||||
}
|
||||
return controller.desiredSize > 0
|
||||
@ -245,7 +255,7 @@ function extractBody (object, keepalive = false) {
|
||||
async cancel (reason) {
|
||||
await iterator.return()
|
||||
},
|
||||
type: undefined
|
||||
type: 'bytes'
|
||||
})
|
||||
}
|
||||
|
||||
@ -259,16 +269,11 @@ function extractBody (object, keepalive = false) {
|
||||
|
||||
// https://fetch.spec.whatwg.org/#bodyinit-safely-extract
|
||||
function safelyExtractBody (object, keepalive = false) {
|
||||
if (!ReadableStream) {
|
||||
// istanbul ignore next
|
||||
ReadableStream = require('stream/web').ReadableStream
|
||||
}
|
||||
|
||||
// To safely extract a body and a `Content-Type` value from
|
||||
// a byte sequence or BodyInit object object, run these steps:
|
||||
|
||||
// 1. If object is a ReadableStream object, then:
|
||||
if (object instanceof ReadableStream) {
|
||||
if (webidl.is.ReadableStream(object)) {
|
||||
// Assert: object is neither disturbed nor locked.
|
||||
// istanbul ignore next
|
||||
assert(!util.isDisturbed(object), 'The body has already been consumed.')
|
||||
@ -280,59 +285,36 @@ function safelyExtractBody (object, keepalive = false) {
|
||||
return extractBody(object, keepalive)
|
||||
}
|
||||
|
||||
function cloneBody (body) {
|
||||
function cloneBody (instance, body) {
|
||||
// To clone a body body, run these steps:
|
||||
|
||||
// https://fetch.spec.whatwg.org/#concept-body-clone
|
||||
|
||||
// 1. Let « out1, out2 » be the result of teeing body’s stream.
|
||||
const [out1, out2] = body.stream.tee()
|
||||
const out2Clone = structuredClone(out2, { transfer: [out2] })
|
||||
// This, for whatever reasons, unrefs out2Clone which allows
|
||||
// the process to exit by itself.
|
||||
const [, finalClone] = out2Clone.tee()
|
||||
|
||||
if (hasFinalizationRegistry) {
|
||||
streamRegistry.register(instance, new WeakRef(out1))
|
||||
}
|
||||
|
||||
// 2. Set body’s stream to out1.
|
||||
body.stream = out1
|
||||
|
||||
// 3. Return a body whose stream is out2 and other members are copied from body.
|
||||
return {
|
||||
stream: finalClone,
|
||||
stream: out2,
|
||||
length: body.length,
|
||||
source: body.source
|
||||
}
|
||||
}
|
||||
|
||||
async function * consumeBody (body) {
|
||||
if (body) {
|
||||
if (isUint8Array(body)) {
|
||||
yield body
|
||||
} else {
|
||||
const stream = body.stream
|
||||
|
||||
if (util.isDisturbed(stream)) {
|
||||
throw new TypeError('The body has already been consumed.')
|
||||
}
|
||||
|
||||
if (stream.locked) {
|
||||
throw new TypeError('The stream is locked.')
|
||||
}
|
||||
|
||||
// Compat.
|
||||
stream[kBodyUsed] = true
|
||||
|
||||
yield * stream
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function throwIfAborted (state) {
|
||||
if (state.aborted) {
|
||||
throw new DOMException('The operation was aborted.', 'AbortError')
|
||||
}
|
||||
}
|
||||
|
||||
function bodyMixinMethods (instance) {
|
||||
function bodyMixinMethods (instance, getInternalState) {
|
||||
const methods = {
|
||||
blob () {
|
||||
// The blob() method steps are to return the result of
|
||||
@ -340,10 +322,10 @@ function bodyMixinMethods (instance) {
|
||||
// given a byte sequence bytes: return a Blob whose
|
||||
// contents are bytes and whose type attribute is this’s
|
||||
// MIME type.
|
||||
return specConsumeBody(this, (bytes) => {
|
||||
let mimeType = bodyMimeType(this)
|
||||
return consumeBody(this, (bytes) => {
|
||||
let mimeType = bodyMimeType(getInternalState(this))
|
||||
|
||||
if (mimeType === 'failure') {
|
||||
if (mimeType === null) {
|
||||
mimeType = ''
|
||||
} else if (mimeType) {
|
||||
mimeType = serializeAMimeType(mimeType)
|
||||
@ -352,7 +334,7 @@ function bodyMixinMethods (instance) {
|
||||
// Return a Blob whose contents are bytes and type attribute
|
||||
// is mimeType.
|
||||
return new Blob([bytes], { type: mimeType })
|
||||
}, instance)
|
||||
}, instance, getInternalState)
|
||||
},
|
||||
|
||||
arrayBuffer () {
|
||||
@ -360,161 +342,108 @@ function bodyMixinMethods (instance) {
|
||||
// of running consume body with this and the following step
|
||||
// given a byte sequence bytes: return a new ArrayBuffer
|
||||
// whose contents are bytes.
|
||||
return specConsumeBody(this, (bytes) => {
|
||||
return consumeBody(this, (bytes) => {
|
||||
return new Uint8Array(bytes).buffer
|
||||
}, instance)
|
||||
}, instance, getInternalState)
|
||||
},
|
||||
|
||||
text () {
|
||||
// The text() method steps are to return the result of running
|
||||
// consume body with this and UTF-8 decode.
|
||||
return specConsumeBody(this, utf8DecodeBytes, instance)
|
||||
return consumeBody(this, utf8DecodeBytes, instance, getInternalState)
|
||||
},
|
||||
|
||||
json () {
|
||||
// The json() method steps are to return the result of running
|
||||
// consume body with this and parse JSON from bytes.
|
||||
return specConsumeBody(this, parseJSONFromBytes, instance)
|
||||
return consumeBody(this, parseJSONFromBytes, instance, getInternalState)
|
||||
},
-    async formData () {
-      webidl.brandCheck(this, instance)
-
-      throwIfAborted(this[kState])
-
-      const contentType = this.headers.get('Content-Type')
-
-      // If mimeType’s essence is "multipart/form-data", then:
-      if (/multipart\/form-data/.test(contentType)) {
-        const headers = {}
-        for (const [key, value] of this.headers) headers[key.toLowerCase()] = value
-
-        const responseFormData = new FormData()
-
-        let busboy
-
-        try {
-          busboy = new Busboy({
-            headers,
-            preservePath: true
-          })
-        } catch (err) {
-          throw new DOMException(`${err}`, 'AbortError')
-        }
-
-        busboy.on('field', (name, value) => {
-          responseFormData.append(name, value)
-        })
-        busboy.on('file', (name, value, filename, encoding, mimeType) => {
-          const chunks = []
-
-          if (encoding === 'base64' || encoding.toLowerCase() === 'base64') {
-            let base64chunk = ''
-
-            value.on('data', (chunk) => {
-              base64chunk += chunk.toString().replace(/[\r\n]/gm, '')
-
-              const end = base64chunk.length - base64chunk.length % 4
-              chunks.push(Buffer.from(base64chunk.slice(0, end), 'base64'))
-
-              base64chunk = base64chunk.slice(end)
-            })
-            value.on('end', () => {
-              chunks.push(Buffer.from(base64chunk, 'base64'))
-              responseFormData.append(name, new File(chunks, filename, { type: mimeType }))
-            })
-          } else {
-            value.on('data', (chunk) => {
-              chunks.push(chunk)
-            })
-            value.on('end', () => {
-              responseFormData.append(name, new File(chunks, filename, { type: mimeType }))
-            })
-          }
-        })
-
-        const busboyResolve = new Promise((resolve, reject) => {
-          busboy.on('finish', resolve)
-          busboy.on('error', (err) => reject(new TypeError(err)))
-        })
-
-        if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk)
-        busboy.end()
-        await busboyResolve
-
-        return responseFormData
-      } else if (/application\/x-www-form-urlencoded/.test(contentType)) {
-        // Otherwise, if mimeType’s essence is "application/x-www-form-urlencoded", then:
-
-        // 1. Let entries be the result of parsing bytes.
-        let entries
-        try {
-          let text = ''
-          // application/x-www-form-urlencoded parser will keep the BOM.
-          // https://url.spec.whatwg.org/#concept-urlencoded-parser
-          // Note that streaming decoder is stateful and cannot be reused
-          const streamingDecoder = new TextDecoder('utf-8', { ignoreBOM: true })
-
-          for await (const chunk of consumeBody(this[kState].body)) {
-            if (!isUint8Array(chunk)) {
-              throw new TypeError('Expected Uint8Array chunk')
-            }
-            text += streamingDecoder.decode(chunk, { stream: true })
-          }
-          text += streamingDecoder.decode()
-          entries = new URLSearchParams(text)
-        } catch (err) {
-          // istanbul ignore next: Unclear when new URLSearchParams can fail on a string.
-          // 2. If entries is failure, then throw a TypeError.
-          throw Object.assign(new TypeError(), { cause: err })
-        }
-
-        // 3. Return a new FormData object whose entries are entries.
-        const formData = new FormData()
-        for (const [name, value] of entries) {
-          formData.append(name, value)
-        }
-        return formData
-      } else {
-        // Wait a tick before checking if the request has been aborted.
-        // Otherwise, a TypeError can be thrown when an AbortError should.
-        await Promise.resolve()
-
-        throwIfAborted(this[kState])
-
-        // Otherwise, throw a TypeError.
-        throw webidl.errors.exception({
-          header: `${instance.name}.formData`,
-          message: 'Could not parse content as FormData.'
-        })
-      }
-    },
+    formData () {
+      // The formData() method steps are to return the result of running
+      // consume body with this and the following step given a byte sequence bytes:
+      return consumeBody(this, (value) => {
+        // 1. Let mimeType be the result of get the MIME type with this.
+        const mimeType = bodyMimeType(getInternalState(this))
+
+        // 2. If mimeType is non-null, then switch on mimeType’s essence and run
+        // the corresponding steps:
+        if (mimeType !== null) {
+          switch (mimeType.essence) {
+            case 'multipart/form-data': {
+              // 1. ... [long step]
+              // 2. If that fails for some reason, then throw a TypeError.
+              const parsed = multipartFormDataParser(value, mimeType)
+
+              // 3. Return a new FormData object, appending each entry,
+              // resulting from the parsing operation, to its entry list.
+              const fd = new FormData()
+              setFormDataState(fd, parsed)
+
+              return fd
+            }
+            case 'application/x-www-form-urlencoded': {
+              // 1. Let entries be the result of parsing bytes.
+              const entries = new URLSearchParams(value.toString())
+
+              // 2. If entries is failure, then throw a TypeError.
+
+              // 3. Return a new FormData object whose entry list is entries.
+              const fd = new FormData()
+
+              for (const [name, value] of entries) {
+                fd.append(name, value)
+              }
+
+              return fd
+            }
+          }
+        }
+
+        // 3. Throw a TypeError.
+        throw new TypeError(
+          'Content-Type was not one of "multipart/form-data" or "application/x-www-form-urlencoded".'
+        )
+      }, instance, getInternalState)
+    },
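For orientation, here is a minimal usage sketch of the rewritten method (not part of the diff; assumes Node 18+ with undici's Response export):

// Sketch: the application/x-www-form-urlencoded branch above in action.
const { Response } = require('undici')

async function demo () {
  const res = new Response('a=1&b=2', {
    headers: { 'content-type': 'application/x-www-form-urlencoded' }
  })
  const fd = await res.formData()
  console.log(fd.get('a'), fd.get('b')) // '1' '2'
}

demo().catch(console.error)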
    bytes () {
      // The bytes() method steps are to return the result of running consume body
      // with this and the following step given a byte sequence bytes: return the
      // result of creating a Uint8Array from bytes in this’s relevant realm.
      return consumeBody(this, (bytes) => {
        return new Uint8Array(bytes)
      }, instance, getInternalState)
    }
  }

  return methods
}
-function mixinBody (prototype) {
-  Object.assign(prototype.prototype, bodyMixinMethods(prototype))
+function mixinBody (prototype, getInternalState) {
+  Object.assign(prototype.prototype, bodyMixinMethods(prototype, getInternalState))
}
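mixinBody is what gives Request and Response their .json(), .formData() and .bytes() methods. A hedged sketch of a call site (the class and the WeakMap getter here are hypothetical; undici's real getter lives in request.js/response.js):

// Sketch: adopting the body mixin on a class, with getInternalState mapping
// the public object to its internal { body, ... } state.
class MyResponse {}
const internal = new WeakMap()

mixinBody(MyResponse, (self) => internal.get(self))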
/**
 * @see https://fetch.spec.whatwg.org/#concept-body-consume-body
- * @param {Response|Request} object
+ * @param {any} object internal state
 * @param {(value: unknown) => unknown} convertBytesToJSValue
- * @param {Response|Request} instance
+ * @param {any} instance
+ * @param {(target: any) => any} getInternalState
 */
-async function specConsumeBody (object, convertBytesToJSValue, instance) {
+async function consumeBody (object, convertBytesToJSValue, instance, getInternalState) {
  webidl.brandCheck(object, instance)
-
-  throwIfAborted(object[kState])
+  const state = getInternalState(object)

  // 1. If object is unusable, then return a promise rejected
  // with a TypeError.
-  if (bodyUnusable(object[kState].body)) {
-    throw new TypeError('Body is unusable')
+  if (bodyUnusable(state)) {
+    throw new TypeError('Body is unusable: Body has already been read')
  }
+
+  throwIfAborted(state)

  // 2. Let promise be a new promise.
  const promise = createDeferredPromise()

@@ -535,53 +464,32 @@ async function specConsumeBody (object, convertBytesToJSValue, instance) {

  // 5. If object’s body is null, then run successSteps with an
  // empty byte sequence.
-  if (object[kState].body == null) {
-    successSteps(new Uint8Array())
+  if (state.body == null) {
+    successSteps(Buffer.allocUnsafe(0))
    return promise.promise
  }

  // 6. Otherwise, fully read object’s body given successSteps,
  // errorSteps, and object’s relevant global object.
-  await fullyReadBody(object[kState].body, successSteps, errorSteps)
+  fullyReadBody(state.body, successSteps, errorSteps)

  // 7. Return promise.
  return promise.promise
}
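The elided hunk above wires successSteps/errorSteps to the deferred promise. createDeferredPromise is undici's small helper for that pattern; a minimal equivalent for reference (a sketch, not the vendored implementation):

// Sketch: a deferred promise exposes resolve/reject alongside the promise,
// so callbacks defined later can settle it.
function createDeferredPromise () {
  let resolve, reject
  const promise = new Promise((res, rej) => { resolve = res; reject = rej })
  return { promise, resolve, reject }
}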
-// https://fetch.spec.whatwg.org/#body-unusable
-function bodyUnusable (body) {
+/**
+ * @see https://fetch.spec.whatwg.org/#body-unusable
+ * @param {any} object internal state
+ */
+function bodyUnusable (object) {
+  const body = object.body
+
  // An object including the Body interface mixin is
  // said to be unusable if its body is non-null and
  // its body’s stream is disturbed or locked.
  return body != null && (body.stream.locked || util.isDisturbed(body.stream))
}
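This check is what turns a second read of the same body into an error. Illustrative only (assumes Node 18+ with undici's Response export):

// Sketch: reading a body twice makes it "unusable".
const { Response } = require('undici')

async function demo () {
  const res = new Response('hello')
  await res.text() // first read disturbs the underlying stream
  await res.text() // rejects: TypeError: Body is unusable: Body has already been read
}

demo().catch(console.error)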
/**
 * @see https://encoding.spec.whatwg.org/#utf-8-decode
 * @param {Buffer} buffer
 */
function utf8DecodeBytes (buffer) {
  if (buffer.length === 0) {
    return ''
  }

  // 1. Let buffer be the result of peeking three bytes from
  // ioQueue, converted to a byte sequence.

  // 2. If buffer is 0xEF 0xBB 0xBF, then read three
  // bytes from ioQueue. (Do nothing with those bytes.)
  if (buffer[0] === 0xEF && buffer[1] === 0xBB && buffer[2] === 0xBF) {
    buffer = buffer.subarray(3)
  }

  // 3. Process a queue with an instance of UTF-8’s
  // decoder, ioQueue, output, and "replacement".
  const output = textDecoder.decode(buffer)

  // 4. Return output.
  return output
}
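The BOM handling matters for text() and json(): a leading UTF-8 BOM is consumed, never decoded into the result. The same rule, shown with the standard TextDecoder (illustrative, not undici code):

// Sketch: UTF-8 decode per the Encoding spec strips a leading BOM.
const bytes = Buffer.from([0xEF, 0xBB, 0xBF, 0x68, 0x69]) // BOM + "hi"
const text = new TextDecoder('utf-8').decode(bytes)
console.log(JSON.stringify(text)) // "hi" (no BOM in the output)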
/**
 * @see https://infra.spec.whatwg.org/#parse-json-bytes-to-a-javascript-value
 * @param {Uint8Array} bytes
@@ -592,22 +500,33 @@ function parseJSONFromBytes (bytes) {

/**
 * @see https://fetch.spec.whatwg.org/#concept-body-mime-type
- * @param {import('./response').Response|import('./request').Request} object
+ * @param {any} requestOrResponse internal state
 */
-function bodyMimeType (object) {
-  const { headersList } = object[kState]
-  const contentType = headersList.get('content-type')
+function bodyMimeType (requestOrResponse) {
+  // 1. Let headers be null.
+  // 2. If requestOrResponse is a Request object, then set headers to requestOrResponse’s request’s header list.
+  // 3. Otherwise, set headers to requestOrResponse’s response’s header list.
+  /** @type {import('./headers').HeadersList} */
+  const headers = requestOrResponse.headersList

-  if (contentType === null) {
-    return 'failure'
-  }
+  // 4. Let mimeType be the result of extracting a MIME type from headers.
+  const mimeType = extractMimeType(headers)

+  // 5. If mimeType is failure, then return null.
+  if (mimeType === 'failure') {
+    return null
+  }

-  return parseMIMEType(contentType)
+  // 6. Return mimeType.
+  return mimeType
}
module.exports = {
  extractBody,
  safelyExtractBody,
  cloneBody,
-  mixinBody
+  mixinBody,
+  streamRegistry,
+  hasFinalizationRegistry,
+  bodyUnusable
}
131
node_modules/undici/lib/web/fetch/constants.js
generated
vendored
Normal file
@@ -0,0 +1,131 @@
'use strict'

const corsSafeListedMethods = /** @type {const} */ (['GET', 'HEAD', 'POST'])
const corsSafeListedMethodsSet = new Set(corsSafeListedMethods)

const nullBodyStatus = /** @type {const} */ ([101, 204, 205, 304])

const redirectStatus = /** @type {const} */ ([301, 302, 303, 307, 308])
const redirectStatusSet = new Set(redirectStatus)

/**
 * @see https://fetch.spec.whatwg.org/#block-bad-port
 */
const badPorts = /** @type {const} */ ([
  '1', '7', '9', '11', '13', '15', '17', '19', '20', '21', '22', '23', '25', '37', '42', '43', '53', '69', '77', '79',
  '87', '95', '101', '102', '103', '104', '109', '110', '111', '113', '115', '117', '119', '123', '135', '137',
  '139', '143', '161', '179', '389', '427', '465', '512', '513', '514', '515', '526', '530', '531', '532',
  '540', '548', '554', '556', '563', '587', '601', '636', '989', '990', '993', '995', '1719', '1720', '1723',
  '2049', '3659', '4045', '4190', '5060', '5061', '6000', '6566', '6665', '6666', '6667', '6668', '6669', '6679',
  '6697', '10080'
])
const badPortsSet = new Set(badPorts)

/**
 * @see https://w3c.github.io/webappsec-referrer-policy/#referrer-policy-header
 */
const referrerPolicyTokens = /** @type {const} */ ([
  'no-referrer',
  'no-referrer-when-downgrade',
  'same-origin',
  'origin',
  'strict-origin',
  'origin-when-cross-origin',
  'strict-origin-when-cross-origin',
  'unsafe-url'
])

/**
 * @see https://w3c.github.io/webappsec-referrer-policy/#referrer-policies
 */
const referrerPolicy = /** @type {const} */ ([
  '',
  ...referrerPolicyTokens
])
const referrerPolicyTokensSet = new Set(referrerPolicyTokens)

const requestRedirect = /** @type {const} */ (['follow', 'manual', 'error'])

const safeMethods = /** @type {const} */ (['GET', 'HEAD', 'OPTIONS', 'TRACE'])
const safeMethodsSet = new Set(safeMethods)

const requestMode = /** @type {const} */ (['navigate', 'same-origin', 'no-cors', 'cors'])

const requestCredentials = /** @type {const} */ (['omit', 'same-origin', 'include'])

const requestCache = /** @type {const} */ ([
  'default',
  'no-store',
  'reload',
  'no-cache',
  'force-cache',
  'only-if-cached'
])

/**
 * @see https://fetch.spec.whatwg.org/#request-body-header-name
 */
const requestBodyHeader = /** @type {const} */ ([
  'content-encoding',
  'content-language',
  'content-location',
  'content-type',
  // See https://github.com/nodejs/undici/issues/2021
  // 'Content-Length' is a forbidden header name, which is typically
  // removed in the Headers implementation. However, undici doesn't
  // filter out headers, so we add it here.
  'content-length'
])

/**
 * @see https://fetch.spec.whatwg.org/#enumdef-requestduplex
 */
const requestDuplex = /** @type {const} */ ([
  'half'
])

/**
 * @see http://fetch.spec.whatwg.org/#forbidden-method
 */
const forbiddenMethods = /** @type {const} */ (['CONNECT', 'TRACE', 'TRACK'])
const forbiddenMethodsSet = new Set(forbiddenMethods)

const subresource = /** @type {const} */ ([
  'audio',
  'audioworklet',
  'font',
  'image',
  'manifest',
  'paintworklet',
  'script',
  'style',
  'track',
  'video',
  'xslt',
  ''
])
const subresourceSet = new Set(subresource)

module.exports = {
  subresource,
  forbiddenMethods,
  requestBodyHeader,
  referrerPolicy,
  requestRedirect,
  requestMode,
  requestCredentials,
  requestCache,
  redirectStatus,
  corsSafeListedMethods,
  nullBodyStatus,
  safeMethods,
  badPorts,
  requestDuplex,
  subresourceSet,
  badPortsSet,
  redirectStatusSet,
  corsSafeListedMethodsSet,
  safeMethodsSet,
  forbiddenMethodsSet,
  referrerPolicyTokens: referrerPolicyTokensSet
}
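These sets back fast membership checks in the fetch implementation. A quick sketch of the kind of queries they exist for (deep-requiring a vendored path, which undici does not guarantee as public API):

// Sketch: port blocking and redirect-status checks.
const { badPortsSet, redirectStatusSet } = require('undici/lib/web/fetch/constants')

console.log(badPortsSet.has('6667'))    // true: fetch blocks IRC's port
console.log(redirectStatusSet.has(307)) // true: 307 participates in redirect handling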
231
node_modules/undici/lib/fetch/dataURL.js → node_modules/undici/lib/web/fetch/data-url.js
generated
vendored
@@ -1,18 +1,19 @@
-const assert = require('assert')
-const { atob } = require('buffer')
-const { isomorphicDecode } = require('./util')
+'use strict'
+
+const assert = require('node:assert')

const encoder = new TextEncoder()

/**
 * @see https://mimesniff.spec.whatwg.org/#http-token-code-point
 */
-const HTTP_TOKEN_CODEPOINTS = /^[!#$%&'*+-.^_|~A-Za-z0-9]+$/
-const HTTP_WHITESPACE_REGEX = /(\u000A|\u000D|\u0009|\u0020)/ // eslint-disable-line
+const HTTP_TOKEN_CODEPOINTS = /^[!#$%&'*+\-.^_|~A-Za-z0-9]+$/
+const HTTP_WHITESPACE_REGEX = /[\u000A\u000D\u0009\u0020]/ // eslint-disable-line
+const ASCII_WHITESPACE_REPLACE_REGEX = /[\u0009\u000A\u000C\u000D\u0020]/g // eslint-disable-line
/**
 * @see https://mimesniff.spec.whatwg.org/#http-quoted-string-token-code-point
 */
-const HTTP_QUOTED_STRING_TOKENS = /[\u0009|\u0020-\u007E|\u0080-\u00FF]/ // eslint-disable-line
+const HTTP_QUOTED_STRING_TOKENS = /^[\u0009\u0020-\u007E\u0080-\u00FF]+$/ // eslint-disable-line

// https://fetch.spec.whatwg.org/#data-url-processor
/** @param {URL} dataURL */
@@ -126,7 +127,13 @@ function URLSerializer (url, excludeFragment = false) {
  const href = url.href
  const hashLength = url.hash.length

-  return hashLength === 0 ? href : href.substring(0, href.length - hashLength)
+  const serialized = hashLength === 0 ? href : href.substring(0, href.length - hashLength)
+
+  if (!hashLength && href.endsWith('#')) {
+    return serialized.slice(0, -1)
+  }
+
+  return serialized
}

// https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points
@@ -182,20 +189,43 @@ function stringPercentDecode (input) {
  return percentDecode(bytes)
}
+/**
+ * @param {number} byte
+ */
+function isHexCharByte (byte) {
+  // 0-9 A-F a-f
+  return (byte >= 0x30 && byte <= 0x39) || (byte >= 0x41 && byte <= 0x46) || (byte >= 0x61 && byte <= 0x66)
+}
+
+/**
+ * @param {number} byte
+ */
+function hexByteToNumber (byte) {
+  return (
+    // 0-9
+    byte >= 0x30 && byte <= 0x39
+      ? (byte - 48)
+      // Convert to uppercase
+      // ((byte & 0xDF) - 65) + 10
+      : ((byte & 0xDF) - 55)
+  )
+}
+
// https://url.spec.whatwg.org/#percent-decode
/** @param {Uint8Array} input */
function percentDecode (input) {
+  const length = input.length
  // 1. Let output be an empty byte sequence.
-  /** @type {number[]} */
-  const output = []
+  /** @type {Uint8Array} */
+  const output = new Uint8Array(length)
+  let j = 0
  // 2. For each byte byte in input:
-  for (let i = 0; i < input.length; i++) {
+  for (let i = 0; i < length; ++i) {
    const byte = input[i]

    // 1. If byte is not 0x25 (%), then append byte to output.
    if (byte !== 0x25) {
-      output.push(byte)
+      output[j++] = byte

    // 2. Otherwise, if byte is 0x25 (%) and the next two bytes
    // after byte in input are not in the ranges
@@ -204,19 +234,16 @@ function percentDecode (input) {
    // to output.
    } else if (
      byte === 0x25 &&
-      !/^[0-9A-Fa-f]{2}$/i.test(String.fromCharCode(input[i + 1], input[i + 2]))
+      !(isHexCharByte(input[i + 1]) && isHexCharByte(input[i + 2]))
    ) {
-      output.push(0x25)
+      output[j++] = 0x25

    // 3. Otherwise:
    } else {
      // 1. Let bytePoint be the two bytes after byte in input,
      // decoded, and then interpreted as hexadecimal number.
-      const nextTwoBytes = String.fromCharCode(input[i + 1], input[i + 2])
-      const bytePoint = Number.parseInt(nextTwoBytes, 16)

      // 2. Append a byte whose value is bytePoint to output.
-      output.push(bytePoint)
+      output[j++] = (hexByteToNumber(input[i + 1]) << 4) | hexByteToNumber(input[i + 2])

      // 3. Skip the next two bytes in input.
      i += 2
@@ -224,7 +251,7 @@ function percentDecode (input) {
  }

  // 3. Return output.
-  return Uint8Array.from(output)
+  return length === j ? output : output.subarray(0, j)
}
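The rewrite swaps per-byte array pushes and a regex test for direct hex arithmetic over a preallocated Uint8Array; behaviour is unchanged. A small check via the exported string wrapper (deep-requiring the vendored file, an assumption about this tree's layout):

// Sketch: percent-decoding "a%20b" yields the bytes of "a b".
const { stringPercentDecode } = require('undici/lib/web/fetch/data-url')

const bytes = stringPercentDecode('a%20b')
console.log(Buffer.from(bytes).toString()) // 'a b'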
// https://mimesniff.spec.whatwg.org/#parse-a-mime-type
@@ -256,7 +283,7 @@ function parseMIMEType (input) {

  // 5. If position is past the end of input, then return
  // failure
-  if (position.position > input.length) {
+  if (position.position >= input.length) {
    return 'failure'
  }

@@ -337,7 +364,7 @@ function parseMIMEType (input) {
  }

  // 6. If position is past the end of input, then break.
-  if (position.position > input.length) {
+  if (position.position >= input.length) {
    break
  }

@@ -404,19 +431,25 @@ function parseMIMEType (input) {
/** @param {string} data */
function forgivingBase64 (data) {
  // 1. Remove all ASCII whitespace from data.
-  data = data.replace(/[\u0009\u000A\u000C\u000D\u0020]/g, '') // eslint-disable-line
+  data = data.replace(ASCII_WHITESPACE_REPLACE_REGEX, '')

+  let dataLength = data.length
  // 2. If data’s code point length divides by 4 leaving
  // no remainder, then:
-  if (data.length % 4 === 0) {
+  if (dataLength % 4 === 0) {
    // 1. If data ends with one or two U+003D (=) code points,
    // then remove them from data.
-    data = data.replace(/=?=$/, '')
+    if (data.charCodeAt(dataLength - 1) === 0x003D) {
+      --dataLength
+      if (data.charCodeAt(dataLength - 1) === 0x003D) {
+        --dataLength
+      }
+    }
  }

  // 3. If data’s code point length divides by 4 leaving
  // a remainder of 1, then return failure.
-  if (data.length % 4 === 1) {
+  if (dataLength % 4 === 1) {
    return 'failure'
  }

@@ -425,18 +458,12 @@ function forgivingBase64 (data) {
  // U+002F (/)
  // ASCII alphanumeric
  // then return failure.
-  if (/[^+/0-9A-Za-z]/.test(data)) {
+  if (/[^+/0-9A-Za-z]/.test(data.length === dataLength ? data : data.substring(0, dataLength))) {
    return 'failure'
  }

-  const binary = atob(data)
-  const bytes = new Uint8Array(binary.length)
-
-  for (let byte = 0; byte < binary.length; byte++) {
-    bytes[byte] = binary.charCodeAt(byte)
-  }
-
-  return bytes
+  const buffer = Buffer.from(data, 'base64')
+  return new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength)
}
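The new tail delegates to Buffer.from(data, 'base64'), which already decodes forgivingly, instead of atob plus a copy loop. Illustrative comparison (not part of the diff):

// Sketch: Buffer's base64 decoder tolerates missing padding.
const a = Buffer.from('aGVsbG8=', 'base64')
const b = Buffer.from('aGVsbG8', 'base64') // same bytes, padding omitted
console.log(a.equals(b)) // true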
// https://fetch.spec.whatwg.org/#collect-an-http-quoted-string
@@ -444,9 +471,9 @@ function forgivingBase64 (data) {
/**
 * @param {string} input
 * @param {{ position: number }} position
- * @param {boolean?} extractValue
+ * @param {boolean} [extractValue=false]
 */
-function collectAnHTTPQuotedString (input, position, extractValue) {
+function collectAnHTTPQuotedString (input, position, extractValue = false) {
  // 1. Let positionStart be position.
  const positionStart = position.position

@@ -543,7 +570,7 @@ function serializeAMimeType (mimeType) {
    // 4. If value does not solely contain HTTP token code
    // points or value is the empty string, then:
    if (!HTTP_TOKEN_CODEPOINTS.test(value)) {
-      // 1. Precede each occurence of U+0022 (") or
+      // 1. Precede each occurrence of U+0022 (") or
      // U+005C (\) in value with U+005C (\).
      value = value.replace(/(\\|")/g, '\\$1')

@@ -564,55 +591,140 @@ function serializeAMimeType (mimeType) {

/**
 * @see https://fetch.spec.whatwg.org/#http-whitespace
- * @param {string} char
+ * @param {number} char
 */
function isHTTPWhiteSpace (char) {
-  return char === '\r' || char === '\n' || char === '\t' || char === ' '
+  // "\r\n\t "
+  return char === 0x00d || char === 0x00a || char === 0x009 || char === 0x020
}

/**
 * @see https://fetch.spec.whatwg.org/#http-whitespace
 * @param {string} str
+ * @param {boolean} [leading=true]
+ * @param {boolean} [trailing=true]
 */
function removeHTTPWhitespace (str, leading = true, trailing = true) {
-  let lead = 0
-  let trail = str.length - 1
-
-  if (leading) {
-    for (; lead < str.length && isHTTPWhiteSpace(str[lead]); lead++);
-  }
-
-  if (trailing) {
-    for (; trail > 0 && isHTTPWhiteSpace(str[trail]); trail--);
-  }
-
-  return str.slice(lead, trail + 1)
+  return removeChars(str, leading, trailing, isHTTPWhiteSpace)
}

/**
 * @see https://infra.spec.whatwg.org/#ascii-whitespace
- * @param {string} char
+ * @param {number} char
 */
function isASCIIWhitespace (char) {
-  return char === '\r' || char === '\n' || char === '\t' || char === '\f' || char === ' '
+  // "\r\n\t\f "
+  return char === 0x00d || char === 0x00a || char === 0x009 || char === 0x00c || char === 0x020
}

/**
 * @see https://infra.spec.whatwg.org/#strip-leading-and-trailing-ascii-whitespace
 * @param {string} str
+ * @param {boolean} [leading=true]
+ * @param {boolean} [trailing=true]
 */
function removeASCIIWhitespace (str, leading = true, trailing = true) {
+  return removeChars(str, leading, trailing, isASCIIWhitespace)
+}
+
+/**
+ * @param {string} str
+ * @param {boolean} leading
+ * @param {boolean} trailing
+ * @param {(charCode: number) => boolean} predicate
+ * @returns
+ */
+function removeChars (str, leading, trailing, predicate) {
  let lead = 0
  let trail = str.length - 1

  if (leading) {
-    for (; lead < str.length && isASCIIWhitespace(str[lead]); lead++);
+    while (lead < str.length && predicate(str.charCodeAt(lead))) lead++
  }

  if (trailing) {
-    for (; trail > 0 && isASCIIWhitespace(str[trail]); trail--);
+    while (trail > 0 && predicate(str.charCodeAt(trail))) trail--
  }

-  return str.slice(lead, trail + 1)
+  return lead === 0 && trail === str.length - 1 ? str : str.slice(lead, trail + 1)
}
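Both strip helpers now funnel into a single removeChars driven by a charCode predicate, avoiding per-character string allocation. Illustrative use with a made-up predicate:

// Sketch: removeChars trims characters matching a predicate from either end.
const isDash = (code) => code === 0x2d // '-'
console.log(removeChars('--hello--', true, true, isDash)) // 'hello'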
/**
 * @see https://infra.spec.whatwg.org/#isomorphic-decode
 * @param {Uint8Array} input
 * @returns {string}
 */
function isomorphicDecode (input) {
  // 1. To isomorphic decode a byte sequence input, return a string whose code point
  // length is equal to input’s length and whose code points have the same values
  // as the values of input’s bytes, in the same order.
  const length = input.length
  if ((2 << 15) - 1 > length) {
    return String.fromCharCode.apply(null, input)
  }
  let result = ''; let i = 0
  let addition = (2 << 15) - 1
  while (i < length) {
    if (i + addition > length) {
      addition = length - i
    }
    result += String.fromCharCode.apply(null, input.subarray(i, i += addition))
  }
  return result
}
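The chunking exists because String.fromCharCode.apply spreads the whole input as arguments, and very large argument lists blow the stack; inputs at or above 2**16 - 1 bytes are therefore decoded in bounded windows. The failure mode it avoids (illustrative; the exact limit is engine-dependent):

// Sketch: spreading a huge array as arguments can throw a RangeError,
// which is why isomorphicDecode decodes long inputs in chunks.
const big = new Uint8Array(10_000_000)
try {
  String.fromCharCode.apply(null, big)
} catch (err) {
  console.log(err instanceof RangeError) // typically true
}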
/**
 * @see https://mimesniff.spec.whatwg.org/#minimize-a-supported-mime-type
 * @param {Exclude<ReturnType<typeof parseMIMEType>, 'failure'>} mimeType
 */
function minimizeSupportedMimeType (mimeType) {
  switch (mimeType.essence) {
    case 'application/ecmascript':
    case 'application/javascript':
    case 'application/x-ecmascript':
    case 'application/x-javascript':
    case 'text/ecmascript':
    case 'text/javascript':
    case 'text/javascript1.0':
    case 'text/javascript1.1':
    case 'text/javascript1.2':
    case 'text/javascript1.3':
    case 'text/javascript1.4':
    case 'text/javascript1.5':
    case 'text/jscript':
    case 'text/livescript':
    case 'text/x-ecmascript':
    case 'text/x-javascript':
      // 1. If mimeType is a JavaScript MIME type, then return "text/javascript".
      return 'text/javascript'
    case 'application/json':
    case 'text/json':
      // 2. If mimeType is a JSON MIME type, then return "application/json".
      return 'application/json'
    case 'image/svg+xml':
      // 3. If mimeType’s essence is "image/svg+xml", then return "image/svg+xml".
      return 'image/svg+xml'
    case 'text/xml':
    case 'application/xml':
      // 4. If mimeType is an XML MIME type, then return "application/xml".
      return 'application/xml'
  }

  // 2. If mimeType is a JSON MIME type, then return "application/json".
  if (mimeType.subtype.endsWith('+json')) {
    return 'application/json'
  }

  // 4. If mimeType is an XML MIME type, then return "application/xml".
  if (mimeType.subtype.endsWith('+xml')) {
    return 'application/xml'
  }

  // 5. If mimeType is supported by the user agent, then return mimeType’s essence.
  // Technically, node doesn't support any mimetypes.

  // 6. Return the empty string.
  return ''
}
@@ -623,5 +735,10 @@ module.exports = {
  stringPercentDecode,
  parseMIMEType,
  collectAnHTTPQuotedString,
-  serializeAMimeType
+  serializeAMimeType,
+  removeChars,
+  removeHTTPWhitespace,
+  minimizeSupportedMimeType,
+  HTTP_TOKEN_CODEPOINTS,
+  isomorphicDecode
}
@@ -1,8 +1,6 @@
'use strict'

-/* istanbul ignore file: only for Node 12 */
-
-const { kConnected, kSize } = require('../core/symbols')
+const { kConnected, kSize } = require('../../core/symbols')

class CompatWeakRef {
  constructor (value) {
@@ -30,19 +28,19 @@ class CompatFinalizer {
      })
    }
  }
+
+  unregister (key) {}
}

module.exports = function () {
-  // FIXME: remove workaround when the Node bug is fixed
+  // FIXME: remove workaround when the Node bug is backported to v18
  // https://github.com/nodejs/node/issues/49344#issuecomment-1741776308
-  if (process.env.NODE_V8_COVERAGE) {
+  if (process.env.NODE_V8_COVERAGE && process.version.startsWith('v18')) {
    process._rawDebug('Using compatibility WeakRef and FinalizationRegistry')
    return {
      WeakRef: CompatWeakRef,
      FinalizationRegistry: CompatFinalizer
    }
  }
-  return {
-    WeakRef: global.WeakRef || CompatWeakRef,
-    FinalizationRegistry: global.FinalizationRegistry || CompatFinalizer
-  }
+  return { WeakRef, FinalizationRegistry }
}
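With this change the compat shim is only used for V8-coverage runs on Node 18; everywhere else the native globals are returned directly. For orientation, the FinalizationRegistry contract the shim mimics (standard Node API, illustrative values):

// Sketch: clean up an external index entry after its value is collected.
const registry = new FinalizationRegistry((key) => {
  console.log(`entry for ${key} can be cleaned up`)
})

let client = { /* stand-in for a pooled client */ }
registry.register(client, 'https://example.org')
client = null // after a future GC pass, the callback may fire with the key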
501
node_modules/undici/lib/web/fetch/formdata-parser.js
generated
vendored
Normal file
@@ -0,0 +1,501 @@
'use strict'

const { isUSVString, bufferToLowerCasedHeaderName } = require('../../core/util')
const { utf8DecodeBytes } = require('./util')
const { HTTP_TOKEN_CODEPOINTS, isomorphicDecode } = require('./data-url')
const { makeEntry } = require('./formdata')
const { webidl } = require('./webidl')
const assert = require('node:assert')
const { File: NodeFile } = require('node:buffer')

const File = globalThis.File ?? NodeFile

const formDataNameBuffer = Buffer.from('form-data; name="')
const filenameBuffer = Buffer.from('filename')
const dd = Buffer.from('--')
const ddcrlf = Buffer.from('--\r\n')

/**
 * @param {string} chars
 */
function isAsciiString (chars) {
  for (let i = 0; i < chars.length; ++i) {
    if ((chars.charCodeAt(i) & ~0x7F) !== 0) {
      return false
    }
  }
  return true
}

/**
 * @see https://andreubotella.github.io/multipart-form-data/#multipart-form-data-boundary
 * @param {string} boundary
 */
function validateBoundary (boundary) {
  const length = boundary.length

  // - its length is greater or equal to 27 and lesser or equal to 70, and
  if (length < 27 || length > 70) {
    return false
  }

  // - it is composed by bytes in the ranges 0x30 to 0x39, 0x41 to 0x5A, or
  // 0x61 to 0x7A, inclusive (ASCII alphanumeric), or which are 0x27 ('),
  // 0x2D (-) or 0x5F (_).
  for (let i = 0; i < length; ++i) {
    const cp = boundary.charCodeAt(i)

    if (!(
      (cp >= 0x30 && cp <= 0x39) ||
      (cp >= 0x41 && cp <= 0x5a) ||
      (cp >= 0x61 && cp <= 0x7a) ||
      cp === 0x27 ||
      cp === 0x2d ||
      cp === 0x5f
    )) {
      return false
    }
  }

  return true
}
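validateBoundary encodes the boundary grammar (length 27 to 70; ASCII alphanumerics plus 0x27, 0x2D, 0x5F). Illustrative values against the function above:

// Sketch: which boundary strings pass validation.
console.log(validateBoundary('----WebKitFormBoundary7MA4YWxkTrZu0gW')) // true
console.log(validateBoundary('short'))                                 // false (under 27 chars)
console.log(validateBoundary('x'.repeat(28) + '%'))                    // false ('%' is not allowed)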
/**
 * @see https://andreubotella.github.io/multipart-form-data/#multipart-form-data-parser
 * @param {Buffer} input
 * @param {ReturnType<import('./data-url')['parseMIMEType']>} mimeType
 */
function multipartFormDataParser (input, mimeType) {
  // 1. Assert: mimeType’s essence is "multipart/form-data".
  assert(mimeType !== 'failure' && mimeType.essence === 'multipart/form-data')

  const boundaryString = mimeType.parameters.get('boundary')

  // 2. If mimeType’s parameters["boundary"] does not exist, return failure.
  // Otherwise, let boundary be the result of UTF-8 decoding mimeType’s
  // parameters["boundary"].
  if (boundaryString === undefined) {
    throw parsingError('missing boundary in content-type header')
  }

  const boundary = Buffer.from(`--${boundaryString}`, 'utf8')

  // 3. Let entry list be an empty entry list.
  const entryList = []

  // 4. Let position be a pointer to a byte in input, initially pointing at
  // the first byte.
  const position = { position: 0 }

  // Note: undici addition, allows leading and trailing CRLFs.
  while (input[position.position] === 0x0d && input[position.position + 1] === 0x0a) {
    position.position += 2
  }

  let trailing = input.length

  while (input[trailing - 1] === 0x0a && input[trailing - 2] === 0x0d) {
    trailing -= 2
  }

  if (trailing !== input.length) {
    input = input.subarray(0, trailing)
  }

  // 5. While true:
  while (true) {
    // 5.1. If position points to a sequence of bytes starting with 0x2D 0x2D
    // (`--`) followed by boundary, advance position by 2 + the length of
    // boundary. Otherwise, return failure.
    // Note: boundary is padded with 2 dashes already, no need to add 2.
    if (input.subarray(position.position, position.position + boundary.length).equals(boundary)) {
      position.position += boundary.length
    } else {
      throw parsingError('expected a value starting with -- and the boundary')
    }

    // 5.2. If position points to the sequence of bytes 0x2D 0x2D 0x0D 0x0A
    // (`--` followed by CR LF) followed by the end of input, return entry list.
    // Note: a body does NOT need to end with CRLF. It can end with --.
    if (
      (position.position === input.length - 2 && bufferStartsWith(input, dd, position)) ||
      (position.position === input.length - 4 && bufferStartsWith(input, ddcrlf, position))
    ) {
      return entryList
    }

    // 5.3. If position does not point to a sequence of bytes starting with 0x0D
    // 0x0A (CR LF), return failure.
    if (input[position.position] !== 0x0d || input[position.position + 1] !== 0x0a) {
      throw parsingError('expected CRLF')
    }

    // 5.4. Advance position by 2. (This skips past the newline.)
    position.position += 2

    // 5.5. Let name, filename and contentType be the result of parsing
    // multipart/form-data headers on input and position, if the result
    // is not failure. Otherwise, return failure.
    const result = parseMultipartFormDataHeaders(input, position)

    let { name, filename, contentType, encoding } = result

    // 5.6. Advance position by 2. (This skips past the empty line that marks
    // the end of the headers.)
    position.position += 2

    // 5.7. Let body be the empty byte sequence.
    let body

    // 5.8. Body loop: While position is not past the end of input:
    // TODO: the steps here are completely wrong
    {
      const boundaryIndex = input.indexOf(boundary.subarray(2), position.position)

      if (boundaryIndex === -1) {
        throw parsingError('expected boundary after body')
      }

      body = input.subarray(position.position, boundaryIndex - 4)

      position.position += body.length

      // Note: position must be advanced by the body's length before being
      // decoded, otherwise the parsing will fail.
      if (encoding === 'base64') {
        body = Buffer.from(body.toString(), 'base64')
      }
    }

    // 5.9. If position does not point to a sequence of bytes starting with
    // 0x0D 0x0A (CR LF), return failure. Otherwise, advance position by 2.
    if (input[position.position] !== 0x0d || input[position.position + 1] !== 0x0a) {
      throw parsingError('expected CRLF')
    } else {
      position.position += 2
    }

    // 5.10. If filename is not null:
    let value

    if (filename !== null) {
      // 5.10.1. If contentType is null, set contentType to "text/plain".
      contentType ??= 'text/plain'

      // 5.10.2. If contentType is not an ASCII string, set contentType to the empty string.

      // Note: `buffer.isAscii` can be used at zero-cost, but converting a string to a buffer is a high overhead.
      // Content-Type is a relatively small string, so it is faster to use `String#charCodeAt`.
      if (!isAsciiString(contentType)) {
        contentType = ''
      }

      // 5.10.3. Let value be a new File object with name filename, type contentType, and body body.
      value = new File([body], filename, { type: contentType })
    } else {
      // 5.11. Otherwise:

      // 5.11.1. Let value be the UTF-8 decoding without BOM of body.
      value = utf8DecodeBytes(Buffer.from(body))
    }

    // 5.12. Assert: name is a scalar value string and value is either a scalar value string or a File object.
    assert(isUSVString(name))
    assert((typeof value === 'string' && isUSVString(value)) || webidl.is.File(value))

    // 5.13. Create an entry with name and value, and append it to entry list.
    entryList.push(makeEntry(name, value, filename))
  }
}
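End to end, the parser takes the raw multipart bytes plus the parsed MIME type and returns an entry list. A hand-rolled round trip (boundary and field are invented; the deep-require paths are an assumption about this vendored tree):

// Sketch: feeding a minimal multipart body through the parser.
const { multipartFormDataParser } = require('undici/lib/web/fetch/formdata-parser')
const { parseMIMEType } = require('undici/lib/web/fetch/data-url')

const boundary = '----formdata-sketch-0123456789'
const body = Buffer.from(
  `--${boundary}\r\n` +
  'Content-Disposition: form-data; name="greeting"\r\n' +
  '\r\n' +
  'hello\r\n' +
  `--${boundary}--`
)
const mimeType = parseMIMEType(`multipart/form-data; boundary=${boundary}`)
console.log(multipartFormDataParser(body, mimeType))
// entries like [ { name: 'greeting', value: 'hello' } ]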
/**
 * @see https://andreubotella.github.io/multipart-form-data/#parse-multipart-form-data-headers
 * @param {Buffer} input
 * @param {{ position: number }} position
 */
function parseMultipartFormDataHeaders (input, position) {
  // 1. Let name, filename and contentType be null.
  let name = null
  let filename = null
  let contentType = null
  let encoding = null

  // 2. While true:
  while (true) {
    // 2.1. If position points to a sequence of bytes starting with 0x0D 0x0A (CR LF):
    if (input[position.position] === 0x0d && input[position.position + 1] === 0x0a) {
      // 2.1.1. If name is null, return failure.
      if (name === null) {
        throw parsingError('header name is null')
      }

      // 2.1.2. Return name, filename and contentType.
      return { name, filename, contentType, encoding }
    }

    // 2.2. Let header name be the result of collecting a sequence of bytes that are
    // not 0x0A (LF), 0x0D (CR) or 0x3A (:), given position.
    let headerName = collectASequenceOfBytes(
      (char) => char !== 0x0a && char !== 0x0d && char !== 0x3a,
      input,
      position
    )

    // 2.3. Remove any HTTP tab or space bytes from the start or end of header name.
    headerName = removeChars(headerName, true, true, (char) => char === 0x9 || char === 0x20)

    // 2.4. If header name does not match the field-name token production, return failure.
    if (!HTTP_TOKEN_CODEPOINTS.test(headerName.toString())) {
      throw parsingError('header name does not match the field-name token production')
    }

    // 2.5. If the byte at position is not 0x3A (:), return failure.
    if (input[position.position] !== 0x3a) {
      throw parsingError('expected :')
    }

    // 2.6. Advance position by 1.
    position.position++

    // 2.7. Collect a sequence of bytes that are HTTP tab or space bytes given position.
    // (Do nothing with those bytes.)
    collectASequenceOfBytes(
      (char) => char === 0x20 || char === 0x09,
      input,
      position
    )

    // 2.8. Byte-lowercase header name and switch on the result:
    switch (bufferToLowerCasedHeaderName(headerName)) {
      case 'content-disposition': {
        // 1. Set name and filename to null.
        name = filename = null

        // 2. If position does not point to a sequence of bytes starting with
        // `form-data; name="`, return failure.
        if (!bufferStartsWith(input, formDataNameBuffer, position)) {
          throw parsingError('expected form-data; name=" for content-disposition header')
        }

        // 3. Advance position so it points at the byte after the next 0x22 (")
        // byte (the one in the sequence of bytes matched above).
        position.position += 17

        // 4. Set name to the result of parsing a multipart/form-data name given
        // input and position, if the result is not failure. Otherwise, return
        // failure.
        name = parseMultipartFormDataName(input, position)

        // 5. If position points to a sequence of bytes starting with `; filename="`:
        if (input[position.position] === 0x3b /* ; */ && input[position.position + 1] === 0x20 /* ' ' */) {
          const at = { position: position.position + 2 }

          if (bufferStartsWith(input, filenameBuffer, at)) {
            if (input[at.position + 8] === 0x2a /* '*' */) {
              at.position += 10 // skip past filename*=

              // Remove leading http tab and spaces. See RFC for examples.
              // https://datatracker.ietf.org/doc/html/rfc6266#section-5
              collectASequenceOfBytes(
                (char) => char === 0x20 || char === 0x09,
                input,
                at
              )

              const headerValue = collectASequenceOfBytes(
                (char) => char !== 0x20 && char !== 0x0d && char !== 0x0a, // ' ' or CRLF
                input,
                at
              )

              if (
                (headerValue[0] !== 0x75 && headerValue[0] !== 0x55) || // u or U
                (headerValue[1] !== 0x74 && headerValue[1] !== 0x54) || // t or T
                (headerValue[2] !== 0x66 && headerValue[2] !== 0x46) || // f or F
                headerValue[3] !== 0x2d || // -
                headerValue[4] !== 0x38 // 8
              ) {
                throw parsingError('unknown encoding, expected utf-8\'\'')
              }

              // skip utf-8''
              filename = decodeURIComponent(new TextDecoder().decode(headerValue.subarray(7)))

              position.position = at.position
            } else {
              // 1. Advance position so it points at the byte after the next 0x22 (") byte
              // (the one in the sequence of bytes matched above).
              position.position += 11

              // Remove leading http tab and spaces. See RFC for examples.
              // https://datatracker.ietf.org/doc/html/rfc6266#section-5
              collectASequenceOfBytes(
                (char) => char === 0x20 || char === 0x09,
                input,
                position
              )

              position.position++ // skip past " after removing whitespace

              // 2. Set filename to the result of parsing a multipart/form-data name given
              // input and position, if the result is not failure. Otherwise, return failure.
              filename = parseMultipartFormDataName(input, position)
            }
          }
        }

        break
      }
      case 'content-type': {
        // 1. Let header value be the result of collecting a sequence of bytes that are
        // not 0x0A (LF) or 0x0D (CR), given position.
        let headerValue = collectASequenceOfBytes(
          (char) => char !== 0x0a && char !== 0x0d,
          input,
          position
        )

        // 2. Remove any HTTP tab or space bytes from the end of header value.
        headerValue = removeChars(headerValue, false, true, (char) => char === 0x9 || char === 0x20)

        // 3. Set contentType to the isomorphic decoding of header value.
        contentType = isomorphicDecode(headerValue)

        break
      }
      case 'content-transfer-encoding': {
        let headerValue = collectASequenceOfBytes(
          (char) => char !== 0x0a && char !== 0x0d,
          input,
          position
        )

        headerValue = removeChars(headerValue, false, true, (char) => char === 0x9 || char === 0x20)

        encoding = isomorphicDecode(headerValue)

        break
      }
      default: {
        // Collect a sequence of bytes that are not 0x0A (LF) or 0x0D (CR), given position.
        // (Do nothing with those bytes.)
        collectASequenceOfBytes(
          (char) => char !== 0x0a && char !== 0x0d,
          input,
          position
        )
      }
    }

    // 2.9. If position does not point to a sequence of bytes starting with 0x0D 0x0A
    // (CR LF), return failure. Otherwise, advance position by 2 (past the newline).
    if (input[position.position] !== 0x0d && input[position.position + 1] !== 0x0a) {
      throw parsingError('expected CRLF')
    } else {
      position.position += 2
    }
  }
}
/**
 * @see https://andreubotella.github.io/multipart-form-data/#parse-a-multipart-form-data-name
 * @param {Buffer} input
 * @param {{ position: number }} position
 */
function parseMultipartFormDataName (input, position) {
  // 1. Assert: The byte at (position - 1) is 0x22 (").
  assert(input[position.position - 1] === 0x22)

  // 2. Let name be the result of collecting a sequence of bytes that are not 0x0A (LF), 0x0D (CR) or 0x22 ("), given position.
  /** @type {string | Buffer} */
  let name = collectASequenceOfBytes(
    (char) => char !== 0x0a && char !== 0x0d && char !== 0x22,
    input,
    position
  )

  // 3. If the byte at position is not 0x22 ("), return failure. Otherwise, advance position by 1.
  if (input[position.position] !== 0x22) {
    throw parsingError('expected "')
  } else {
    position.position++
  }

  // 4. Replace any occurrence of the following subsequences in name with the given byte:
  // - `%0A`: 0x0A (LF)
  // - `%0D`: 0x0D (CR)
  // - `%22`: 0x22 (")
  name = new TextDecoder().decode(name)
    .replace(/%0A/ig, '\n')
    .replace(/%0D/ig, '\r')
    .replace(/%22/g, '"')

  // 5. Return the UTF-8 decoding without BOM of name.
  return name
}

/**
 * @param {(char: number) => boolean} condition
 * @param {Buffer} input
 * @param {{ position: number }} position
 */
function collectASequenceOfBytes (condition, input, position) {
  let start = position.position

  while (start < input.length && condition(input[start])) {
    ++start
  }

  return input.subarray(position.position, (position.position = start))
}

/**
 * @param {Buffer} buf
 * @param {boolean} leading
 * @param {boolean} trailing
 * @param {(charCode: number) => boolean} predicate
 * @returns {Buffer}
 */
function removeChars (buf, leading, trailing, predicate) {
  let lead = 0
  let trail = buf.length - 1

  if (leading) {
    while (lead < buf.length && predicate(buf[lead])) lead++
  }

  if (trailing) {
    while (trail > 0 && predicate(buf[trail])) trail--
  }

  return lead === 0 && trail === buf.length - 1 ? buf : buf.subarray(lead, trail + 1)
}

/**
 * Checks if {@param buffer} starts with {@param start}
 * @param {Buffer} buffer
 * @param {Buffer} start
 * @param {{ position: number }} position
 */
function bufferStartsWith (buffer, start, position) {
  if (buffer.length < start.length) {
    return false
  }

  for (let i = 0; i < start.length; i++) {
    if (start[i] !== buffer[position.position + i]) {
      return false
    }
  }

  return true
}

function parsingError (cause) {
  return new TypeError('Failed to parse body as FormData.', { cause: new TypeError(cause) })
}

module.exports = {
  multipartFormDataParser,
  validateBoundary
}