format: prettify entire project

Author: Rim
Date: 2025-04-02 06:50:39 -04:00
parent 86f0782a98
commit 7ccc0be712
1711 changed files with 755867 additions and 235931 deletions

node_modules/fastq/bench.js (generated, vendored), 86 lines changed

@@ -1,66 +1,72 @@
'use strict'
'use strict';
const max = 1000000
const fastqueue = require('./')(worker, 1)
const { promisify } = require('util')
const immediate = promisify(setImmediate)
const qPromise = require('./').promise(immediate, 1)
const async = require('async')
const neo = require('neo-async')
const asyncqueue = async.queue(worker, 1)
const neoqueue = neo.queue(worker, 1)
const max = 1000000;
const fastqueue = require('./')(worker, 1);
const { promisify } = require('util');
const immediate = promisify(setImmediate);
const qPromise = require('./').promise(immediate, 1);
const async = require('async');
const neo = require('neo-async');
const asyncqueue = async.queue(worker, 1);
const neoqueue = neo.queue(worker, 1);
function bench (func, done) {
const key = max + '*' + func.name
let count = -1
function bench(func, done) {
const key = max + '*' + func.name;
let count = -1;
console.time(key)
end()
console.time(key);
end();
function end () {
function end() {
if (++count < max) {
func(end)
func(end);
} else {
console.timeEnd(key)
console.timeEnd(key);
if (done) {
done()
done();
}
}
}
}
function benchFastQ (done) {
fastqueue.push(42, done)
function benchFastQ(done) {
fastqueue.push(42, done);
}
function benchAsyncQueue (done) {
asyncqueue.push(42, done)
function benchAsyncQueue(done) {
asyncqueue.push(42, done);
}
function benchNeoQueue (done) {
neoqueue.push(42, done)
function benchNeoQueue(done) {
neoqueue.push(42, done);
}
function worker (arg, cb) {
setImmediate(cb)
function worker(arg, cb) {
setImmediate(cb);
}
function benchSetImmediate (cb) {
worker(42, cb)
function benchSetImmediate(cb) {
worker(42, cb);
}
function benchFastQPromise (done) {
qPromise.push(42).then(function () { done() }, done)
function benchFastQPromise(done) {
qPromise.push(42).then(function () {
done();
}, done);
}
function runBench (done) {
async.eachSeries([
benchSetImmediate,
benchFastQ,
benchNeoQueue,
benchAsyncQueue,
benchFastQPromise
], bench, done)
function runBench(done) {
async.eachSeries(
[
benchSetImmediate,
benchFastQ,
benchNeoQueue,
benchAsyncQueue,
benchFastQPromise,
],
bench,
done
);
}
runBench(runBench)
runBench(runBench);

node_modules/fastq/example.js (generated, vendored), 16 lines changed

@@ -1,14 +1,16 @@
'use strict'
'use strict';
/* eslint-disable no-var */
var queue = require('./')(worker, 1)
var queue = require('./')(worker, 1);
queue.push(42, function (err, result) {
if (err) { throw err }
console.log('the result is', result)
})
if (err) {
throw err;
}
console.log('the result is', result);
});
function worker (arg, cb) {
cb(null, 42 * 2)
function worker(arg, cb) {
cb(null, 42 * 2);
}

node_modules/fastq/example.mjs (generated, vendored), 10 lines changed

@@ -1,11 +1,11 @@
import { promise as queueAsPromised } from './queue.js'
import { promise as queueAsPromised } from './queue.js';
/* eslint-disable */
const queue = queueAsPromised(worker, 1)
const queue = queueAsPromised(worker, 1);
console.log('the result is', await queue.push(42))
console.log('the result is', await queue.push(42));
async function worker (arg) {
return 42 * 2
async function worker(arg) {
return 42 * 2;
}

node_modules/fastq/index.d.ts (generated, vendored), 74 lines changed

@@ -1,57 +1,75 @@
declare function fastq<C, T = any, R = any>(context: C, worker: fastq.worker<C, T, R>, concurrency: number): fastq.queue<T, R>
declare function fastq<C, T = any, R = any>(worker: fastq.worker<C, T, R>, concurrency: number): fastq.queue<T, R>
declare function fastq<C, T = any, R = any>(
context: C,
worker: fastq.worker<C, T, R>,
concurrency: number
): fastq.queue<T, R>;
declare function fastq<C, T = any, R = any>(
worker: fastq.worker<C, T, R>,
concurrency: number
): fastq.queue<T, R>;
declare namespace fastq {
type worker<C, T = any, R = any> = (this: C, task: T, cb: fastq.done<R>) => void
type asyncWorker<C, T = any, R = any> = (this: C, task: T) => Promise<R>
type done<R = any> = (err: Error | null, result?: R) => void
type errorHandler<T = any> = (err: Error, task: T) => void
type worker<C, T = any, R = any> = (
this: C,
task: T,
cb: fastq.done<R>
) => void;
type asyncWorker<C, T = any, R = any> = (this: C, task: T) => Promise<R>;
type done<R = any> = (err: Error | null, result?: R) => void;
type errorHandler<T = any> = (err: Error, task: T) => void;
interface queue<T = any, R = any> {
/** Add a task at the end of the queue. `done(err, result)` will be called when the task was processed. */
push(task: T, done?: done<R>): void
push(task: T, done?: done<R>): void;
/** Add a task at the beginning of the queue. `done(err, result)` will be called when the task was processed. */
unshift(task: T, done?: done<R>): void
unshift(task: T, done?: done<R>): void;
/** Pause the processing of tasks. Currently worked tasks are not stopped. */
pause(): any
pause(): any;
/** Resume the processing of tasks. */
resume(): any
running(): number
resume(): any;
running(): number;
/** Returns `false` if there are tasks being processed or waiting to be processed. `true` otherwise. */
idle(): boolean
idle(): boolean;
/** Returns the number of tasks waiting to be processed (in the queue). */
length(): number
length(): number;
/** Returns all the tasks be processed (in the queue). Returns empty array when there are no tasks */
getQueue(): T[]
getQueue(): T[];
/** Removes all tasks waiting to be processed, and reset `drain` to an empty function. */
kill(): any
kill(): any;
/** Same than `kill` but the `drain` function will be called before reset to empty. */
killAndDrain(): any
killAndDrain(): any;
/** Set a global error handler. `handler(err, task)` will be called each time a task is completed, `err` will be not null if the task has thrown an error. */
error(handler: errorHandler<T>): void
error(handler: errorHandler<T>): void;
/** Property that returns the number of concurrent tasks that could be executed in parallel. It can be altered at runtime. */
concurrency: number
concurrency: number;
/** Property (Read-Only) that returns `true` when the queue is in a paused state. */
readonly paused: boolean
readonly paused: boolean;
/** Function that will be called when the last item from the queue has been processed by a worker. It can be altered at runtime. */
drain(): any
drain(): any;
/** Function that will be called when the last item from the queue has been assigned to a worker. It can be altered at runtime. */
empty: () => void
empty: () => void;
/** Function that will be called when the queue hits the concurrency limit. It can be altered at runtime. */
saturated: () => void
saturated: () => void;
}
interface queueAsPromised<T = any, R = any> extends queue<T, R> {
/** Add a task at the end of the queue. The returned `Promise` will be fulfilled (rejected) when the task is completed successfully (unsuccessfully). */
push(task: T): Promise<R>
push(task: T): Promise<R>;
/** Add a task at the beginning of the queue. The returned `Promise` will be fulfilled (rejected) when the task is completed successfully (unsuccessfully). */
unshift(task: T): Promise<R>
unshift(task: T): Promise<R>;
/** Wait for the queue to be drained. The returned `Promise` will be resolved when all tasks in the queue have been processed by a worker. */
drained(): Promise<void>
drained(): Promise<void>;
}
function promise<C, T = any, R = any>(context: C, worker: fastq.asyncWorker<C, T, R>, concurrency: number): fastq.queueAsPromised<T, R>
function promise<C, T = any, R = any>(worker: fastq.asyncWorker<C, T, R>, concurrency: number): fastq.queueAsPromised<T, R>
function promise<C, T = any, R = any>(
context: C,
worker: fastq.asyncWorker<C, T, R>,
concurrency: number
): fastq.queueAsPromised<T, R>;
function promise<C, T = any, R = any>(
worker: fastq.asyncWorker<C, T, R>,
concurrency: number
): fastq.queueAsPromised<T, R>;
}
export = fastq
export = fastq;
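
The typings above describe both the callback-style queue created by fastq(worker, concurrency) and the promise-based queue created by fastq.promise(asyncWorker, concurrency). A minimal callback-style sketch, assuming only the API documented above (the worker and task values are illustrative):

'use strict';

const fastq = require('fastq');

// Callback worker: receives a task and a done(err, result) callback.
function worker(task, cb) {
  cb(null, task * 2);
}

// Concurrency of 1: tasks are processed one at a time.
const queue = fastq(worker, 1);

queue.push(21, function (err, result) {
  if (err) throw err;
  console.log('the result is', result); // 42
});

// drain() can be reassigned at runtime; it fires when the queue empties.
queue.drain = function () {
  console.log('all tasks processed');
};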

node_modules/fastq/queue.js (generated, vendored), 331 lines changed

@@ -1,25 +1,25 @@
'use strict'
'use strict';
/* eslint-disable no-var */
var reusify = require('reusify')
var reusify = require('reusify');
function fastqueue (context, worker, _concurrency) {
function fastqueue(context, worker, _concurrency) {
if (typeof context === 'function') {
_concurrency = worker
worker = context
context = null
_concurrency = worker;
worker = context;
context = null;
}
if (!(_concurrency >= 1)) {
throw new Error('fastqueue concurrency must be equal to or greater than 1')
throw new Error('fastqueue concurrency must be equal to or greater than 1');
}
var cache = reusify(Task)
var queueHead = null
var queueTail = null
var _running = 0
var errorHandler = null
var cache = reusify(Task);
var queueHead = null;
var queueTail = null;
var _running = 0;
var errorHandler = null;
var self = {
push: push,
@@ -28,19 +28,21 @@ function fastqueue (context, worker, _concurrency) {
pause: pause,
paused: false,
get concurrency () {
return _concurrency
get concurrency() {
return _concurrency;
},
set concurrency (value) {
set concurrency(value) {
if (!(value >= 1)) {
throw new Error('fastqueue concurrency must be equal to or greater than 1')
throw new Error(
'fastqueue concurrency must be equal to or greater than 1'
);
}
_concurrency = value
_concurrency = value;
if (self.paused) return
for (; queueHead && _running < _concurrency;) {
_running++
release()
if (self.paused) return;
for (; queueHead && _running < _concurrency; ) {
_running++;
release();
}
},
@@ -53,259 +55,258 @@ function fastqueue (context, worker, _concurrency) {
empty: noop,
kill: kill,
killAndDrain: killAndDrain,
error: error
error: error,
};
return self;
function running() {
return _running;
}
return self
function running () {
return _running
function pause() {
self.paused = true;
}
function pause () {
self.paused = true
}
function length () {
var current = queueHead
var counter = 0
function length() {
var current = queueHead;
var counter = 0;
while (current) {
current = current.next
counter++
current = current.next;
counter++;
}
return counter
return counter;
}
function getQueue () {
var current = queueHead
var tasks = []
function getQueue() {
var current = queueHead;
var tasks = [];
while (current) {
tasks.push(current.value)
current = current.next
tasks.push(current.value);
current = current.next;
}
return tasks
return tasks;
}
function resume () {
if (!self.paused) return
self.paused = false
function resume() {
if (!self.paused) return;
self.paused = false;
if (queueHead === null) {
_running++
release()
return
_running++;
release();
return;
}
for (; queueHead && _running < _concurrency;) {
_running++
release()
for (; queueHead && _running < _concurrency; ) {
_running++;
release();
}
}
function idle () {
return _running === 0 && self.length() === 0
function idle() {
return _running === 0 && self.length() === 0;
}
function push (value, done) {
var current = cache.get()
function push(value, done) {
var current = cache.get();
current.context = context
current.release = release
current.value = value
current.callback = done || noop
current.errorHandler = errorHandler
current.context = context;
current.release = release;
current.value = value;
current.callback = done || noop;
current.errorHandler = errorHandler;
if (_running >= _concurrency || self.paused) {
if (queueTail) {
queueTail.next = current
queueTail = current
queueTail.next = current;
queueTail = current;
} else {
queueHead = current
queueTail = current
self.saturated()
queueHead = current;
queueTail = current;
self.saturated();
}
} else {
_running++
worker.call(context, current.value, current.worked)
_running++;
worker.call(context, current.value, current.worked);
}
}
function unshift (value, done) {
var current = cache.get()
function unshift(value, done) {
var current = cache.get();
current.context = context
current.release = release
current.value = value
current.callback = done || noop
current.errorHandler = errorHandler
current.context = context;
current.release = release;
current.value = value;
current.callback = done || noop;
current.errorHandler = errorHandler;
if (_running >= _concurrency || self.paused) {
if (queueHead) {
current.next = queueHead
queueHead = current
current.next = queueHead;
queueHead = current;
} else {
queueHead = current
queueTail = current
self.saturated()
queueHead = current;
queueTail = current;
self.saturated();
}
} else {
_running++
worker.call(context, current.value, current.worked)
_running++;
worker.call(context, current.value, current.worked);
}
}
function release (holder) {
function release(holder) {
if (holder) {
cache.release(holder)
cache.release(holder);
}
var next = queueHead
var next = queueHead;
if (next && _running <= _concurrency) {
if (!self.paused) {
if (queueTail === queueHead) {
queueTail = null
queueTail = null;
}
queueHead = next.next
next.next = null
worker.call(context, next.value, next.worked)
queueHead = next.next;
next.next = null;
worker.call(context, next.value, next.worked);
if (queueTail === null) {
self.empty()
self.empty();
}
} else {
_running--
_running--;
}
} else if (--_running === 0) {
self.drain()
self.drain();
}
}
function kill () {
queueHead = null
queueTail = null
self.drain = noop
function kill() {
queueHead = null;
queueTail = null;
self.drain = noop;
}
function killAndDrain () {
queueHead = null
queueTail = null
self.drain()
self.drain = noop
function killAndDrain() {
queueHead = null;
queueTail = null;
self.drain();
self.drain = noop;
}
function error (handler) {
errorHandler = handler
function error(handler) {
errorHandler = handler;
}
}
function noop () {}
function noop() {}
function Task () {
this.value = null
this.callback = noop
this.next = null
this.release = noop
this.context = null
this.errorHandler = null
function Task() {
this.value = null;
this.callback = noop;
this.next = null;
this.release = noop;
this.context = null;
this.errorHandler = null;
var self = this
var self = this;
this.worked = function worked (err, result) {
var callback = self.callback
var errorHandler = self.errorHandler
var val = self.value
self.value = null
self.callback = noop
this.worked = function worked(err, result) {
var callback = self.callback;
var errorHandler = self.errorHandler;
var val = self.value;
self.value = null;
self.callback = noop;
if (self.errorHandler) {
errorHandler(err, val)
errorHandler(err, val);
}
callback.call(self.context, err, result)
self.release(self)
}
callback.call(self.context, err, result);
self.release(self);
};
}
function queueAsPromised (context, worker, _concurrency) {
function queueAsPromised(context, worker, _concurrency) {
if (typeof context === 'function') {
_concurrency = worker
worker = context
context = null
_concurrency = worker;
worker = context;
context = null;
}
function asyncWrapper (arg, cb) {
worker.call(this, arg)
.then(function (res) {
cb(null, res)
}, cb)
function asyncWrapper(arg, cb) {
worker.call(this, arg).then(function (res) {
cb(null, res);
}, cb);
}
var queue = fastqueue(context, asyncWrapper, _concurrency)
var queue = fastqueue(context, asyncWrapper, _concurrency);
var pushCb = queue.push
var unshiftCb = queue.unshift
var pushCb = queue.push;
var unshiftCb = queue.unshift;
queue.push = push
queue.unshift = unshift
queue.drained = drained
queue.push = push;
queue.unshift = unshift;
queue.drained = drained;
return queue
return queue;
function push (value) {
function push(value) {
var p = new Promise(function (resolve, reject) {
pushCb(value, function (err, result) {
if (err) {
reject(err)
return
reject(err);
return;
}
resolve(result)
})
})
resolve(result);
});
});
// Let's fork the promise chain to
// make the error bubble up to the user but
// not lead to a unhandledRejection
p.catch(noop)
p.catch(noop);
return p
return p;
}
function unshift (value) {
function unshift(value) {
var p = new Promise(function (resolve, reject) {
unshiftCb(value, function (err, result) {
if (err) {
reject(err)
return
reject(err);
return;
}
resolve(result)
})
})
resolve(result);
});
});
// Let's fork the promise chain to
// make the error bubble up to the user but
// not lead to a unhandledRejection
p.catch(noop)
p.catch(noop);
return p
return p;
}
function drained () {
function drained() {
var p = new Promise(function (resolve) {
process.nextTick(function () {
if (queue.idle()) {
resolve()
resolve();
} else {
var previousDrain = queue.drain
var previousDrain = queue.drain;
queue.drain = function () {
if (typeof previousDrain === 'function') previousDrain()
resolve()
queue.drain = previousDrain
}
if (typeof previousDrain === 'function') previousDrain();
resolve();
queue.drain = previousDrain;
};
}
})
})
});
});
return p
return p;
}
}
module.exports = fastqueue
module.exports.promise = queueAsPromised
module.exports = fastqueue;
module.exports.promise = queueAsPromised;
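
The promise wrapper exported as module.exports.promise wraps the callback queue so that push and unshift return promises and drained() resolves once the queue is idle. A minimal usage sketch, assuming the documented promise API (worker and task values are illustrative):

'use strict';

const queueAsPromised = require('fastq').promise;

// Async worker: the resolved value becomes the result of push()/unshift().
async function worker(task) {
  return task * 2;
}

const queue = queueAsPromised(worker, 2);

async function main() {
  // push() returns a promise for the worker's result; if the worker throws,
  // the returned promise rejects (the internal p.catch(noop) fork keeps an
  // unawaited rejection from becoming an unhandledRejection).
  const result = await queue.push(21);
  console.log('the result is', result); // 42

  queue.push(1);
  queue.push(2);

  // drained() resolves once every queued task has been processed.
  await queue.drained();
  console.log('queue is idle');
}

main();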