chore: add pkg dependencies

19 node_modules/pkg/lib-es5/bin.js generated vendored Normal file
@@ -0,0 +1,19 @@
#!/usr/bin/env node
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const index_1 = require("./index");
const log_1 = require("./log");
async function main() {
    if (process.env.CHDIR && process.env.CHDIR !== process.cwd()) {
        // allow to override cwd by CHDIR env var
        // https://github.com/resin-io/etcher/pull/1713
        process.chdir(process.env.CHDIR);
    }
    await (0, index_1.exec)(process.argv.slice(2));
}
main().catch((error) => {
    if (!error.wasReported)
        log_1.log.error(error);
    process.exit(2);
});
//# sourceMappingURL=bin.js.map

15 node_modules/pkg/lib-es5/chmod.js generated vendored Normal file
@@ -0,0 +1,15 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.plusx = void 0;
const fs_extra_1 = require("fs-extra");
async function plusx(file) {
    const s = await (0, fs_extra_1.stat)(file);
    const newMode = s.mode | 64 | 8 | 1;
    if (s.mode === newMode) {
        return;
    }
    const base8 = newMode.toString(8).slice(-3);
    await (0, fs_extra_1.chmod)(file, base8);
}
exports.plusx = plusx;
//# sourceMappingURL=chmod.js.map
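
The bit arithmetic above is easier to read in octal: `64 | 8 | 1` is `0o100 | 0o010 | 0o001 = 0o111`, i.e. u+x, g+x, o+x. A minimal standalone sketch of the same operation, assuming Node's built-in `fs/promises` instead of fs-extra:

```js
// Sketch (not part of the vendored file): plusx() with octal literals.
const { stat, chmod } = require('fs/promises');

async function plusx(file) {
  const { mode } = await stat(file);
  const newMode = mode | 0o111; // add execute for user, group, others
  if (mode !== newMode) {
    // Passing a number avoids the string round-trip; the vendored code
    // passes the last three octal digits as a string, which Node also
    // interprets as an octal mode.
    await chmod(file, newMode & 0o777);
  }
}
```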

235 node_modules/pkg/lib-es5/common.js generated vendored Normal file
@@ -0,0 +1,235 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.toNormalizedRealPath = exports.removeUplevels = exports.stripSnapshot = exports.insideSnapshot = exports.snapshotify = exports.substituteDenominator = exports.retrieveDenominator = exports.isDotNODE = exports.isDotJSON = exports.isDotJS = exports.isPackageJson = exports.normalizePath = exports.isRootPath = exports.ALIAS_AS_RESOLVABLE = exports.ALIAS_AS_RELATIVE = exports.STORE_STAT = exports.STORE_LINKS = exports.STORE_CONTENT = exports.STORE_BLOB = void 0;
const assert_1 = __importDefault(require("assert"));
const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
exports.STORE_BLOB = 0;
exports.STORE_CONTENT = 1;
exports.STORE_LINKS = 2;
exports.STORE_STAT = 3;
exports.ALIAS_AS_RELATIVE = 0; // require("./file.js") // file or directory
exports.ALIAS_AS_RESOLVABLE = 1; // require("package")
const win32 = process.platform === 'win32';
const hasURL = typeof URL !== 'undefined';
function uppercaseDriveLetter(f) {
    if (f.slice(1, 3) !== ':\\')
        return f;
    return f[0].toUpperCase() + f.slice(1);
}
function removeTrailingSlashes(f) {
    if (f === '/') {
        return f; // dont remove from "/"
    }
    if (f.slice(1) === ':\\') {
        return f; // dont remove from "D:\"
    }
    let last = f.length - 1;
    while (true) {
        const char = f.charAt(last);
        if (char === '\\') {
            f = f.slice(0, -1);
            last -= 1;
        }
        else if (char === '/') {
            f = f.slice(0, -1);
            last -= 1;
        }
        else {
            break;
        }
    }
    return f;
}
const isUrl = (p) => hasURL && p instanceof URL;
function pathToString(p, win) {
    let result;
    if (Buffer.isBuffer(p)) {
        result = p.toString();
    }
    else if (isUrl(p)) {
        result = win ? p.pathname.replace(/^\//, '') : p.pathname;
    }
    else {
        result = p;
    }
    return result;
}
function isRootPath(p) {
    let file = pathToString(p, false);
    if (file === '.') {
        file = path_1.default.resolve(file);
    }
    return path_1.default.dirname(file) === p;
}
exports.isRootPath = isRootPath;
function normalizePath(f) {
    let file = pathToString(f, win32);
    if (!/^.:$/.test(file)) {
        file = path_1.default.normalize(file);
    } // 'c:' -> 'c:.'
    if (win32) {
        file = uppercaseDriveLetter(file);
    }
    return removeTrailingSlashes(file);
}
exports.normalizePath = normalizePath;
function isPackageJson(file) {
    return path_1.default.basename(file) === 'package.json';
}
exports.isPackageJson = isPackageJson;
function isDotJS(file) {
    return path_1.default.extname(file) === '.js';
}
exports.isDotJS = isDotJS;
function isDotJSON(file) {
    return path_1.default.extname(file) === '.json';
}
exports.isDotJSON = isDotJSON;
function isDotNODE(file) {
    return path_1.default.extname(file) === '.node';
}
exports.isDotNODE = isDotNODE;
function replaceSlashes(file, slash) {
    if (/^.:\\/.test(file)) {
        if (slash === '/') {
            return file.slice(2).replace(/\\/g, '/');
        }
    }
    else if (/^\//.test(file)) {
        if (slash === '\\') {
            return `C:${file.replace(/\//g, '\\')}`;
        }
    }
    return file;
}
function injectSnapshot(file) {
    if (/^.:\\/.test(file)) {
        // C:\path\to
        if (file.length === 3) {
            // C:\
            file = file.slice(0, -1);
        }
        // by convention, on windows we use C:\\snapshot
        return `C:\\snapshot${file.slice(2)}`;
    }
    if (/^\//.test(file)) {
        // /home/user/project
        if (file.length === 1) {
            // /
            file = file.slice(0, -1);
        }
        return `/snapshot${file}`;
    }
    return file;
}
function longestCommonLength(s1, s2) {
    const length = Math.min(s1.length, s2.length);
    for (let i = 0; i < length; i += 1) {
        if (s1.charCodeAt(i) !== s2.charCodeAt(i)) {
            return i;
        }
    }
    return length;
}
function withoutNodeModules(file) {
    return file.split(`${path_1.default.sep}node_modules${path_1.default.sep}`)[0];
}
function retrieveDenominator(files) {
    (0, assert_1.default)(files.length > 0);
    let s1 = withoutNodeModules(files[0]) + path_1.default.sep;
    for (let i = 1; i < files.length; i += 1) {
        const s2 = withoutNodeModules(files[i]) + path_1.default.sep;
        s1 = s1.slice(0, longestCommonLength(s1, s2));
    }
    if (s1 === '') {
        return win32 ? 2 : 0;
    }
    return s1.lastIndexOf(path_1.default.sep);
}
exports.retrieveDenominator = retrieveDenominator;
function substituteDenominator(f, denominator) {
    const rootLength = win32 ? 2 : 0;
    return f.slice(0, rootLength) + f.slice(denominator);
}
exports.substituteDenominator = substituteDenominator;
function snapshotify(file, slash) {
    return injectSnapshot(replaceSlashes(file, slash));
}
exports.snapshotify = snapshotify;
function insideSnapshot(f) {
    f = pathToString(f, win32);
    if (typeof f !== 'string') {
        return false;
    }
    if (win32) {
        const slice112 = f.slice(1, 12);
        return (slice112 === ':\\snapshot\\' ||
            slice112 === ':/snapshot\\' ||
            slice112 === ':\\snapshot/' ||
            slice112 === ':/snapshot/' ||
            slice112 === ':\\snapshot' ||
            slice112 === ':/snapshot');
    }
    const slice010 = f.slice(0, 10);
    return slice010 === '/snapshot/' || slice010 === '/snapshot';
}
exports.insideSnapshot = insideSnapshot;
function stripSnapshot(f) {
    const file = normalizePath(f);
    if (/^.:\\snapshot$/.test(file)) {
        return `${file[0]}:\\**\\`;
    }
    if (/^.:\\snapshot\\/.test(file)) {
        return `${file[0]}:\\**${file.slice(11)}`;
    }
    if (/^\/snapshot$/.test(file)) {
        return '/**/';
    }
    if (/^\/snapshot\//.test(file)) {
        return `/**${file.slice(9)}`;
    }
    return f; // not inside
}
exports.stripSnapshot = stripSnapshot;
function removeUplevels(f) {
    if (win32) {
        while (true) {
            if (f.slice(0, 3) === '..\\') {
                f = f.slice(3);
            }
            else if (f === '..') {
                f = '.';
            }
            else {
                break;
            }
        }
        return f;
    }
    while (true) {
        if (f.slice(0, 3) === '../') {
            f = f.slice(3);
        }
        else if (f === '..') {
            f = '.';
        }
        else {
            break;
        }
    }
    return f;
}
exports.removeUplevels = removeUplevels;
function toNormalizedRealPath(requestPath) {
    const file = normalizePath(requestPath);
    if (fs_1.default.existsSync(file)) {
        return fs_1.default.realpathSync(file);
    }
    return file;
}
exports.toNormalizedRealPath = toNormalizedRealPath;
//# sourceMappingURL=common.js.map
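
These path helpers implement pkg's virtual filesystem convention: packaged files live under `/snapshot` on POSIX and `C:\snapshot` on Windows. A few illustrative calls (input paths are hypothetical):

```js
const { snapshotify, insideSnapshot, stripSnapshot } = require('pkg/lib-es5/common');

snapshotify('/home/user/app/index.js', '/');  // '/snapshot/home/user/app/index.js'
snapshotify('C:\\app\\index.js', '\\');       // 'C:\\snapshot\\app\\index.js'
insideSnapshot('/snapshot/app/index.js');     // true
insideSnapshot('/home/user/app/index.js');    // false
stripSnapshot('/snapshot/app/index.js');      // '/**/app/index.js', e.g. for log output
```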

10 node_modules/pkg/lib-es5/compress_type.js generated vendored Normal file
@@ -0,0 +1,10 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CompressType = void 0;
var CompressType;
(function (CompressType) {
    CompressType[CompressType["None"] = 0] = "None";
    CompressType[CompressType["GZip"] = 1] = "GZip";
    CompressType[CompressType["Brotli"] = 2] = "Brotli";
})(CompressType = exports.CompressType || (exports.CompressType = {}));
//# sourceMappingURL=compress_type.js.map

420 node_modules/pkg/lib-es5/detector.js generated vendored Normal file
@@ -0,0 +1,420 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.detect = exports.parse = exports.visitorUseSCWD = exports.visitorMalformed = exports.visitorNonLiteral = exports.visitorSuccessful = void 0;
const babelTypes = __importStar(require("@babel/types"));
const babel = __importStar(require("@babel/parser"));
const generator_1 = __importDefault(require("@babel/generator"));
const log_1 = require("./log");
const common_1 = require("./common");
function isLiteral(node) {
    if (node == null) {
        return false;
    }
    if (!node.type.endsWith('Literal')) {
        return false;
    }
    if (node.type === 'TemplateLiteral' && node.expressions.length !== 0) {
        return false;
    }
    return true;
}
function getLiteralValue(node) {
    if (node.type === 'TemplateLiteral') {
        return node.quasis[0].value.raw;
    }
    if (node.type === 'NullLiteral') {
        throw new Error('Unexpected null in require expression');
    }
    if (node.type === 'RegExpLiteral') {
        throw new Error('Unexpected regexp in require expression');
    }
    return node.value;
}
function reconstructSpecifiers(specs) {
    if (!specs || !specs.length) {
        return '';
    }
    const defaults = [];
    for (const spec of specs) {
        if (babelTypes.isImportDefaultSpecifier(spec)) {
            defaults.push(spec.local.name);
        }
    }
    const nonDefaults = [];
    for (const spec of specs) {
        if (babelTypes.isImportSpecifier(spec)) {
            const importedName = babelTypes.isIdentifier(spec.imported)
                ? spec.imported.name
                : spec.imported.value;
            if (spec.local.name === importedName) {
                nonDefaults.push(spec.local.name);
            }
            else {
                nonDefaults.push(`${importedName} as ${spec.local.name}`);
            }
        }
    }
    if (nonDefaults.length) {
        defaults.push(`{ ${nonDefaults.join(', ')} }`);
    }
    return defaults.join(', ');
}
function reconstruct(node) {
    let v = (0, generator_1.default)(node, { comments: false }).code.replace(/\n/g, '');
    let v2;
    while (true) {
        v2 = v.replace(/\[ /g, '[').replace(/ \]/g, ']').replace(/ {2}/g, ' ');
        if (v2 === v) {
            break;
        }
        v = v2;
    }
    return v2;
}
function forge(pattern, was) {
    return pattern
        .replace('{c1}', ', ')
        .replace('{v1}', `"${was.v1}"`)
        .replace('{c2}', was.v2 ? ', ' : '')
        .replace('{v2}', was.v2 ? `"${was.v2}"` : '')
        .replace('{c3}', was.v3 ? ' from ' : '')
        .replace('{v3}', was.v3 ? was.v3 : '');
}
function valid2(v2) {
    return (v2 === undefined ||
        v2 === null ||
        v2 === 'must-exclude' ||
        v2 === 'may-exclude');
}
function visitorRequireResolve(n) {
    if (!babelTypes.isCallExpression(n)) {
        return null;
    }
    if (!babelTypes.isMemberExpression(n.callee)) {
        return null;
    }
    const ci = n.callee.object.type === 'Identifier' &&
        n.callee.object.name === 'require' &&
        n.callee.property.type === 'Identifier' &&
        n.callee.property.name === 'resolve';
    if (!ci) {
        return null;
    }
    if (!n.arguments || !isLiteral(n.arguments[0])) {
        return null;
    }
    return {
        v1: getLiteralValue(n.arguments[0]),
        v2: isLiteral(n.arguments[1]) ? getLiteralValue(n.arguments[1]) : null,
    };
}
function visitorRequire(n) {
    if (!babelTypes.isCallExpression(n)) {
        return null;
    }
    if (!babelTypes.isIdentifier(n.callee)) {
        return null;
    }
    if (n.callee.name !== 'require') {
        return null;
    }
    if (!n.arguments || !isLiteral(n.arguments[0])) {
        return null;
    }
    return {
        v1: getLiteralValue(n.arguments[0]),
        v2: isLiteral(n.arguments[1]) ? getLiteralValue(n.arguments[1]) : null,
    };
}
function visitorImport(n) {
    if (!babelTypes.isImportDeclaration(n)) {
        return null;
    }
    return { v1: n.source.value, v3: reconstructSpecifiers(n.specifiers) };
}
function visitorPathJoin(n) {
    if (!babelTypes.isCallExpression(n)) {
        return null;
    }
    if (!babelTypes.isMemberExpression(n.callee)) {
        return null;
    }
    const ci = n.callee.object &&
        n.callee.object.type === 'Identifier' &&
        n.callee.object.name === 'path' &&
        n.callee.property &&
        n.callee.property.type === 'Identifier' &&
        n.callee.property.name === 'join';
    if (!ci) {
        return null;
    }
    const dn = n.arguments[0] &&
        n.arguments[0].type === 'Identifier' &&
        n.arguments[0].name === '__dirname';
    if (!dn) {
        return null;
    }
    const f = n.arguments && isLiteral(n.arguments[1]) && n.arguments.length === 2; // TODO concat them
    if (!f) {
        return null;
    }
    return { v1: getLiteralValue(n.arguments[1]) };
}
function visitorSuccessful(node, test = false) {
    let was = visitorRequireResolve(node);
    if (was) {
        if (test) {
            return forge('require.resolve({v1}{c2}{v2})', was);
        }
        if (!valid2(was.v2)) {
            return null;
        }
        return {
            alias: was.v1,
            aliasType: common_1.ALIAS_AS_RESOLVABLE,
            mustExclude: was.v2 === 'must-exclude',
            mayExclude: was.v2 === 'may-exclude',
        };
    }
    was = visitorRequire(node);
    if (was) {
        if (test) {
            return forge('require({v1}{c2}{v2})', was);
        }
        if (!valid2(was.v2)) {
            return null;
        }
        return {
            alias: was.v1,
            aliasType: common_1.ALIAS_AS_RESOLVABLE,
            mustExclude: was.v2 === 'must-exclude',
            mayExclude: was.v2 === 'may-exclude',
        };
    }
    was = visitorImport(node);
    if (was) {
        if (test) {
            return forge('import {v3}{c3}{v1}', was);
        }
        return { alias: was.v1, aliasType: common_1.ALIAS_AS_RESOLVABLE };
    }
    was = visitorPathJoin(node);
    if (was) {
        if (test) {
            return forge('path.join(__dirname{c1}{v1})', was);
        }
        return { alias: was.v1, aliasType: common_1.ALIAS_AS_RELATIVE, mayExclude: false };
    }
    return null;
}
exports.visitorSuccessful = visitorSuccessful;
function nonLiteralRequireResolve(n) {
    if (!babelTypes.isCallExpression(n)) {
        return null;
    }
    if (!babelTypes.isMemberExpression(n.callee)) {
        return null;
    }
    const ci = n.callee.object.type === 'Identifier' &&
        n.callee.object.name === 'require' &&
        n.callee.property.type === 'Identifier' &&
        n.callee.property.name === 'resolve';
    if (!ci) {
        return null;
    }
    if (isLiteral(n.arguments[0])) {
        return null;
    }
    const m = n.arguments[1];
    if (!m) {
        return { v1: reconstruct(n.arguments[0]) };
    }
    if (!isLiteral(n.arguments[1])) {
        return null;
    }
    return {
        v1: reconstruct(n.arguments[0]),
        v2: getLiteralValue(n.arguments[1]),
    };
}
function nonLiteralRequire(n) {
    if (!babelTypes.isCallExpression(n)) {
        return null;
    }
    if (!babelTypes.isIdentifier(n.callee)) {
        return null;
    }
    if (n.callee.name !== 'require') {
        return null;
    }
    if (isLiteral(n.arguments[0])) {
        return null;
    }
    const m = n.arguments[1];
    if (!m) {
        return { v1: reconstruct(n.arguments[0]) };
    }
    if (!isLiteral(n.arguments[1])) {
        return null;
    }
    return {
        v1: reconstruct(n.arguments[0]),
        v2: getLiteralValue(n.arguments[1]),
    };
}
function visitorNonLiteral(n) {
    const was = nonLiteralRequireResolve(n) || nonLiteralRequire(n);
    if (was) {
        if (!valid2(was.v2)) {
            return null;
        }
        return {
            alias: was.v1,
            mustExclude: was.v2 === 'must-exclude',
            mayExclude: was.v2 === 'may-exclude',
        };
    }
    return null;
}
exports.visitorNonLiteral = visitorNonLiteral;
function isRequire(n) {
    if (!babelTypes.isCallExpression(n)) {
        return null;
    }
    if (!babelTypes.isIdentifier(n.callee)) {
        return null;
    }
    if (n.callee.name !== 'require') {
        return null;
    }
    const f = n.arguments && n.arguments[0];
    if (!f) {
        return null;
    }
    return { v1: reconstruct(n.arguments[0]) };
}
function isRequireResolve(n) {
    if (!babelTypes.isCallExpression(n)) {
        return null;
    }
    if (!babelTypes.isMemberExpression(n.callee)) {
        return null;
    }
    const ci = n.callee.object.type === 'Identifier' &&
        n.callee.object.name === 'require' &&
        n.callee.property.type === 'Identifier' &&
        n.callee.property.name === 'resolve';
    if (!ci) {
        return null;
    }
    const f = n.type === 'CallExpression' && n.arguments && n.arguments[0];
    if (!f) {
        return null;
    }
    return { v1: reconstruct(n.arguments[0]) };
}
function visitorMalformed(n) {
    const was = isRequireResolve(n) || isRequire(n);
    if (was) {
        return { alias: was.v1 };
    }
    return null;
}
exports.visitorMalformed = visitorMalformed;
function visitorUseSCWD(n) {
    if (!babelTypes.isCallExpression(n)) {
        return null;
    }
    if (!babelTypes.isMemberExpression(n.callee)) {
        return null;
    }
    const ci = n.callee.object.type === 'Identifier' &&
        n.callee.object.name === 'path' &&
        n.callee.property.type === 'Identifier' &&
        n.callee.property.name === 'resolve';
    if (!ci) {
        return null;
    }
    const was = { v1: n.arguments.map(reconstruct).join(', ') };
    if (was) {
        return { alias: was.v1 };
    }
    return null;
}
exports.visitorUseSCWD = visitorUseSCWD;
function traverse(ast, visitor) {
    // modified esprima-walk to support
    // visitor return value and "trying" flag
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const stack = [[ast, false]];
    for (let i = 0; i < stack.length; i += 1) {
        const item = stack[i];
        const [node] = item;
        if (node) {
            const trying = item[1] || babelTypes.isTryStatement(node);
            if (visitor(node, trying)) {
                for (const key in node) {
                    if (node[key]) {
                        const child = node[key];
                        if (child instanceof Array) {
                            for (let j = 0; j < child.length; j += 1) {
                                stack.push([child[j], trying]);
                            }
                        }
                        else if (child && typeof child.type === 'string') {
                            stack.push([child, trying]);
                        }
                    }
                }
            }
        }
    }
}
function parse(body) {
    return babel.parse(body, {
        allowImportExportEverywhere: true,
        allowReturnOutsideFunction: true,
    });
}
exports.parse = parse;
function detect(body, visitor) {
    let json;
    try {
        json = parse(body);
    }
    catch (error) {
        log_1.log.warn(`Babel parse has failed: ${error.message}`);
    }
    if (!json) {
        return;
    }
    traverse(json, visitor);
}
exports.detect = detect;
//# sourceMappingURL=detector.js.map
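
detect() parses a module body with Babel and walks every node; the exported visitors classify `require`/`import` sites. A small usage sketch with a hypothetical source string:

```js
const detector = require('pkg/lib-es5/detector');

const body = "const a = require('./a.js'); const b = require(dynamicName);";
detector.detect(body, (node) => {
  const hit = detector.visitorSuccessful(node);
  if (hit) {
    // For require('./a.js'):
    // { alias: './a.js', aliasType: 1 /* ALIAS_AS_RESOLVABLE */,
    //   mustExclude: false, mayExclude: false }
    console.log(hit);
  }
  return true; // truthy => keep descending into child nodes
});
```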

141 node_modules/pkg/lib-es5/fabricator.js generated vendored Normal file
@@ -0,0 +1,141 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.shutdown = exports.fabricateTwice = exports.fabricate = void 0;
const child_process_1 = require("child_process");
const log_1 = require("./log");
const script = `
var vm = require('vm');
var module = require('module');
var stdin = Buffer.alloc(0);
process.stdin.on('data', function (data) {
  stdin = Buffer.concat([ stdin, data ]);
  if (stdin.length >= 4) {
    var sizeOfSnap = stdin.readInt32LE(0);
    if (stdin.length >= 4 + sizeOfSnap + 4) {
      var sizeOfBody = stdin.readInt32LE(4 + sizeOfSnap);
      if (stdin.length >= 4 + sizeOfSnap + 4 + sizeOfBody) {
        var snap = stdin.toString('utf8', 4, 4 + sizeOfSnap);
        var body = Buffer.alloc(sizeOfBody);
        var startOfBody = 4 + sizeOfSnap + 4;
        stdin.copy(body, 0, startOfBody, startOfBody + sizeOfBody);
        stdin = Buffer.alloc(0);
        var code = module.wrap(body);
        var s = new vm.Script(code, {
          filename: snap,
          produceCachedData: true,
          sourceless: true
        });
        if (!s.cachedDataProduced) {
          console.error('Pkg: Cached data not produced.');
          process.exit(2);
        }
        var h = Buffer.alloc(4);
        var b = s.cachedData;
        h.writeInt32LE(b.length, 0);
        process.stdout.write(h);
        process.stdout.write(b);
      }
    }
  }
});
process.stdin.resume();
`;
const children = {};
function fabricate(bakes, fabricator, snap, body, cb) {
    const activeBakes = bakes.filter((bake) => {
        // list of bakes that don't influence the bytecode
        const bake2 = bake.replace(/_/g, '-');
        return !['--prof', '--v8-options', '--trace-opt', '--trace-deopt'].includes(bake2);
    });
    const cmd = fabricator.binaryPath;
    const key = JSON.stringify([cmd, activeBakes]);
    let child = children[key];
    if (!child) {
        const stderr = log_1.log.debugMode ? process.stdout : 'ignore';
        children[key] = (0, child_process_1.spawn)(cmd, activeBakes.concat('-e', script), {
            stdio: ['pipe', 'pipe', stderr],
            env: { PKG_EXECPATH: 'PKG_INVOKE_NODEJS' },
        });
        child = children[key];
    }
    function kill() {
        delete children[key];
        child.kill();
    }
    let stdout = Buffer.alloc(0);
    function onError(error) {
        // eslint-disable-next-line @typescript-eslint/no-use-before-define
        removeListeners();
        kill();
        cb(new Error(`Failed to make bytecode ${fabricator.nodeRange}-${fabricator.arch} for file ${snap} error (${error.message})`));
    }
    function onClose(code) {
        // eslint-disable-next-line @typescript-eslint/no-use-before-define
        removeListeners();
        kill();
        if (code !== 0) {
            return cb(new Error(`Failed to make bytecode ${fabricator.nodeRange}-${fabricator.arch} for file ${snap}`));
        }
        // eslint-disable-next-line no-console
        console.log(stdout.toString());
        return cb(new Error(`${cmd} closed unexpectedly`));
    }
    function onData(data) {
        stdout = Buffer.concat([stdout, data]);
        if (stdout.length >= 4) {
            const sizeOfBlob = stdout.readInt32LE(0);
            if (stdout.length >= 4 + sizeOfBlob) {
                const blob = Buffer.alloc(sizeOfBlob);
                stdout.copy(blob, 0, 4, 4 + sizeOfBlob);
                // eslint-disable-next-line @typescript-eslint/no-use-before-define
                removeListeners();
                return cb(undefined, blob);
            }
        }
    }
    function removeListeners() {
        child.removeListener('error', onError);
        child.removeListener('close', onClose);
        child.stdin.removeListener('error', onError);
        child.stdout.removeListener('error', onError);
        child.stdout.removeListener('data', onData);
    }
    child.on('error', onError);
    child.on('close', onClose);
    child.stdin.on('error', onError);
    child.stdout.on('error', onError);
    child.stdout.on('data', onData);
    const h = Buffer.alloc(4);
    let b = Buffer.from(snap);
    h.writeInt32LE(b.length, 0);
    child.stdin.write(h);
    child.stdin.write(b);
    b = body;
    h.writeInt32LE(b.length, 0);
    child.stdin.write(h);
    child.stdin.write(b);
}
exports.fabricate = fabricate;
function fabricateTwice(bakes, fabricator, snap, body, cb) {
    fabricate(bakes, fabricator, snap, body, (error, buffer) => {
        // node0 can not produce second time, even if first time produced fine,
        // probably because of 'filename' cache. also, there are weird cases
        // when node4 can not compile as well, for example file 'lib/js-yaml/dumper.js'
        // of package js-yaml@3.9.0 does not get bytecode second time on node4-win-x64
        if (error)
            return fabricate(bakes, fabricator, snap, body, cb);
        cb(undefined, buffer);
    });
}
exports.fabricateTwice = fabricateTwice;
function shutdown() {
    for (const key in children) {
        if (children[key]) {
            const child = children[key];
            delete children[key];
            child.kill();
        }
    }
}
exports.shutdown = shutdown;
//# sourceMappingURL=fabricator.js.map
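
The parent and the embedded child `script` speak a simple length-prefixed protocol over stdio: a 4-byte little-endian length, then the payload, first for the snapshot filename and then for the module body; the child answers with one framed buffer of V8 cached data. A sketch of the framing both sides use (standalone, with a hypothetical snapshot name):

```js
// Sketch of the framing fabricate() writes to child.stdin.
function frame(payload) {
  const header = Buffer.alloc(4);
  header.writeInt32LE(payload.length, 0);
  return Buffer.concat([header, payload]);
}

// stdin:  frame(snapPath) + frame(moduleBody)
// stdout: frame(v8CachedData) on success
const message = Buffer.concat([
  frame(Buffer.from('/snapshot/app/index.js')), // hypothetical snapshot name
  frame(Buffer.from('module.exports = 42;')),   // module source to compile
]);
```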

91 node_modules/pkg/lib-es5/follow.js generated vendored Normal file
@@ -0,0 +1,91 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.follow = void 0;
const resolve_1 = require("resolve");
const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
const common_1 = require("./common");
const PROOF = 'a-proof-that-main-is-captured.js';
function parentDirectoriesContain(parent, directory) {
    let currentParent = parent;
    while (true) {
        if (currentParent === directory) {
            return true;
        }
        const newParent = path_1.default.dirname(currentParent);
        if (newParent === currentParent) {
            return false;
        }
        currentParent = newParent;
    }
}
function follow(x, opts) {
    // TODO async version
    return new Promise((resolve) => {
        resolve((0, resolve_1.sync)(x, {
            basedir: opts.basedir,
            extensions: opts.extensions,
            isFile: (file) => {
                if (opts.ignoreFile &&
                    path_1.default.join(path_1.default.dirname(opts.ignoreFile), PROOF) === file) {
                    return true;
                }
                let stat;
                try {
                    stat = fs_1.default.statSync(file);
                }
                catch (e) {
                    const ex = e;
                    if (ex && (ex.code === 'ENOENT' || ex.code === 'ENOTDIR'))
                        return false;
                    throw ex;
                }
                return stat.isFile() || stat.isFIFO();
            },
            isDirectory: (directory) => {
                if (opts.ignoreFile &&
                    parentDirectoriesContain(opts.ignoreFile, directory)) {
                    return false;
                }
                let stat;
                try {
                    stat = fs_1.default.statSync(directory);
                }
                catch (e) {
                    const ex = e;
                    if (ex && (ex.code === 'ENOENT' || ex.code === 'ENOTDIR')) {
                        return false;
                    }
                    throw ex;
                }
                return stat.isDirectory();
            },
            readFileSync: (file) => {
                if (opts.ignoreFile && opts.ignoreFile === file) {
                    return Buffer.from(`{"main":"${PROOF}"}`);
                }
                if (opts.catchReadFile) {
                    opts.catchReadFile(file);
                }
                return fs_1.default.readFileSync(file);
            },
            packageFilter: (config, base, dir) => {
                if (opts.catchPackageFilter) {
                    opts.catchPackageFilter(config, base, dir);
                }
                return config;
            },
            /** function to synchronously resolve a potential symlink to its real path */
            // realpathSync?: (file: string) => string;
            realpathSync: (file) => {
                const file2 = (0, common_1.toNormalizedRealPath)(file);
                return file2;
            },
        }));
    });
}
exports.follow = follow;
//# sourceMappingURL=follow.js.map
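
follow() is essentially `require.resolve()` driven through the `resolve` package, with hooks so the walker can observe every package.json read and route symlinks through `toNormalizedRealPath`. A usage sketch with hypothetical paths:

```js
const { follow } = require('pkg/lib-es5/follow');

follow('lodash', {
  basedir: '/home/user/app',             // resolve as if required from here
  extensions: ['.js', '.json', '.node'],
  catchReadFile: (file) => {
    // invoked for every package.json the resolver consults
    console.log('read', file);
  },
}).then((resolved) => {
  console.log(resolved); // e.g. /home/user/app/node_modules/lodash/lodash.js
});
```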

53 node_modules/pkg/lib-es5/help.js generated vendored Normal file
@@ -0,0 +1,53 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const chalk_1 = __importDefault(require("chalk"));
function help() {
    // eslint-disable-next-line no-console
    console.log(`
  ${chalk_1.default.bold('pkg')} [options] <input>

  ${chalk_1.default.dim('Options:')}

    -h, --help           output usage information
    -v, --version        output pkg version
    -t, --targets        comma-separated list of targets (see examples)
    -c, --config         package.json or any json file with top-level config
    --options            bake v8 options into executable to run with them on
    -o, --output         output file name or template for several files
    --out-path           path to save output one or more executables
    -d, --debug          show more information during packaging process [off]
    -b, --build          don't download prebuilt base binaries, build them
    --public             speed up and disclose the sources of top-level project
    --public-packages    force specified packages to be considered public
    --no-bytecode        skip bytecode generation and include source files as plain js
    --no-native-build    skip native addons build
    --no-dict            comma-separated list of packages names to ignore dictionaries. Use --no-dict * to disable all dictionaries
    -C, --compress       [default=None] compression algorithm = Brotli or GZip

  ${chalk_1.default.dim('Examples:')}

  ${chalk_1.default.gray('–')} Makes executables for Linux, macOS and Windows
    ${chalk_1.default.cyan('$ pkg index.js')}
  ${chalk_1.default.gray('–')} Takes package.json from cwd and follows 'bin' entry
    ${chalk_1.default.cyan('$ pkg .')}
  ${chalk_1.default.gray('–')} Makes executable for particular target machine
    ${chalk_1.default.cyan('$ pkg -t node14-win-arm64 index.js')}
  ${chalk_1.default.gray('–')} Makes executables for target machines of your choice
    ${chalk_1.default.cyan('$ pkg -t node12-linux,node14-linux,node14-win index.js')}
  ${chalk_1.default.gray('–')} Bakes '--expose-gc' and '--max-heap-size=34' into executable
    ${chalk_1.default.cyan('$ pkg --options "expose-gc,max-heap-size=34" index.js')}
  ${chalk_1.default.gray('–')} Consider packageA and packageB to be public
    ${chalk_1.default.cyan('$ pkg --public-packages "packageA,packageB" index.js')}
  ${chalk_1.default.gray('–')} Consider all packages to be public
    ${chalk_1.default.cyan('$ pkg --public-packages "*" index.js')}
  ${chalk_1.default.gray('–')} Bakes '--expose-gc' into executable
    ${chalk_1.default.cyan('$ pkg --options expose-gc index.js')}
  ${chalk_1.default.gray('–')} reduce size of the data packed inside the executable with GZip
    ${chalk_1.default.cyan('$ pkg --compress GZip index.js')}
`);
}
exports.default = help;
//# sourceMappingURL=help.js.map

2 node_modules/pkg/lib-es5/index.d.ts generated vendored Normal file
@@ -0,0 +1,2 @@
export declare function exec(argv2: string[]): Promise<void>;
//# sourceMappingURL=index.d.ts.map

540 node_modules/pkg/lib-es5/index.js generated vendored Normal file
@@ -0,0 +1,540 @@
"use strict";
/* eslint-disable require-atomic-updates */
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.exec = void 0;
const assert_1 = __importDefault(require("assert"));
const fs_extra_1 = require("fs-extra");
const minimist_1 = __importDefault(require("minimist"));
const pkg_fetch_1 = require("pkg-fetch");
const path_1 = __importDefault(require("path"));
const log_1 = require("./log");
const help_1 = __importDefault(require("./help"));
const common_1 = require("./common");
const packer_1 = __importDefault(require("./packer"));
const chmod_1 = require("./chmod");
const producer_1 = __importDefault(require("./producer"));
const refiner_1 = __importDefault(require("./refiner"));
const fabricator_1 = require("./fabricator");
const walker_1 = __importDefault(require("./walker"));
const compress_type_1 = require("./compress_type");
const mach_o_1 = require("./mach-o");
const { version } = JSON.parse((0, fs_extra_1.readFileSync)(path_1.default.join(__dirname, '../package.json'), 'utf-8'));
function isConfiguration(file) {
    return (0, common_1.isPackageJson)(file) || file.endsWith('.config.json');
}
// http://www.openwall.com/lists/musl/2012/12/08/4
const { hostArch, hostPlatform, isValidNodeRange, knownArchs, knownPlatforms, toFancyArch, toFancyPlatform, } = pkg_fetch_1.system;
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
const hostNodeRange = `node${process.version.match(/^v(\d+)/)[1]}`;
function parseTargets(items) {
    // [ 'node6-macos-x64', 'node6-linux-x64' ]
    const targets = [];
    for (const item of items) {
        const target = {
            nodeRange: hostNodeRange,
            platform: hostPlatform,
            arch: hostArch,
        };
        if (item !== 'host') {
            for (const token of item.split('-')) {
                if (!token) {
                    continue;
                }
                if (isValidNodeRange(token)) {
                    target.nodeRange = token;
                    continue;
                }
                const p = toFancyPlatform(token);
                if (knownPlatforms.indexOf(p) >= 0) {
                    target.platform = p;
                    continue;
                }
                const a = toFancyArch(token);
                if (knownArchs.indexOf(a) >= 0) {
                    target.arch = a;
                    continue;
                }
                throw (0, log_1.wasReported)(`Unknown token '${token}' in '${item}'`);
            }
        }
        targets.push(target);
    }
    return targets;
}
function stringifyTarget(target) {
    const { nodeRange, platform, arch } = target;
    return `${nodeRange}-${platform}-${arch}`;
}
function differentParts(targets) {
    const nodeRanges = {};
    const platforms = {};
    const archs = {};
    for (const target of targets) {
        nodeRanges[target.nodeRange] = true;
        platforms[target.platform] = true;
        archs[target.arch] = true;
    }
    const result = {};
    if (Object.keys(nodeRanges).length > 1) {
        result.nodeRange = true;
    }
    if (Object.keys(platforms).length > 1) {
        result.platform = true;
    }
    if (Object.keys(archs).length > 1) {
        result.arch = true;
    }
    return result;
}
function stringifyTargetForOutput(output, target, different) {
    const a = [output];
    if (different.nodeRange) {
        a.push(target.nodeRange);
    }
    if (different.platform) {
        a.push(target.platform);
    }
    if (different.arch) {
        a.push(target.arch);
    }
    return a.join('-');
}
function fabricatorForTarget({ nodeRange, arch }) {
    let fabPlatform = hostPlatform;
    if (hostArch !== arch &&
        (hostPlatform === 'linux' || hostPlatform === 'alpine')) {
        // With linuxstatic, it is possible to generate bytecode for different
        // arch with simple QEMU configuration instead of the entire sysroot.
        fabPlatform = 'linuxstatic';
    }
    return {
        nodeRange,
        platform: fabPlatform,
        arch,
    };
}
const dryRunResults = {};
async function needWithDryRun({ forceBuild, nodeRange, platform, arch, }) {
    const result = await (0, pkg_fetch_1.need)({
        dryRun: true,
        forceBuild,
        nodeRange,
        platform,
        arch,
    });
    (0, assert_1.default)(['exists', 'fetched', 'built'].indexOf(result) >= 0);
    dryRunResults[result] = true;
}
const targetsCache = {};
async function needViaCache(target) {
    const s = stringifyTarget(target);
    let c = targetsCache[s];
    if (c) {
        return c;
    }
    const { forceBuild, nodeRange, platform, arch } = target;
    c = await (0, pkg_fetch_1.need)({
        forceBuild,
        nodeRange,
        platform,
        arch,
    });
    targetsCache[s] = c;
    return c;
}
async function exec(argv2) {
    var _a, _b;
    const argv = (0, minimist_1.default)(argv2, {
        boolean: [
            'b',
            'build',
            'bytecode',
            'native-build',
            'd',
            'debug',
            'h',
            'help',
            'public',
            'v',
            'version',
        ],
        string: [
            '_',
            'c',
            'config',
            'o',
            'options',
            'output',
            'outdir',
            'out-dir',
            'out-path',
            'public-packages',
            'no-dict',
            't',
            'target',
            'targets',
            'C',
            'compress',
        ],
        default: { bytecode: true, 'native-build': true },
    });
    if (argv.h || argv.help) {
        (0, help_1.default)();
        return;
    }
    // version
    if (argv.v || argv.version) {
        // eslint-disable-next-line no-console
        console.log(version);
        return;
    }
    log_1.log.info(`pkg@${version}`);
    // debug
    log_1.log.debugMode = argv.d || argv.debug;
    // forceBuild
    const forceBuild = argv.b || argv.build;
    // doCompress
    const algo = argv.C || argv.compress || 'None';
    let doCompress = compress_type_1.CompressType.None;
    switch (algo.toLowerCase()) {
        case 'brotli':
        case 'br':
            doCompress = compress_type_1.CompressType.Brotli;
            break;
        case 'gzip':
        case 'gz':
            doCompress = compress_type_1.CompressType.GZip;
            break;
        case 'none':
            break;
        default:
            throw (0, log_1.wasReported)(`Invalid compression algorithm ${algo} ( should be None, Brotli or Gzip)`);
    }
    if (doCompress !== compress_type_1.CompressType.None) {
        // eslint-disable-next-line no-console
        console.log('compression: ', compress_type_1.CompressType[doCompress]);
    }
    // _
    if (!argv._.length) {
        throw (0, log_1.wasReported)('Entry file/directory is expected', [
            'Pass --help to see usage information',
        ]);
    }
    if (argv._.length > 1) {
        throw (0, log_1.wasReported)('Not more than one entry file/directory is expected');
    }
    // input
    let input = path_1.default.resolve(argv._[0]);
    if (!(0, fs_extra_1.existsSync)(input)) {
        throw (0, log_1.wasReported)('Input file does not exist', [input]);
    }
    if ((await (0, fs_extra_1.stat)(input)).isDirectory()) {
        input = path_1.default.join(input, 'package.json');
        if (!(0, fs_extra_1.existsSync)(input)) {
            throw (0, log_1.wasReported)('Input file does not exist', [input]);
        }
    }
    // inputJson
    let inputJson;
    let inputJsonName;
    if (isConfiguration(input)) {
        inputJson = JSON.parse(await (0, fs_extra_1.readFile)(input, 'utf-8'));
        inputJsonName = inputJson.name;
        if (inputJsonName) {
            inputJsonName = inputJsonName.split('/').pop(); // @org/foo
        }
    }
    // inputBin
    let inputBin;
    if (inputJson) {
        let { bin } = inputJson;
        if (bin) {
            if (typeof bin === 'object') {
                if (bin[inputJsonName]) {
                    bin = bin[inputJsonName];
                }
                else {
                    bin = bin[Object.keys(bin)[0]]; // TODO multiple inputs to pkg them all?
                }
            }
            inputBin = path_1.default.resolve(path_1.default.dirname(input), bin);
            if (!(0, fs_extra_1.existsSync)(inputBin)) {
                throw (0, log_1.wasReported)('Bin file does not exist (taken from package.json ' +
                    "'bin' property)", [inputBin]);
            }
        }
    }
    if (inputJson && !inputBin) {
        throw (0, log_1.wasReported)("Property 'bin' does not exist in", [input]);
    }
    // inputFin
    const inputFin = inputBin || input;
    // config
    let config = argv.c || argv.config;
    if (inputJson && config) {
        throw (0, log_1.wasReported)("Specify either 'package.json' or config. Not both");
    }
    // configJson
    let configJson;
    if (config) {
        config = path_1.default.resolve(config);
        if (!(0, fs_extra_1.existsSync)(config)) {
            throw (0, log_1.wasReported)('Config file does not exist', [config]);
        }
        // eslint-disable-next-line import/no-dynamic-require, global-require
        configJson = require(config); // may be either json or js
        if (!configJson.name &&
            !configJson.files &&
            !configJson.dependencies &&
            !configJson.pkg) {
            // package.json not detected
            configJson = { pkg: configJson };
        }
    }
    // output, outputPath
    let output = argv.o || argv.output;
    let outputPath = argv['out-path'] || argv.outdir || argv['out-dir'];
    let autoOutput = false;
    if (output && outputPath) {
        throw (0, log_1.wasReported)("Specify either 'output' or 'out-path'. Not both");
    }
    if (!output) {
        let name;
        if (inputJson) {
            name = inputJsonName;
            if (!name) {
                throw (0, log_1.wasReported)("Property 'name' does not exist in", [argv._[0]]);
            }
        }
        else if (configJson) {
            name = configJson.name;
        }
        if (!name) {
            name = path_1.default.basename(inputFin);
        }
        if (!outputPath) {
            if (inputJson && inputJson.pkg) {
                outputPath = inputJson.pkg.outputPath;
            }
            else if (configJson && configJson.pkg) {
                outputPath = configJson.pkg.outputPath;
            }
            outputPath = outputPath || '';
        }
        autoOutput = true;
        const ext = path_1.default.extname(name);
        output = name.slice(0, -ext.length || undefined);
        output = path_1.default.resolve(outputPath || '', output);
    }
    else {
        output = path_1.default.resolve(output);
    }
    // targets
    const sTargets = argv.t || argv.target || argv.targets || '';
    if (typeof sTargets !== 'string') {
        throw (0, log_1.wasReported)(`Something is wrong near ${JSON.stringify(sTargets)}`);
    }
    let targets = parseTargets(sTargets.split(',').filter((t) => t));
    if (!targets.length) {
        let jsonTargets;
        if (inputJson && inputJson.pkg) {
            jsonTargets = inputJson.pkg.targets;
        }
        else if (configJson && configJson.pkg) {
            jsonTargets = configJson.pkg.targets;
        }
        if (jsonTargets) {
            targets = parseTargets(jsonTargets);
        }
    }
    if (!targets.length) {
        if (!autoOutput) {
            targets = parseTargets(['host']);
            (0, assert_1.default)(targets.length === 1);
        }
        else {
            targets = parseTargets(['linux', 'macos', 'win']);
        }
        log_1.log.info('Targets not specified. Assuming:', `${targets.map((t) => stringifyTarget(t)).join(', ')}`);
    }
    // differentParts
    const different = differentParts(targets);
    // targets[].output
    for (const target of targets) {
        let file;
        if (targets.length === 1) {
            file = output;
        }
        else {
            file = stringifyTargetForOutput(output, target, different);
        }
        if (target.platform === 'win' && path_1.default.extname(file) !== '.exe') {
            file += '.exe';
        }
        target.output = file;
    }
    // bakes
    const bakes = (argv.options || '')
        .split(',')
        .filter((bake) => bake)
        .map((bake) => `--${bake}`);
    // check if input is going
    // to be overwritten by output
    for (const target of targets) {
        if (target.output === inputFin) {
            if (autoOutput) {
                target.output += `-${target.platform}`;
            }
            else {
                throw (0, log_1.wasReported)('Refusing to overwrite input file', [inputFin]);
            }
        }
    }
    // fetch targets
    const { bytecode } = argv;
    const nativeBuild = argv['native-build'];
    for (const target of targets) {
        target.forceBuild = forceBuild;
        await needWithDryRun(target);
        target.fabricator = fabricatorForTarget(target);
        if (bytecode) {
            await needWithDryRun(Object.assign(Object.assign({}, target.fabricator), { forceBuild }));
        }
    }
    if (dryRunResults.fetched && !dryRunResults.built) {
        log_1.log.info('Fetching base Node.js binaries to PKG_CACHE_PATH');
    }
    for (const target of targets) {
        target.binaryPath = await needViaCache(target);
        const f = target.fabricator;
        if (f && bytecode) {
            f.binaryPath = await needViaCache(f);
            if (f.platform === 'macos') {
                // ad-hoc sign the base binary temporarily to generate bytecode
                // due to the new mandatory signing requirement
                const signedBinaryPath = `${f.binaryPath}-signed`;
                await (0, fs_extra_1.remove)(signedBinaryPath);
                (0, fs_extra_1.copyFileSync)(f.binaryPath, signedBinaryPath);
                try {
                    (0, mach_o_1.signMachOExecutable)(signedBinaryPath);
                }
                catch (_c) {
                    throw (0, log_1.wasReported)('Cannot generate bytecode', [
                        'pkg fails to run "codesign" utility. Due to the mandatory signing',
                        'requirement of macOS, executables must be signed. Please ensure the',
                        'utility is installed and properly configured.',
                    ]);
                }
                f.binaryPath = signedBinaryPath;
            }
            if (f.platform !== 'win') {
                await (0, chmod_1.plusx)(f.binaryPath);
            }
        }
    }
    // marker
    let marker;
    if (configJson) {
        marker = {
            config: configJson,
            base: path_1.default.dirname(config),
            configPath: config,
        };
    }
    else {
        marker = {
            config: inputJson || {},
            base: path_1.default.dirname(input),
            configPath: input,
        };
    }
    marker.toplevel = true;
    // public
    const params = {};
    if (argv.public) {
        params.publicToplevel = true;
    }
    if (argv['public-packages']) {
        params.publicPackages = argv['public-packages'].split(',');
        if (((_a = params.publicPackages) === null || _a === void 0 ? void 0 : _a.indexOf('*')) !== -1) {
            params.publicPackages = ['*'];
        }
    }
    if (argv['no-dict']) {
        params.noDictionary = argv['no-dict'].split(',');
        if (((_b = params.noDictionary) === null || _b === void 0 ? void 0 : _b.indexOf('*')) !== -1) {
            params.noDictionary = ['*'];
        }
    }
    // records
    let records;
    let entrypoint = inputFin;
    let symLinks;
    const addition = isConfiguration(input) ? input : undefined;
    const walkResult = await (0, walker_1.default)(marker, entrypoint, addition, params);
    entrypoint = walkResult.entrypoint;
    records = walkResult.records;
    symLinks = walkResult.symLinks;
    const refineResult = (0, refiner_1.default)(records, entrypoint, symLinks);
    entrypoint = refineResult.entrypoint;
    records = refineResult.records;
    symLinks = refineResult.symLinks;
    const backpack = (0, packer_1.default)({ records, entrypoint, bytecode, symLinks });
    log_1.log.debug('Targets:', JSON.stringify(targets, null, 2));
    for (const target of targets) {
        if (target.output && (0, fs_extra_1.existsSync)(target.output)) {
            if ((await (0, fs_extra_1.stat)(target.output)).isFile()) {
                await (0, fs_extra_1.remove)(target.output);
            }
            else {
                throw (0, log_1.wasReported)('Refusing to overwrite non-file output', [
                    target.output,
                ]);
            }
        }
        else if (target.output) {
            await (0, fs_extra_1.mkdirp)(path_1.default.dirname(target.output));
        }
        await (0, producer_1.default)({
            backpack,
            bakes,
            slash: target.platform === 'win' ? '\\' : '/',
            target: target,
            symLinks,
            doCompress,
            nativeBuild,
        });
        if (target.platform !== 'win' && target.output) {
            if (target.platform === 'macos') {
                // patch executable to allow code signing
                const buf = (0, mach_o_1.patchMachOExecutable)((0, fs_extra_1.readFileSync)(target.output));
                (0, fs_extra_1.writeFileSync)(target.output, buf);
                try {
                    // sign executable ad-hoc to workaround the new mandatory signing requirement
                    // users can always replace the signature if necessary
                    (0, mach_o_1.signMachOExecutable)(target.output);
                }
                catch (_d) {
                    if (target.arch === 'arm64') {
                        log_1.log.warn('Unable to sign the macOS executable', [
                            'Due to the mandatory code signing requirement, before the',
                            'executable is distributed to end users, it must be signed.',
                            'Otherwise, it will be immediately killed by kernel on launch.',
                            'An ad-hoc signature is sufficient.',
                            'To do that, run pkg on a Mac, or transfer the executable to a Mac',
                            'and run "codesign --sign - <executable>", or (if you use Linux)',
                            'install "ldid" utility to PATH and then run pkg again',
                        ]);
                    }
                }
            }
            await (0, chmod_1.plusx)(target.output);
        }
    }
    (0, fabricator_1.shutdown)();
}
exports.exec = exec;
//# sourceMappingURL=index.js.map
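
Targets are `nodeRange-platform-arch` triples; any part a target string omits falls back to the host value. Illustrative inputs and outputs for the internal `parseTargets()` above (assuming a hypothetical linux/x64 host running Node 14):

```js
parseTargets(['node14-win-arm64']);
// -> [{ nodeRange: 'node14', platform: 'win', arch: 'arm64' }]
parseTargets(['macos']); // unspecified parts fall back to host values
// -> [{ nodeRange: 'node14', platform: 'macos', arch: 'x64' }]
parseTargets(['host']);
// -> [{ nodeRange: 'node14', platform: 'linux', arch: 'x64' }]
parseTargets(['node14-foobar']); // unrecognized token
// -> throws "Unknown token 'foobar' in 'node14-foobar'"
```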

7 node_modules/pkg/lib-es5/log.js generated vendored Normal file
@@ -0,0 +1,7 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.wasReported = exports.log = void 0;
var log_1 = require("pkg-fetch/lib-es5/log");
Object.defineProperty(exports, "log", { enumerable: true, get: function () { return log_1.log; } });
Object.defineProperty(exports, "wasReported", { enumerable: true, get: function () { return log_1.wasReported; } });
//# sourceMappingURL=log.js.map

61 node_modules/pkg/lib-es5/mach-o.js generated vendored Normal file
@@ -0,0 +1,61 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.signMachOExecutable = exports.patchMachOExecutable = void 0;
const child_process_1 = require("child_process");
function parseCStr(buf) {
    for (let i = 0; i < buf.length; i += 1) {
        if (buf[i] === 0) {
            return buf.slice(0, i).toString();
        }
    }
}
function patchCommand(type, buf, file) {
    // segment_64
    if (type === 0x19) {
        const name = parseCStr(buf.slice(0, 16));
        if (name === '__LINKEDIT') {
            const fileoff = buf.readBigUInt64LE(32);
            const vmsizePatched = BigInt(file.length) - fileoff;
            const filesizePatched = vmsizePatched;
            buf.writeBigUInt64LE(vmsizePatched, 24);
            buf.writeBigUInt64LE(filesizePatched, 40);
        }
    }
    // symtab
    if (type === 0x2) {
        const stroff = buf.readUInt32LE(8);
        const strsizePatched = file.length - stroff;
        buf.writeUInt32LE(strsizePatched, 12);
    }
}
function patchMachOExecutable(file) {
    const align = 8;
    const hsize = 32;
    const ncmds = file.readUInt32LE(16);
    const buf = file.slice(hsize);
    for (let offset = 0, i = 0; i < ncmds; i += 1) {
        const type = buf.readUInt32LE(offset);
        offset += 4;
        const size = buf.readUInt32LE(offset) - 8;
        offset += 4;
        patchCommand(type, buf.slice(offset, offset + size), file);
        offset += size;
        if (offset & align) {
            offset += align - (offset & align);
        }
    }
    return file;
}
exports.patchMachOExecutable = patchMachOExecutable;
function signMachOExecutable(executable) {
    try {
        (0, child_process_1.execFileSync)('codesign', ['-f', '--sign', '-', executable], {
            stdio: 'inherit',
        });
    }
    catch (_a) {
        (0, child_process_1.execFileSync)('ldid', ['-Cadhoc', '-S', executable], { stdio: 'inherit' });
    }
}
exports.signMachOExecutable = signMachOExecutable;
//# sourceMappingURL=mach-o.js.map
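
pkg appends its payload after the Mach-O image, so `patchMachOExecutable()` grows the `__LINKEDIT` segment (load command 0x19) and the symtab string table (load command 0x2) to reach the new end of file, which is what allows code signing per the "patch executable to allow code signing" comment in index.js. Typical use, mirroring index.js above (paths hypothetical):

```js
const fs = require('fs');
const { patchMachOExecutable, signMachOExecutable } = require('pkg/lib-es5/mach-o');

const out = './dist/app'; // hypothetical packaged macOS executable
const patched = patchMachOExecutable(fs.readFileSync(out));
fs.writeFileSync(out, patched);
signMachOExecutable(out); // ad-hoc codesign, falling back to the ldid utility
```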
131
node_modules/pkg/lib-es5/packer.js
generated
vendored
Normal file
@ -0,0 +1,131 @@
"use strict";
/* eslint-disable complexity */
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const assert_1 = __importDefault(require("assert"));
const fs_extra_1 = __importDefault(require("fs-extra"));
const path_1 = __importDefault(require("path"));
const common_1 = require("./common");
const log_1 = require("./log");
const { version } = JSON.parse(fs_extra_1.default.readFileSync(path_1.default.join(__dirname, '../package.json'), 'utf-8'));
const bootstrapText = fs_extra_1.default
    .readFileSync(require.resolve('../prelude/bootstrap.js'), 'utf8')
    .replace('%VERSION%', version);
const commonText = fs_extra_1.default.readFileSync(require.resolve('./common'), 'utf8');
const diagnosticText = fs_extra_1.default.readFileSync(require.resolve('../prelude/diagnostic.js'), 'utf8');
function itemsToText(items) {
    const len = items.length;
    return len.toString() + (len % 10 === 1 ? ' item' : ' items');
}
function hasAnyStore(record) {
    // discarded records like native addons
    for (const store of [common_1.STORE_BLOB, common_1.STORE_CONTENT, common_1.STORE_LINKS, common_1.STORE_STAT]) {
        if (record[store])
            return true;
    }
    return false;
}
function packer({ records, entrypoint, bytecode, }) {
    const stripes = [];
    for (const snap in records) {
        if (records[snap]) {
            const record = records[snap];
            const { file } = record;
            if (!hasAnyStore(record)) {
                continue;
            }
            (0, assert_1.default)(record[common_1.STORE_STAT], 'packer: no STORE_STAT');
            (0, assert_1.default)(record[common_1.STORE_BLOB] ||
                record[common_1.STORE_CONTENT] ||
                record[common_1.STORE_LINKS] ||
                record[common_1.STORE_STAT]);
            if (record[common_1.STORE_BLOB] && !bytecode) {
                delete record[common_1.STORE_BLOB];
                if (!record[common_1.STORE_CONTENT]) {
                    // TODO make a test for it?
                    throw (0, log_1.wasReported)('--no-bytecode and no source breaks final executable', [
                        file,
                        'Please run with "-d" and without "--no-bytecode" first, and make',
                        'sure that debug log does not contain "was included as bytecode".',
                    ]);
                }
            }
            for (const store of [
                common_1.STORE_BLOB,
                common_1.STORE_CONTENT,
                common_1.STORE_LINKS,
                common_1.STORE_STAT,
            ]) {
                const value = record[store];
                if (!value) {
                    continue;
                }
                if (store === common_1.STORE_BLOB || store === common_1.STORE_CONTENT) {
                    if (record.body === undefined) {
                        stripes.push({ snap, store, file });
                    }
                    else if (Buffer.isBuffer(record.body)) {
                        stripes.push({ snap, store, buffer: record.body });
                    }
                    else if (typeof record.body === 'string') {
                        stripes.push({ snap, store, buffer: Buffer.from(record.body) });
                    }
                    else {
                        (0, assert_1.default)(false, 'packer: bad STORE_BLOB/STORE_CONTENT');
                    }
                }
                else if (store === common_1.STORE_LINKS) {
                    if (Array.isArray(value)) {
                        const dedupedValue = [...new Set(value)];
                        log_1.log.debug('files & folders deduped = ', dedupedValue);
                        const buffer = Buffer.from(JSON.stringify(dedupedValue));
                        stripes.push({ snap, store, buffer });
                    }
                    else {
                        (0, assert_1.default)(false, 'packer: bad STORE_LINKS');
                    }
                }
                else if (store === common_1.STORE_STAT) {
                    if (typeof value === 'object') {
                        const newStat = Object.assign({}, value);
                        const buffer = Buffer.from(JSON.stringify(newStat));
                        stripes.push({ snap, store, buffer });
                    }
                    else {
                        (0, assert_1.default)(false, 'packer: unknown store');
                    }
                }
                if (record[common_1.STORE_CONTENT]) {
                    const disclosed = (0, common_1.isDotJS)(file) || (0, common_1.isDotJSON)(file);
                    log_1.log.debug(disclosed
                        ? 'The file was included as DISCLOSED code (with sources)'
                        : 'The file was included as asset content', file);
                }
                else if (record[common_1.STORE_BLOB]) {
                    log_1.log.debug('The file was included as bytecode (no sources)', file);
                }
                else if (record[common_1.STORE_LINKS]) {
                    const link = record[common_1.STORE_LINKS];
                    log_1.log.debug(`The directory files list was included (${itemsToText(link)})`, file);
                }
            }
        }
    }
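    // Note (added for clarity; based on producer.js below): the
    // %VIRTUAL_FILESYSTEM%, %DEFAULT_ENTRYPOINT%, %SYMLINKS%, %DICT% and
    // %DOCOMPRESS% markers in the prelude template are left verbatim here and
    // are substituted with JSON by producer.js once the payload layout is known.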
    const prelude = `return (function (REQUIRE_COMMON, VIRTUAL_FILESYSTEM, DEFAULT_ENTRYPOINT, SYMLINKS, DICT, DOCOMPRESS) {
${bootstrapText}${log_1.log.debugMode ? diagnosticText : ''}\n})(function (exports) {\n${commonText}\n},\n` +
        `%VIRTUAL_FILESYSTEM%` +
        `\n,\n` +
        `%DEFAULT_ENTRYPOINT%` +
        `\n,\n` +
        `%SYMLINKS%` +
        '\n,\n' +
        '%DICT%' +
        '\n,\n' +
        '%DOCOMPRESS%' +
        `\n);`;
    return { prelude, entrypoint, stripes };
}
exports.default = packer;
//# sourceMappingURL=packer.js.map
344
node_modules/pkg/lib-es5/producer.js
generated
vendored
Normal file
@ -0,0 +1,344 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const zlib_1 = require("zlib");
const multistream_1 = __importDefault(require("multistream"));
const assert_1 = __importDefault(require("assert"));
const child_process_1 = require("child_process");
const fs_extra_1 = __importDefault(require("fs-extra"));
const into_stream_1 = __importDefault(require("into-stream"));
const path_1 = __importDefault(require("path"));
const stream_meter_1 = __importDefault(require("stream-meter"));
const common_1 = require("./common");
const log_1 = require("./log");
const fabricator_1 = require("./fabricator");
const types_1 = require("./types");
const compress_type_1 = require("./compress_type");
function discoverPlaceholder(binaryBuffer, searchString, padder) {
    const placeholder = Buffer.from(searchString);
    const position = binaryBuffer.indexOf(placeholder);
    if (position === -1) {
        return { notFound: true };
    }
    return { position, size: placeholder.length, padder };
}
function injectPlaceholder(fd, placeholder, value, cb) {
    if ('notFound' in placeholder) {
        (0, assert_1.default)(false, 'Placeholder for not found');
    }
    const { position, size, padder } = placeholder;
    let stringValue = Buffer.from('');
    if (typeof value === 'number') {
        stringValue = Buffer.from(value.toString());
    }
    else if (typeof value === 'string') {
        stringValue = Buffer.from(value);
    }
    else {
        stringValue = value;
    }
    const padding = Buffer.from(padder.repeat(size - stringValue.length));
    stringValue = Buffer.concat([stringValue, padding]);
    fs_extra_1.default.write(fd, stringValue, 0, stringValue.length, position, cb);
}
function discoverPlaceholders(binaryBuffer) {
    return {
        BAKERY: discoverPlaceholder(binaryBuffer, `\0${'// BAKERY '.repeat(20)}`, '\0'),
        PAYLOAD_POSITION: discoverPlaceholder(binaryBuffer, '// PAYLOAD_POSITION //', ' '),
        PAYLOAD_SIZE: discoverPlaceholder(binaryBuffer, '// PAYLOAD_SIZE //', ' '),
        PRELUDE_POSITION: discoverPlaceholder(binaryBuffer, '// PRELUDE_POSITION //', ' '),
        PRELUDE_SIZE: discoverPlaceholder(binaryBuffer, '// PRELUDE_SIZE //', ' '),
    };
}
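// Note (added for clarity): the placeholders above are fixed-width marker
// strings baked into the pkg-fetch base binary; injectPlaceholder pads each
// value to the marker's exact length, so overwriting them in place never
// changes the size of the executable.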
function injectPlaceholders(fd, placeholders, values, cb) {
    injectPlaceholder(fd, placeholders.BAKERY, values.BAKERY, (error) => {
        if (error) {
            return cb(error);
        }
        injectPlaceholder(fd, placeholders.PAYLOAD_POSITION, values.PAYLOAD_POSITION, (error2) => {
            if (error2) {
                return cb(error2);
            }
            injectPlaceholder(fd, placeholders.PAYLOAD_SIZE, values.PAYLOAD_SIZE, (error3) => {
                if (error3) {
                    return cb(error3);
                }
                injectPlaceholder(fd, placeholders.PRELUDE_POSITION, values.PRELUDE_POSITION, (error4) => {
                    if (error4) {
                        return cb(error4);
                    }
                    injectPlaceholder(fd, placeholders.PRELUDE_SIZE, values.PRELUDE_SIZE, cb);
                });
            });
        });
    });
}
function makeBakeryValueFromBakes(bakes) {
    const parts = [];
    if (bakes.length) {
        for (let i = 0; i < bakes.length; i += 1) {
            parts.push(Buffer.from(bakes[i]));
            parts.push(Buffer.alloc(1));
        }
        parts.push(Buffer.alloc(1));
    }
    return Buffer.concat(parts);
}
function replaceDollarWise(s, sf, st) {
    return s.replace(sf, () => st);
}
function makePreludeBufferFromPrelude(prelude) {
    return Buffer.from(`(function(process, require, console, EXECPATH_FD, PAYLOAD_POSITION, PAYLOAD_SIZE) { ${prelude}\n})` // dont remove \n
    );
}
function findPackageJson(nodeFile) {
    let dir = nodeFile;
    while (dir !== '/') {
        dir = path_1.default.dirname(dir);
        if (fs_extra_1.default.existsSync(path_1.default.join(dir, 'package.json'))) {
            break;
        }
    }
    if (dir === '/') {
        throw new Error(`package.json not found for "${nodeFile}"`);
    }
    return dir;
}
function nativePrebuildInstall(target, nodeFile) {
    var _a, _b;
    const prebuildInstall = path_1.default.join(__dirname, '../node_modules/.bin/prebuild-install');
    const dir = findPackageJson(nodeFile);
    // parse the target node version from the binaryPath
    const nodeVersion = path_1.default.basename(target.binaryPath).split('-')[1];
    if (!/^v[0-9]+\.[0-9]+\.[0-9]+$/.test(nodeVersion)) {
        throw new Error(`Couldn't find node version, instead got: ${nodeVersion}`);
    }
    const nativeFile = `${nodeFile}.${target.platform}.${nodeVersion}`;
    if (fs_extra_1.default.existsSync(nativeFile)) {
        return nativeFile;
    }
    // prebuild-install will overwrite the target .node file, so take a backup
    if (!fs_extra_1.default.existsSync(`${nodeFile}.bak`)) {
        fs_extra_1.default.copyFileSync(nodeFile, `${nodeFile}.bak`);
    }
    const napiVersions = (_b = (_a = JSON.parse(fs_extra_1.default.readFileSync(path_1.default.join(dir, 'package.json'), { encoding: 'utf-8' }))) === null || _a === void 0 ? void 0 : _a.binary) === null || _b === void 0 ? void 0 : _b.napi_versions;
    const options = [
        '--platform',
        types_1.platform[target.platform],
        '--arch',
        target.arch,
    ];
    if (napiVersions == null) {
        // TODO: consider target node version and supported n-api version
        options.push('--target', nodeVersion);
    }
    // run prebuild
    (0, child_process_1.execFileSync)(prebuildInstall, options, { cwd: dir });
    // move the prebuild to a new name with a platform/version extension
    fs_extra_1.default.copyFileSync(nodeFile, nativeFile);
    // put the backed up file back
    fs_extra_1.default.moveSync(`${nodeFile}.bak`, nodeFile, { overwrite: true });
    return nativeFile;
}
/**
 * instead of creating a vfs dictionary with the actual path as key
 * we use a compression mechanism that can significantly reduce
 * the memory footprint of the vfs in the code.
 *
 * without vfs compression:
 *
 * vfs = {
 *   "/folder1/folder2/file1.js": {};
 *   "/folder1/folder2/folder3/file2.js": {};
 *   "/folder1/folder2/folder3/file3.js": {};
 * }
 *
 * with compression:
 *
 * fileDictionary = {
 *   "folder1": "1",
 *   "folder2": "2",
 *   "file1": "3",
 *   "folder3": "4",
 *   "file2": "5",
 *   "file3": "6",
 * }
 * vfs = {
 *   "/1/2/3": {};
 *   "/1/2/4/5": {};
 *   "/1/2/4/6": {};
 * }
 *
 * note: the key is computed in base36 for further compression.
 */
const fileDictionary = {};
let counter = 0;
function getOrCreateHash(fileOrFolderName) {
    let existingKey = fileDictionary[fileOrFolderName];
    if (!existingKey) {
        const newkey = counter;
        counter += 1;
        existingKey = newkey.toString(36);
        fileDictionary[fileOrFolderName] = existingKey;
    }
    return existingKey;
}
const separator = '/';
function makeKey(doCompression, fullpath, slash) {
    if (doCompression === compress_type_1.CompressType.None)
        return fullpath;
    return fullpath.split(slash).map(getOrCreateHash).join(separator);
}
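// Worked example (added for clarity): with an empty dictionary and any mode
// other than CompressType.None, makeKey(mode, '/folder1/folder2/file1.js', '/')
// maps the segments ['', 'folder1', 'folder2', 'file1.js'] through
// getOrCreateHash, yielding '0/1/2/3'; a later '/folder1/folder2/file2.js'
// becomes '0/1/2/4', paying for the shared folder names only once.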
function producer({ backpack, bakes, slash, target, symLinks, doCompress, nativeBuild, }) {
    return new Promise((resolve, reject) => {
        if (!Buffer.alloc) {
            throw (0, log_1.wasReported)('Your node.js does not have Buffer.alloc. Please upgrade!');
        }
        const { prelude } = backpack;
        let { entrypoint, stripes } = backpack;
        entrypoint = (0, common_1.snapshotify)(entrypoint, slash);
        stripes = stripes.slice();
        const vfs = {};
        for (const stripe of stripes) {
            let { snap } = stripe;
            snap = (0, common_1.snapshotify)(snap, slash);
            const vfsKey = makeKey(doCompress, snap, slash);
            if (!vfs[vfsKey])
                vfs[vfsKey] = {};
        }
        const snapshotSymLinks = {};
        for (const [key, value] of Object.entries(symLinks)) {
            const k = (0, common_1.snapshotify)(key, slash);
            const v = (0, common_1.snapshotify)(value, slash);
            const vfsKey = makeKey(doCompress, k, slash);
            snapshotSymLinks[vfsKey] = makeKey(doCompress, v, slash);
        }
        let meter;
        let count = 0;
        function pipeToNewMeter(s) {
            meter = (0, stream_meter_1.default)();
            return s.pipe(meter);
        }
        function pipeMayCompressToNewMeter(s) {
            if (doCompress === compress_type_1.CompressType.GZip) {
                return pipeToNewMeter(s.pipe((0, zlib_1.createGzip)()));
            }
            if (doCompress === compress_type_1.CompressType.Brotli) {
                return pipeToNewMeter(s.pipe((0, zlib_1.createBrotliCompress)()));
            }
            return pipeToNewMeter(s);
        }
        function next(s) {
            count += 1;
            return pipeToNewMeter(s);
        }
        const binaryBuffer = fs_extra_1.default.readFileSync(target.binaryPath);
        const placeholders = discoverPlaceholders(binaryBuffer);
        let track = 0;
        let prevStripe;
        let payloadPosition;
        let payloadSize;
        let preludePosition;
        let preludeSize;
        new multistream_1.default((cb) => {
            if (count === 0) {
                return cb(null, next((0, into_stream_1.default)(binaryBuffer)));
            }
            if (count === 1) {
                payloadPosition = meter.bytes;
                return cb(null, next((0, into_stream_1.default)(Buffer.alloc(0))));
            }
            if (count === 2) {
                if (prevStripe && !prevStripe.skip) {
                    const { store } = prevStripe;
                    let { snap } = prevStripe;
                    snap = (0, common_1.snapshotify)(snap, slash);
                    const vfsKey = makeKey(doCompress, snap, slash);
                    vfs[vfsKey][store] = [track, meter.bytes];
                    track += meter.bytes;
                }
                if (stripes.length) {
                    // clone to prevent 'skip' propagating
                    // to other targets, since the same stripe
                    // is used for several targets
                    const stripe = Object.assign({}, stripes.shift());
                    prevStripe = stripe;
                    if (stripe.buffer) {
                        if (stripe.store === common_1.STORE_BLOB) {
                            const snap = (0, common_1.snapshotify)(stripe.snap, slash);
                            return (0, fabricator_1.fabricateTwice)(bakes, target.fabricator, snap, stripe.buffer, (error, buffer) => {
                                if (error) {
                                    log_1.log.warn(error.message);
                                    stripe.skip = true;
                                    return cb(null, (0, into_stream_1.default)(Buffer.alloc(0)));
                                }
                                cb(null, pipeMayCompressToNewMeter((0, into_stream_1.default)(buffer || Buffer.from(''))));
                            });
                        }
                        return cb(null, pipeMayCompressToNewMeter((0, into_stream_1.default)(stripe.buffer)));
                    }
                    if (stripe.file) {
                        if (stripe.file === target.output) {
                            return cb((0, log_1.wasReported)('Trying to take executable into executable', stripe.file), null);
                        }
                        assert_1.default.strictEqual(stripe.store, common_1.STORE_CONTENT); // others must be buffers from walker
                        if ((0, common_1.isDotNODE)(stripe.file) && nativeBuild) {
                            try {
                                const platformFile = nativePrebuildInstall(target, stripe.file);
                                if (fs_extra_1.default.existsSync(platformFile)) {
                                    return cb(null, pipeMayCompressToNewMeter(fs_extra_1.default.createReadStream(platformFile)));
                                }
                            }
                            catch (err) {
                                log_1.log.debug(`prebuild-install failed[${stripe.file}]:`, err.message);
                            }
                        }
                        return cb(null, pipeMayCompressToNewMeter(fs_extra_1.default.createReadStream(stripe.file)));
                    }
                    (0, assert_1.default)(false, 'producer: bad stripe');
                }
                else {
                    payloadSize = track;
                    preludePosition = payloadPosition + payloadSize;
                    return cb(null, next((0, into_stream_1.default)(makePreludeBufferFromPrelude(replaceDollarWise(replaceDollarWise(replaceDollarWise(replaceDollarWise(replaceDollarWise(prelude, '%VIRTUAL_FILESYSTEM%', JSON.stringify(vfs)), '%DEFAULT_ENTRYPOINT%', JSON.stringify(entrypoint)), '%SYMLINKS%', JSON.stringify(snapshotSymLinks)), '%DICT%', JSON.stringify(fileDictionary)), '%DOCOMPRESS%', JSON.stringify(doCompress))))));
                }
            }
            else {
                return cb(null, null);
            }
        })
            .on('error', (error) => {
                reject(error);
            })
            .pipe(fs_extra_1.default.createWriteStream(target.output))
            .on('error', (error) => {
                reject(error);
            })
            .on('close', () => {
                preludeSize = meter.bytes;
                fs_extra_1.default.open(target.output, 'r+', (error, fd) => {
                    if (error)
                        return reject(error);
                    injectPlaceholders(fd, placeholders, {
                        BAKERY: makeBakeryValueFromBakes(bakes),
                        PAYLOAD_POSITION: payloadPosition,
                        PAYLOAD_SIZE: payloadSize,
                        PRELUDE_POSITION: preludePosition,
                        PRELUDE_SIZE: preludeSize,
                    }, (error2) => {
                        if (error2)
                            return reject(error2);
                        fs_extra_1.default.close(fd, (error3) => {
                            if (error3)
                                return reject(error3);
                            resolve();
                        });
                    });
                });
            });
    });
}
exports.default = producer;
//# sourceMappingURL=producer.js.map
87
node_modules/pkg/lib-es5/refiner.js
generated
vendored
Normal file
@ -0,0 +1,87 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const path_1 = __importDefault(require("path"));
const chalk_1 = __importDefault(require("chalk"));
const common_1 = require("./common");
const log_1 = require("./log");
const win32 = process.platform === 'win32';
function hasParent(file, records) {
    const dirname = path_1.default.dirname(file);
    // root directory
    if (dirname === file) {
        return false;
    }
    return Boolean(records[dirname]);
}
function purgeTopDirectories(records) {
    while (true) {
        let found = false;
        for (const file in records) {
            if (records[file]) {
                const record = records[file];
                const links = record[common_1.STORE_LINKS];
                if (links && links.length === 1) {
                    if (!hasParent(file, records)) {
                        const file2 = path_1.default.join(file, links[0]);
                        const record2 = records[file2];
                        const links2 = record2[common_1.STORE_LINKS];
                        if (links2 && links2.length === 1) {
                            const file3 = path_1.default.join(file2, links2[0]);
                            const record3 = records[file3];
                            const links3 = record3[common_1.STORE_LINKS];
                            if (links3) {
                                delete records[file];
                                log_1.log.debug(chalk_1.default.cyan('Deleting record file :', file));
                                found = true;
                            }
                        }
                    }
                }
            }
        }
        if (!found)
            break;
    }
}
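// Note (added for clarity, an informal reading of the loop above): a root-most
// record whose directory chain is three levels of single-entry folders gets
// dropped, and the outer while repeats until no such parent remains, so the
// snapshot does not carry long contentless paths above the real project tree.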
function denominate(records, entrypoint, denominator, symLinks) {
    const newRecords = {};
    const makeSnap = (file) => {
        let snap = (0, common_1.substituteDenominator)(file, denominator);
        if (win32) {
            if (snap.slice(1) === ':')
                snap += '\\';
        }
        else if (snap === '') {
            snap = '/';
        }
        return snap;
    };
    for (const file in records) {
        if (records[file]) {
            const snap = makeSnap(file);
            newRecords[snap] = records[file];
        }
    }
    const tmpSymLinks = symLinks;
    symLinks = {};
    for (const [key, value] of Object.entries(tmpSymLinks)) {
        const key1 = makeSnap(key);
        const value1 = makeSnap(value);
        symLinks[key1] = value1;
    }
    return {
        records: newRecords,
        entrypoint: (0, common_1.substituteDenominator)(entrypoint, denominator),
        symLinks,
    };
}
function refiner(records, entrypoint, symLinks) {
    purgeTopDirectories(records);
    const denominator = (0, common_1.retrieveDenominator)(Object.keys(records));
    return denominate(records, entrypoint, denominator, symLinks);
}
exports.default = refiner;
//# sourceMappingURL=refiner.js.map
9
node_modules/pkg/lib-es5/types.js
generated
vendored
Normal file
@ -0,0 +1,9 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.platform = void 0;
exports.platform = {
    macos: 'darwin',
    win: 'win32',
    linux: 'linux',
};
//# sourceMappingURL=types.js.map
853
node_modules/pkg/lib-es5/walker.js
generated
vendored
Normal file
@ -0,0 +1,853 @@
"use strict";
/* eslint-disable require-atomic-updates */
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const assert_1 = __importDefault(require("assert"));
const fs_extra_1 = __importDefault(require("fs-extra"));
const is_core_module_1 = __importDefault(require("is-core-module"));
const globby_1 = __importDefault(require("globby"));
const path_1 = __importDefault(require("path"));
const chalk_1 = __importDefault(require("chalk"));
const common_1 = require("./common");
const follow_1 = require("./follow");
const log_1 = require("./log");
const detector = __importStar(require("./detector"));
// Note: as a developer, you can set the PKG_STRICT_VER variable.
// This will turn on some assertions in the walker code below
// to assert that each file content/state that we append
// to the virtual file system applies to a real file,
// not a symlink.
// By default assertions are disabled as they can have a
// performance hit.
const strictVerify = Boolean(process.env.PKG_STRICT_VER);
const win32 = process.platform === 'win32';
function unlikelyJavascript(file) {
    return ['.css', '.html', '.json', '.vue'].includes(path_1.default.extname(file));
}
function isPublic(config) {
    if (config.private) {
        return false;
    }
    const { licenses } = config;
    let { license } = config;
    if (licenses) {
        license = licenses;
    }
    if (license && !Array.isArray(license)) {
        license = typeof license === 'string' ? license : license.type;
    }
    if (Array.isArray(license)) {
        license = license.map((c) => String(c.type || c)).join(',');
    }
    if (!license) {
        return false;
    }
    if (/^\(/.test(license)) {
        license = license.slice(1);
    }
    if (/\)$/.test(license)) {
        license = license.slice(0, -1);
    }
    license = license.toLowerCase();
    const allLicenses = Array.prototype.concat(license.split(' or '), license.split(' and '), license.split('/'), license.split(','));
    let result = false;
    const foss = [
        'isc',
        'mit',
        'apache-2.0',
        'apache 2.0',
        'public domain',
        'bsd',
        'bsd-2-clause',
        'bsd-3-clause',
        'wtfpl',
        'cc-by-3.0',
        'x11',
        'artistic-2.0',
        'gplv3',
        'mpl',
        'mplv2.0',
        'unlicense',
        'apache license 2.0',
        'zlib',
        'mpl-2.0',
        'nasa-1.3',
        'apache license, version 2.0',
        'lgpl-2.1+',
        'cc0-1.0',
    ];
    for (const c of allLicenses) {
        result = foss.indexOf(c) >= 0;
        if (result) {
            break;
        }
    }
    return result;
}
function upon(p, base) {
    if (typeof p !== 'string') {
        throw (0, log_1.wasReported)('Config items must be strings. See examples');
    }
    let negate = false;
    if (p[0] === '!') {
        p = p.slice(1);
        negate = true;
    }
    p = path_1.default.join(base, p);
    if (win32) {
        p = p.replace(/\\/g, '/');
    }
    if (negate) {
        p = `!${p}`;
    }
    return p;
}
function collect(ps) {
    return globby_1.default.sync(ps, { dot: true });
}
function expandFiles(efs, base) {
    if (!Array.isArray(efs)) {
        efs = [efs];
    }
    efs = collect(efs.map((p) => upon(p, base)));
    return efs;
}
async function stepRead(record) {
    if (strictVerify) {
        (0, assert_1.default)(record.file === (0, common_1.toNormalizedRealPath)(record.file));
    }
    let body;
    try {
        body = await fs_extra_1.default.readFile(record.file);
    }
    catch (error) {
        const exception = error;
        log_1.log.error(`Cannot read file, ${exception.code}`, record.file);
        throw (0, log_1.wasReported)(exception.message);
    }
    record.body = body;
}
function stepStrip(record) {
    let body = (record.body || '').toString('utf8');
    if (/^\ufeff/.test(body)) {
        body = body.replace(/^\ufeff/, '');
    }
    if (/^#!/.test(body)) {
        body = body.replace(/^#![^\n]*\n/, '\n');
    }
    record.body = body;
}
function stepDetect(record, marker, derivatives) {
    let { body = '' } = record;
    if (body instanceof Buffer) {
        body = body.toString();
    }
    try {
        detector.detect(body, (node, trying) => {
            const { toplevel } = marker;
            let d = detector.visitorSuccessful(node);
            if (d) {
                if (d.mustExclude) {
                    return false;
                }
                d.mayExclude = d.mayExclude || trying;
                derivatives.push(d);
                return false;
            }
            d = detector.visitorNonLiteral(node);
            if (d) {
                if (typeof d === 'object' && d.mustExclude) {
                    return false;
                }
                const debug = !toplevel || d.mayExclude || trying;
                const level = debug ? 'debug' : 'warn';
                log_1.log[level](`Cannot resolve '${d.alias}'`, [
                    record.file,
                    'Dynamic require may fail at run time, because the requested file',
                    'is unknown at compilation time and not included into executable.',
                    "Use a string literal as an argument for 'require', or leave it",
                    "as is and specify the resolved file name in 'scripts' option.",
                ]);
                return false;
            }
            d = detector.visitorMalformed(node);
            if (d) {
                // there is no 'mustExclude'
                const debug = !toplevel || trying;
                const level = debug ? 'debug' : 'warn'; // there is no 'mayExclude'
                log_1.log[level](`Malformed requirement for '${d.alias}'`, [record.file]);
                return false;
            }
            d = detector.visitorUseSCWD(node);
            if (d) {
                // there is no 'mustExclude'
                const level = 'debug'; // there is no 'mayExclude'
                log_1.log[level](`Path.resolve(${d.alias}) is ambiguous`, [
                    record.file,
                    "It resolves relatively to 'process.cwd' by default, however",
                    "you may want to use 'path.dirname(require.main.filename)'",
                ]);
                return false;
            }
            return true; // can i go inside?
        });
    }
    catch (error) {
        log_1.log.error(error.message, record.file);
        throw (0, log_1.wasReported)(error.message);
    }
}
function findCommonJunctionPoint(file, realFile) {
    // find common denominator => where the link changes
    while ((0, common_1.toNormalizedRealPath)(path_1.default.dirname(file)) === path_1.default.dirname(realFile)) {
        file = path_1.default.dirname(file);
        realFile = path_1.default.dirname(realFile);
    }
    return { file, realFile };
}
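// Example (added for clarity): if /project/node_modules/dep is a symlink to
// /store/dep, then for file = /project/node_modules/dep/index.js and
// realFile = /store/dep/index.js the loop walks both paths upward while the
// parent's real path still matches, returning the junction pair
// { file: '/project/node_modules/dep', realFile: '/store/dep' }.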
class Walker {
    constructor() {
        this.tasks = [];
        this.records = {};
        this.dictionary = {};
        this.patches = {};
        this.params = {};
        this.symLinks = {};
    }
    appendRecord({ file, store }) {
        if (this.records[file]) {
            return;
        }
        if (store === common_1.STORE_BLOB ||
            store === common_1.STORE_CONTENT ||
            store === common_1.STORE_LINKS) {
            // make sure we have a real file
            if (strictVerify) {
                (0, assert_1.default)(file === (0, common_1.toNormalizedRealPath)(file));
            }
        }
        this.records[file] = { file };
    }
    append(task) {
        if (strictVerify) {
            (0, assert_1.default)(typeof task.file === 'string');
            (0, assert_1.default)(task.file === (0, common_1.normalizePath)(task.file));
        }
        this.appendRecord(task);
        this.tasks.push(task);
        const what = {
            [common_1.STORE_BLOB]: 'Bytecode of',
            [common_1.STORE_CONTENT]: 'Content of',
            [common_1.STORE_LINKS]: 'Directory',
            [common_1.STORE_STAT]: 'Stat info of',
        }[task.store];
        if (task.reason) {
            log_1.log.debug(`${what} ${task.file} is added to queue. It was required from ${task.reason}`);
        }
        else {
            log_1.log.debug(`${what} ${task.file} is added to queue.`);
        }
    }
    appendSymlink(file, realFile) {
        const a = findCommonJunctionPoint(file, realFile);
        file = a.file;
        realFile = a.realFile;
        if (!this.symLinks[file]) {
            const dn = path_1.default.dirname(file);
            this.appendFileInFolder({
                file: dn,
                store: common_1.STORE_LINKS,
                data: path_1.default.basename(file),
            });
            log_1.log.debug(`adding symlink ${file} => ${path_1.default.relative(file, realFile)}`);
            this.symLinks[file] = realFile;
            this.appendStat({
                file: realFile,
                store: common_1.STORE_STAT,
            });
            this.appendStat({
                file: dn,
                store: common_1.STORE_STAT,
            });
            this.appendStat({
                file,
                store: common_1.STORE_STAT,
            });
        }
    }
    appendStat(task) {
        (0, assert_1.default)(task.store === common_1.STORE_STAT);
        this.append(task);
    }
    appendFileInFolder(task) {
        if (strictVerify) {
            (0, assert_1.default)(task.store === common_1.STORE_LINKS);
            (0, assert_1.default)(typeof task.file === 'string');
        }
        const realFile = (0, common_1.toNormalizedRealPath)(task.file);
        if (realFile === task.file) {
            this.append(task);
            return;
        }
        this.append(Object.assign(Object.assign({}, task), { file: realFile }));
        this.appendStat({
            file: task.file,
            store: common_1.STORE_STAT,
        });
        this.appendStat({
            file: path_1.default.dirname(task.file),
            store: common_1.STORE_STAT,
        });
    }
    appendBlobOrContent(task) {
        if (strictVerify) {
            (0, assert_1.default)(task.file === (0, common_1.normalizePath)(task.file));
        }
        (0, assert_1.default)(task.store === common_1.STORE_BLOB || task.store === common_1.STORE_CONTENT);
        (0, assert_1.default)(typeof task.file === 'string');
        const realFile = (0, common_1.toNormalizedRealPath)(task.file);
        if (realFile === task.file) {
            this.append(task);
            return;
        }
        this.append(Object.assign(Object.assign({}, task), { file: realFile }));
        this.appendSymlink(task.file, realFile);
        this.appendStat({
            file: task.file,
            store: common_1.STORE_STAT,
        });
    }
    async appendFilesFromConfig(marker) {
        const { config, configPath, base } = marker;
        const pkgConfig = config === null || config === void 0 ? void 0 : config.pkg;
        if (pkgConfig) {
            let { scripts } = pkgConfig;
            if (scripts) {
                scripts = expandFiles(scripts, base);
                for (const script of scripts) {
                    const stat = await fs_extra_1.default.stat(script);
                    if (stat.isFile()) {
                        if (!(0, common_1.isDotJS)(script) && !(0, common_1.isDotJSON)(script) && !(0, common_1.isDotNODE)(script)) {
                            log_1.log.warn("Non-javascript file is specified in 'scripts'.", [
                                'Pkg will probably fail to parse. Specify *.js in glob.',
                                script,
                            ]);
                        }
                        this.appendBlobOrContent({
                            file: (0, common_1.normalizePath)(script),
                            marker,
                            store: common_1.STORE_BLOB,
                            reason: configPath,
                        });
                    }
                }
            }
            let { assets } = pkgConfig;
            if (assets) {
                assets = expandFiles(assets, base);
                for (const asset of assets) {
                    log_1.log.debug(' Adding asset : .... ', asset);
                    const stat = await fs_extra_1.default.stat(asset);
                    if (stat.isFile()) {
                        this.appendBlobOrContent({
                            file: (0, common_1.normalizePath)(asset),
                            marker,
                            store: common_1.STORE_CONTENT,
                            reason: configPath,
                        });
                    }
                }
            }
        }
        else if (config) {
            let { files } = config;
            if (files) {
                files = expandFiles(files, base);
                for (let file of files) {
                    file = (0, common_1.normalizePath)(file);
                    const stat = await fs_extra_1.default.stat(file);
                    if (stat.isFile()) {
                        // 1) remove sources of top-level(!) package 'files' i.e. ship as BLOB
                        // 2) non-source (non-js) files of top-level package are shipped as CONTENT
                        // 3) parsing some js 'files' of non-top-level packages fails, hence all CONTENT
                        if (marker.toplevel) {
                            this.appendBlobOrContent({
                                file,
                                marker,
                                store: (0, common_1.isDotJS)(file) ? common_1.STORE_BLOB : common_1.STORE_CONTENT,
                                reason: configPath,
                            });
                        }
                        else {
                            this.appendBlobOrContent({
                                file,
                                marker,
                                store: common_1.STORE_CONTENT,
                                reason: configPath,
                            });
                        }
                    }
                }
            }
        }
    }
    async stepActivate(marker, derivatives) {
        if (!marker) {
            (0, assert_1.default)(false);
        }
        if (marker.activated) {
            return;
        }
        const { config, base } = marker;
        if (!config) {
            (0, assert_1.default)(false);
        }
        const { name } = config;
        if (name) {
            const d = this.dictionary[name];
            if (d) {
                if (typeof config.dependencies === 'object' &&
                    typeof d.dependencies === 'object') {
                    Object.assign(config.dependencies, d.dependencies);
                    delete d.dependencies;
                }
                Object.assign(config, d);
                marker.hasDictionary = true;
            }
        }
        const { dependencies } = config;
        if (typeof dependencies === 'object') {
            for (const dependency in dependencies) {
                // it may be `undefined` - overridden
                // in dictionary (see publicsuffixlist)
                if (dependencies[dependency]) {
                    derivatives.push({
                        alias: dependency,
                        aliasType: common_1.ALIAS_AS_RESOLVABLE,
                        fromDependencies: true,
                    });
                    derivatives.push({
                        alias: `${dependency}/package.json`,
                        aliasType: common_1.ALIAS_AS_RESOLVABLE,
                        fromDependencies: true,
                    });
                }
            }
        }
        const pkgConfig = config.pkg;
        if (pkgConfig) {
            const { patches } = pkgConfig;
            if (patches) {
                for (const key in patches) {
                    if (patches[key]) {
                        const p = path_1.default.join(base, key);
                        this.patches[p] = patches[key];
                    }
                }
            }
            const { deployFiles } = pkgConfig;
            if (deployFiles) {
                marker.hasDeployFiles = true;
                for (const deployFile of deployFiles) {
                    const type = deployFile[2] || 'file';
                    log_1.log.warn(`Cannot include ${type} %1 into executable.`, [
                        `The ${type} must be distributed with executable as %2.`,
                        `%1: ${path_1.default.relative(process.cwd(), path_1.default.join(base, deployFile[0]))}`,
                        `%2: path-to-executable/${deployFile[1]}`,
                    ]);
                }
            }
            if (pkgConfig.log) {
                pkgConfig.log(log_1.log, { packagePath: base });
            }
        }
        await this.appendFilesFromConfig(marker);
        marker.public = isPublic(config);
        if (!marker.public && marker.toplevel) {
            marker.public = this.params.publicToplevel;
        }
        if (!marker.public && !marker.toplevel && this.params.publicPackages) {
            marker.public =
                this.params.publicPackages[0] === '*' ||
                    (!!name && this.params.publicPackages.indexOf(name) !== -1);
        }
        marker.activated = true;
        // assert no further work with config
        delete marker.config;
    }
    hasPatch(record) {
        const patch = this.patches[record.file];
        if (!patch) {
            return;
        }
        return true;
    }
    stepPatch(record) {
        const patch = this.patches[record.file];
        if (!patch) {
            return;
        }
        let body = (record.body || '').toString('utf8');
        for (let i = 0; i < patch.length; i += 2) {
            if (typeof patch[i] === 'object') {
                if (patch[i].do === 'erase') {
                    body = patch[i + 1];
                }
                else if (patch[i].do === 'prepend') {
                    body = patch[i + 1] + body;
                }
                else if (patch[i].do === 'append') {
                    body += patch[i + 1];
                }
            }
            else if (typeof patch[i] === 'string') {
                // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions
                // function escapeRegExp
                const esc = patch[i].replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
                const regexp = new RegExp(esc, 'g');
                body = body.replace(regexp, patch[i + 1]);
            }
        }
        record.body = body;
    }
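    // Illustrative config (added for clarity; the shape is inferred from the
    // loop above and from stepActivate's 'patches' handling): values are flat
    // arrays read in pairs, either [search, replacement] strings or
    // [{ do: 'erase' | 'prepend' | 'append' }, text]:
    //
    //   "pkg": { "patches": { "lib/debug.js": ["var DEBUG = true", "var DEBUG = false"] } }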
    async stepDerivatives_ALIAS_AS_RELATIVE(record, marker, derivative) {
        const file = (0, common_1.normalizePath)(path_1.default.join(path_1.default.dirname(record.file), derivative.alias));
        let stat;
        try {
            stat = await fs_extra_1.default.stat(file);
        }
        catch (error) {
            const { toplevel } = marker;
            const exception = error;
            const debug = !toplevel && exception.code === 'ENOENT';
            const level = debug ? 'debug' : 'warn';
            log_1.log[level](`Cannot stat, ${exception.code}`, [
                file,
                `The file was required from '${record.file}'`,
            ]);
        }
        if (stat && stat.isFile()) {
            this.appendBlobOrContent({
                file,
                marker,
                store: common_1.STORE_CONTENT,
                reason: record.file,
            });
        }
    }
    async stepDerivatives_ALIAS_AS_RESOLVABLE(record, marker, derivative) {
        var _a, _b;
        const newPackages = [];
        const catchReadFile = (file) => {
            (0, assert_1.default)((0, common_1.isPackageJson)(file), `walker: ${file} must be package.json`);
            newPackages.push({ packageJson: file });
        };
        const catchPackageFilter = (config, base) => {
            const newPackage = newPackages[newPackages.length - 1];
            newPackage.marker = { config, configPath: newPackage.packageJson, base };
        };
        let newFile = '';
        let failure;
        const basedir = path_1.default.dirname(record.file);
        try {
            newFile = await (0, follow_1.follow)(derivative.alias, {
                basedir,
                // default is extensions: ['.js'], but
                // it is not enough because 'typos.json'
                // is not taken in require('./typos')
                // in 'normalize-package-data/lib/fixer.js'
                extensions: ['.js', '.json', '.node'],
                catchReadFile,
                catchPackageFilter,
            });
        }
        catch (error) {
            failure = error;
        }
        if (failure) {
            const { toplevel } = marker;
            const mainNotFound = newPackages.length > 0 && !((_b = (_a = newPackages[0].marker) === null || _a === void 0 ? void 0 : _a.config) === null || _b === void 0 ? void 0 : _b.main);
            const debug = !toplevel ||
                derivative.mayExclude ||
                (mainNotFound && derivative.fromDependencies);
            const level = debug ? 'debug' : 'warn';
            if (mainNotFound) {
                const message = "Entry 'main' not found in %1";
                log_1.log[level](message, [
                    `%1: ${newPackages[0].packageJson}`,
                    `%2: ${record.file}`,
                ]);
            }
            else {
                log_1.log[level](`${chalk_1.default.yellow(failure.message)} in ${record.file}`);
            }
            return;
        }
        let newPackageForNewRecords;
        for (const newPackage of newPackages) {
            let newFile2;
            try {
                newFile2 = await (0, follow_1.follow)(derivative.alias, {
                    basedir: path_1.default.dirname(record.file),
                    extensions: ['.js', '.json', '.node'],
                    ignoreFile: newPackage.packageJson,
                });
                if (strictVerify) {
                    (0, assert_1.default)(newFile2 === (0, common_1.normalizePath)(newFile2));
                }
            }
            catch (_) {
                // not setting is enough
            }
            if (newFile2 !== newFile) {
                newPackageForNewRecords = newPackage;
                break;
            }
        }
        if (newPackageForNewRecords) {
            if (strictVerify) {
                (0, assert_1.default)(newPackageForNewRecords.packageJson ===
                    (0, common_1.normalizePath)(newPackageForNewRecords.packageJson));
            }
            this.appendBlobOrContent({
                file: newPackageForNewRecords.packageJson,
                marker: newPackageForNewRecords.marker,
                store: common_1.STORE_CONTENT,
                reason: record.file,
            });
        }
        this.appendBlobOrContent({
            file: newFile,
            marker: newPackageForNewRecords ? newPackageForNewRecords.marker : marker,
            store: common_1.STORE_BLOB,
            reason: record.file,
        });
    }
    async stepDerivatives(record, marker, derivatives) {
        for (const derivative of derivatives) {
            // TODO: actually use the target node version
            if ((0, is_core_module_1.default)(derivative.alias, '99.0.0'))
                continue;
            switch (derivative.aliasType) {
                case common_1.ALIAS_AS_RELATIVE:
                    await this.stepDerivatives_ALIAS_AS_RELATIVE(record, marker, derivative);
                    break;
                case common_1.ALIAS_AS_RESOLVABLE:
                    await this.stepDerivatives_ALIAS_AS_RESOLVABLE(record, marker, derivative);
                    break;
                default:
                    (0, assert_1.default)(false, `walker: unknown aliasType ${derivative.aliasType}`);
            }
        }
    }
    async step_STORE_ANY(record, marker, store) {
        if (strictVerify) {
            (0, assert_1.default)(record.file === (0, common_1.toNormalizedRealPath)(record.file));
        }
        if (record[store] !== undefined)
            return;
        record[store] = false; // default is discard
        this.appendStat({
            file: record.file,
            store: common_1.STORE_STAT,
        });
        const derivatives1 = [];
        await this.stepActivate(marker, derivatives1);
        await this.stepDerivatives(record, marker, derivatives1);
        if (store === common_1.STORE_BLOB) {
            if (unlikelyJavascript(record.file) || (0, common_1.isDotNODE)(record.file)) {
                this.appendBlobOrContent({
                    file: record.file,
                    marker,
                    store: common_1.STORE_CONTENT,
                });
                return; // discard
            }
            if (marker.public || marker.hasDictionary) {
                this.appendBlobOrContent({
                    file: record.file,
                    marker,
                    store: common_1.STORE_CONTENT,
                });
            }
        }
        if (store === common_1.STORE_BLOB || this.hasPatch(record)) {
            if (!record.body) {
                await stepRead(record);
                this.stepPatch(record);
                if (store === common_1.STORE_BLOB) {
                    stepStrip(record);
                }
            }
            if (store === common_1.STORE_BLOB) {
                const derivatives2 = [];
                stepDetect(record, marker, derivatives2);
                await this.stepDerivatives(record, marker, derivatives2);
            }
        }
        record[store] = true;
    }
    step_STORE_LINKS(record, data) {
        if (strictVerify) {
            (0, assert_1.default)(record.file === (0, common_1.toNormalizedRealPath)(record.file), ' expecting real file !!!');
        }
        if (record[common_1.STORE_LINKS]) {
            record[common_1.STORE_LINKS].push(data);
            return;
        }
        record[common_1.STORE_LINKS] = [data];
        if (record[common_1.STORE_STAT]) {
            return;
        }
        this.appendStat({
            file: record.file,
            store: common_1.STORE_STAT,
        });
    }
    async step_STORE_STAT(record) {
        if (record[common_1.STORE_STAT])
            return;
        const realPath = (0, common_1.toNormalizedRealPath)(record.file);
        if (realPath !== record.file) {
            this.appendStat({
                file: realPath,
                store: common_1.STORE_STAT,
            });
        }
        try {
            const valueStat = await fs_extra_1.default.stat(record.file);
            const value = {
                mode: valueStat.mode,
                size: valueStat.isFile() ? valueStat.size : 0,
                isFileValue: valueStat.isFile(),
                isDirectoryValue: valueStat.isDirectory(),
                isSocketValue: valueStat.isSocket(),
                isSymbolicLinkValue: valueStat.isSymbolicLink(),
            };
            record[common_1.STORE_STAT] = value;
        }
        catch (error) {
            const exception = error;
            log_1.log.error(`Cannot stat, ${exception.code}`, record.file);
            throw (0, log_1.wasReported)(exception.message);
        }
        if (path_1.default.dirname(record.file) !== record.file) {
            // root directory
            this.appendFileInFolder({
                file: path_1.default.dirname(record.file),
                store: common_1.STORE_LINKS,
                data: path_1.default.basename(record.file),
            });
        }
    }
    async step(task) {
        const { file, store, data } = task;
        const record = this.records[file];
        switch (store) {
            case common_1.STORE_BLOB:
            case common_1.STORE_CONTENT:
                // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
                await this.step_STORE_ANY(record, task.marker, store);
                break;
            case common_1.STORE_LINKS:
                this.step_STORE_LINKS(record, data);
                break;
            case common_1.STORE_STAT:
                await this.step_STORE_STAT(record);
                break;
            default:
                (0, assert_1.default)(false, `walker: unknown store ${store}`);
        }
    }
    async readDictionary(marker) {
        var _a, _b, _c;
        if (((_a = this.params.noDictionary) === null || _a === void 0 ? void 0 : _a[0]) === '*') {
            return;
        }
        const dd = path_1.default.join(__dirname, '../dictionary');
        const files = await fs_extra_1.default.readdir(dd);
        for (const file of files) {
            if (/\.js$/.test(file)) {
                const name = file.slice(0, -3);
                if ((_b = this.params.noDictionary) === null || _b === void 0 ? void 0 : _b.includes(file)) {
                    continue;
                }
                // eslint-disable-next-line import/no-dynamic-require, global-require, @typescript-eslint/no-var-requires
                const config = require(path_1.default.join(dd, file));
                this.dictionary[name] = config;
            }
        }
        const pkgConfig = (_c = marker.config) === null || _c === void 0 ? void 0 : _c.pkg;
        if (pkgConfig) {
            const { dictionary } = pkgConfig;
            if (dictionary) {
                for (const name in dictionary) {
                    if (dictionary[name]) {
                        this.dictionary[name] = { pkg: dictionary[name] };
                    }
                }
            }
        }
    }
    async start(marker, entrypoint, addition, params) {
        this.params = params;
        this.symLinks = {};
        await this.readDictionary(marker);
        entrypoint = (0, common_1.normalizePath)(entrypoint);
        this.appendBlobOrContent({
            file: entrypoint,
            marker,
            store: common_1.STORE_BLOB,
        });
        if (addition) {
            addition = (0, common_1.normalizePath)(addition);
            this.appendBlobOrContent({
                file: addition,
                marker,
                store: common_1.STORE_CONTENT,
            });
        }
        const { tasks } = this;
        for (let i = 0; i < tasks.length; i += 1) {
            // NO MULTIPLE WORKERS! THIS WILL LEAD TO NON-DETERMINISTIC
            // ORDER. one-by-one fifo is the only way to iterate tasks
            await this.step(tasks[i]);
        }
        return {
            symLinks: this.symLinks,
            records: this.records,
            entrypoint: (0, common_1.normalizePath)(entrypoint),
        };
    }
}
async function walker(...args) {
    const w = new Walker();
    return w.start(...args);
}
exports.default = walker;
//# sourceMappingURL=walker.js.map