require('./sourcemap-register.js');/******/ (() => { // webpackBootstrap
/******/ var __webpack_modules__ = ({

/***/ 3109:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
const core_1 = __nccwpck_require__(2186);
const utils_1 = __nccwpck_require__(918);
// Entry point for the action
function run() {
    return __awaiter(this, void 0, void 0, function* () {
        try {
            // Get the input parameters
            const filesPattern = (0, core_1.getInput)('files');
            const searchText = (0, core_1.getInput)('search-text');
            const replaceText = (0, core_1.getInput)('replacement-text');
            const excludePattern = (0, core_1.getInput)('exclude');
            const inputEncoding = (0, core_1.getInput)('encoding');
            const maxParallelism = (0, core_1.getInput)('max-parallelism');
            // Validate the encoding
            if (!(0, utils_1.isValidEncoding)(inputEncoding)) {
                throw new Error(`Invalid encoding: ${inputEncoding}`);
            }
            // Validate that maxParallelism is a positive integer
            if (!(0, utils_1.isPositiveInteger)(maxParallelism)) {
                throw new Error(`Invalid max-parallelism: ${maxParallelism}`);
            }
            // Get the file paths that match the files pattern and do not match the exclude pattern
            const filePaths = yield (0, utils_1.getFiles)(filesPattern, excludePattern);
            // If no file paths were found, log a warning and exit
            if (filePaths.length === 0) {
                (0, core_1.warning)(`No files found for the given pattern.`);
                return;
            }
            (0, core_1.info)(`Found ${filePaths.length} files for the given pattern.`);
            (0, core_1.info)(`Replacing "${searchText}" with "${replaceText}".`);
            // Process the file paths in chunks, replacing the search text with the replace text in each file
            // This is done to avoid opening too many files at once
            const encoding = inputEncoding;
            const chunkSize = parseInt(maxParallelism);
            yield (0, utils_1.processInChunks)(filePaths, (filePath) => __awaiter(this, void 0, void 0, function* () {
                (0, core_1.info)(`Replacing text in file ${filePath}`);
                yield (0, utils_1.replaceTextInFile)(filePath, searchText, replaceText, encoding);
            }), chunkSize);
            (0, core_1.info)(`Done!`);
        }
        catch (err) {
            if (err instanceof Error) {
                (0, core_1.setFailed)(err.message);
            }
            else {
                const errorMessage = 'An error occurred. Run in debug mode for additional info.';
                (0, core_1.debug)(`${JSON.stringify(err)}`);
                (0, core_1.setFailed)(errorMessage);
            }
        }
    });
}
run();
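// Hedged usage sketch (the input names are taken from the getInput() calls above; the action.yml
// metadata itself is not part of this bundle): @actions/core reads each input from an INPUT_*
// environment variable, so a local dry run could supply hypothetical values like:
//   process.env['INPUT_FILES'] = '**/*.md';
//   process.env['INPUT_SEARCH-TEXT'] = 'foo';
//   process.env['INPUT_REPLACEMENT-TEXT'] = 'bar';
//   process.env['INPUT_EXCLUDE'] = 'node_modules/**';
//   process.env['INPUT_ENCODING'] = 'utf8';
//   process.env['INPUT_MAX-PARALLELISM'] = '10';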
/***/ }),

/***/ 918:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.replaceTextInFile = exports.processInChunks = exports.getFiles = exports.isValidEncoding = exports.isPositiveInteger = void 0;
const fs_1 = __importDefault(__nccwpck_require__(7147));
const glob_1 = __nccwpck_require__(8211);
const encodings = [
    'ascii',
    'utf8',
    'utf16le',
    'ucs2',
    'base64',
    'latin1',
];
/**
 * Checks if a given string represents a positive integer.
 *
 * @param value - The string to check.
 * @returns True if the string represents a positive integer, false otherwise.
 */
function isPositiveInteger(value) {
    return /^[1-9]\d*$/.test(value);
}
exports.isPositiveInteger = isPositiveInteger;
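// A minimal illustration of the regex above (example values are assumptions, not from the source):
//   isPositiveInteger('8')   -> true
//   isPositiveInteger('0')   -> false (zero is not positive)
//   isPositiveInteger('2.5') -> false
//   isPositiveInteger('08')  -> false (leading zeros are rejected)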
/**
 * Checks if the given encoding is supported.
 * @param encoding The encoding to check.
 * @returns `true` if the encoding is valid, `false` otherwise.
 */
function isValidEncoding(encoding) {
    return encodings.includes(encoding);
}
exports.isValidEncoding = isValidEncoding;
/**
 * Returns an array of file paths that match the given pattern.
 * @param filesPattern The file path or glob pattern to search for.
 * @param exclude An optional glob pattern to exclude from the search.
 * @returns A Promise that resolves to an array of file paths.
 * @throws An error if there is an error getting the files.
 */
function getFiles(filesPattern, exclude) {
    return __awaiter(this, void 0, void 0, function* () {
        try {
            return yield (0, glob_1.glob)(filesPattern, { ignore: exclude });
        }
        catch (error) {
            throw new Error(`Error getting files: ${error}`);
        }
    });
}
exports.getFiles = getFiles;
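// Hedged usage sketch (hypothetical patterns, not taken from the action's own tests):
//   const paths = yield getFiles('src/**/*.ts', 'src/**/*.test.ts');
// glob() resolves to the matching paths, and the `ignore` option drops anything that
// matches the exclude pattern.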
/**
 * Processes an array in chunks, applying a given function to each item.
 * @param array The array to process.
 * @param func The function to apply to each item.
 * @param chunkSize The number of items to process at a time.
 * @returns A Promise that resolves when all items have been processed.
 */
function processInChunks(array, func, chunkSize) {
    return __awaiter(this, void 0, void 0, function* () {
        // Split the array into chunks
        const chunks = Array(Math.ceil(array.length / chunkSize))
            .fill(0)
            .map((_, index) => index * chunkSize)
            .map(begin => array.slice(begin, begin + chunkSize));
        // Process each chunk
        for (const chunk of chunks) {
            yield Promise.all(chunk.map(func));
        }
    });
}
exports.processInChunks = processInChunks;
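// Sketch of the chunking behaviour (illustrative values only):
//   processInChunks(['a', 'b', 'c', 'd', 'e'], handle, 2)
// splits the array into [['a','b'], ['c','d'], ['e']] and awaits Promise.all() for each chunk
// in turn, so at most `chunkSize` handlers run concurrently.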
/**
 * Replaces all instances of the given text with the given value in the file.
 * @param filePath The path of the file to modify.
 * @param searchText The string to search for.
 * @param replacementText The string to replace the search text with.
 * @param encoding The encoding of the file.
 * @returns A Promise that resolves when the file has been modified.
 * @throws An error if there is an error reading or saving the file.
 */
function replaceTextInFile(filePath_1, searchText_1, replacementText_1) {
    return __awaiter(this, arguments, void 0, function* (filePath, searchText, replacementText, encoding = 'utf8') {
        // Don't do anything if the search text is empty
        if (!searchText) {
            return;
        }
        const fileContent = yield readFileContent(filePath, encoding);
        // Use replaceAll so every occurrence is substituted, matching the documented behavior
        // above (String.replace with a plain string only replaces the first match).
        const updatedContent = fileContent.replaceAll(searchText, replacementText);
        yield saveFileContent(filePath, updatedContent);
    });
}
exports.replaceTextInFile = replaceTextInFile;
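// Illustrative example (hypothetical file contents): replacing "foo" with "bar" in a file
// containing "foo foo" yields "bar bar", because replaceAll substitutes every occurrence of
// the plain-string search text.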
/**
 * Reads the content of the file at the given path.
 * @param filePath The path of the file to read.
 * @param encoding The encoding of the file.
 * @returns A Promise that resolves to the content of the file as a string.
 * @throws An error if there is an error reading the file.
 */
function readFileContent(filePath, encoding) {
    return __awaiter(this, void 0, void 0, function* () {
        try {
            const fileContentBuffer = yield fs_1.default.promises.readFile(filePath, encoding);
            return fileContentBuffer.toString();
        }
        catch (error) {
            throw new Error(`Error reading file content: ${error}`);
        }
    });
}
/**
 * Saves the given content to the file at the given path.
 * @param filePath The path of the file to save.
 * @param content The content to save to the file.
 * @returns A Promise that resolves when the file has been saved.
 * @throws An error if there is an error saving the file.
 */
function saveFileContent(filePath, content) {
    return __awaiter(this, void 0, void 0, function* () {
        try {
            yield fs_1.default.promises.writeFile(filePath, content);
        }
        catch (error) {
            throw new Error(`Error saving file content: ${error}`);
        }
    });
}

/***/ }),
|
|
|
|
/***/ 7351:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
|
}) : (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
o[k2] = m[k];
|
|
}));
|
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
}) : function(o, v) {
|
|
o["default"] = v;
|
|
});
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
__setModuleDefault(result, mod);
|
|
return result;
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.issue = exports.issueCommand = void 0;
|
|
const os = __importStar(__nccwpck_require__(2037));
|
|
const utils_1 = __nccwpck_require__(5278);
|
|
/**
|
|
* Commands
|
|
*
|
|
* Command Format:
|
|
* ::name key=value,key=value::message
|
|
*
|
|
* Examples:
|
|
* ::warning::This is the message
|
|
* ::set-env name=MY_VAR::some value
|
|
*/
|
|
function issueCommand(command, properties, message) {
|
|
const cmd = new Command(command, properties, message);
|
|
process.stdout.write(cmd.toString() + os.EOL);
|
|
}
|
|
exports.issueCommand = issueCommand;
|
|
function issue(name, message = '') {
|
|
issueCommand(name, {}, message);
|
|
}
|
|
exports.issue = issue;
|
|
const CMD_STRING = '::';
|
|
class Command {
|
|
constructor(command, properties, message) {
|
|
if (!command) {
|
|
command = 'missing.command';
|
|
}
|
|
this.command = command;
|
|
this.properties = properties;
|
|
this.message = message;
|
|
}
|
|
toString() {
|
|
let cmdStr = CMD_STRING + this.command;
|
|
if (this.properties && Object.keys(this.properties).length > 0) {
|
|
cmdStr += ' ';
|
|
let first = true;
|
|
for (const key in this.properties) {
|
|
if (this.properties.hasOwnProperty(key)) {
|
|
const val = this.properties[key];
|
|
if (val) {
|
|
if (first) {
|
|
first = false;
|
|
}
|
|
else {
|
|
cmdStr += ',';
|
|
}
|
|
cmdStr += `${key}=${escapeProperty(val)}`;
|
|
}
|
|
}
|
|
}
|
|
}
|
|
cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
|
|
return cmdStr;
|
|
}
|
|
}
|
|
function escapeData(s) {
|
|
return utils_1.toCommandValue(s)
|
|
.replace(/%/g, '%25')
|
|
.replace(/\r/g, '%0D')
|
|
.replace(/\n/g, '%0A');
|
|
}
|
|
function escapeProperty(s) {
|
|
return utils_1.toCommandValue(s)
|
|
.replace(/%/g, '%25')
|
|
.replace(/\r/g, '%0D')
|
|
.replace(/\n/g, '%0A')
|
|
.replace(/:/g, '%3A')
|
|
.replace(/,/g, '%2C');
|
|
}
|
|
//# sourceMappingURL=command.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2186:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
|
}) : (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
o[k2] = m[k];
|
|
}));
|
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
}) : function(o, v) {
|
|
o["default"] = v;
|
|
});
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
__setModuleDefault(result, mod);
|
|
return result;
|
|
};
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
});
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;
|
|
const command_1 = __nccwpck_require__(7351);
|
|
const file_command_1 = __nccwpck_require__(717);
|
|
const utils_1 = __nccwpck_require__(5278);
|
|
const os = __importStar(__nccwpck_require__(2037));
|
|
const path = __importStar(__nccwpck_require__(1017));
|
|
const oidc_utils_1 = __nccwpck_require__(8041);
|
|
/**
|
|
* The code to exit an action
|
|
*/
|
|
var ExitCode;
|
|
(function (ExitCode) {
|
|
/**
|
|
* A code indicating that the action was successful
|
|
*/
|
|
ExitCode[ExitCode["Success"] = 0] = "Success";
|
|
/**
|
|
* A code indicating that the action was a failure
|
|
*/
|
|
ExitCode[ExitCode["Failure"] = 1] = "Failure";
|
|
})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
|
|
//-----------------------------------------------------------------------
|
|
// Variables
|
|
//-----------------------------------------------------------------------
|
|
/**
|
|
* Sets env variable for this action and future actions in the job
|
|
* @param name the name of the variable to set
|
|
* @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
|
|
*/
|
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
function exportVariable(name, val) {
|
|
const convertedVal = utils_1.toCommandValue(val);
|
|
process.env[name] = convertedVal;
|
|
const filePath = process.env['GITHUB_ENV'] || '';
|
|
if (filePath) {
|
|
return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));
|
|
}
|
|
command_1.issueCommand('set-env', { name }, convertedVal);
|
|
}
|
|
exports.exportVariable = exportVariable;
|
|
/**
|
|
* Registers a secret which will get masked from logs
|
|
* @param secret value of the secret
|
|
*/
|
|
function setSecret(secret) {
|
|
command_1.issueCommand('add-mask', {}, secret);
|
|
}
|
|
exports.setSecret = setSecret;
|
|
/**
|
|
* Prepends inputPath to the PATH (for this action and future actions)
|
|
* @param inputPath
|
|
*/
|
|
function addPath(inputPath) {
|
|
const filePath = process.env['GITHUB_PATH'] || '';
|
|
if (filePath) {
|
|
file_command_1.issueFileCommand('PATH', inputPath);
|
|
}
|
|
else {
|
|
command_1.issueCommand('add-path', {}, inputPath);
|
|
}
|
|
process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
|
|
}
|
|
exports.addPath = addPath;
|
|
/**
|
|
* Gets the value of an input.
|
|
* Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.
|
|
* Returns an empty string if the value is not defined.
|
|
*
|
|
* @param name name of the input to get
|
|
* @param options optional. See InputOptions.
|
|
* @returns string
|
|
*/
|
|
function getInput(name, options) {
|
|
const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';
|
|
if (options && options.required && !val) {
|
|
throw new Error(`Input required and not supplied: ${name}`);
|
|
}
|
|
if (options && options.trimWhitespace === false) {
|
|
return val;
|
|
}
|
|
return val.trim();
|
|
}
|
|
exports.getInput = getInput;
|
|
/**
|
|
* Gets the values of a multiline input. Each value is also trimmed.
|
|
*
|
|
* @param name name of the input to get
|
|
* @param options optional. See InputOptions.
|
|
* @returns string[]
|
|
*
|
|
*/
|
|
function getMultilineInput(name, options) {
|
|
const inputs = getInput(name, options)
|
|
.split('\n')
|
|
.filter(x => x !== '');
|
|
if (options && options.trimWhitespace === false) {
|
|
return inputs;
|
|
}
|
|
return inputs.map(input => input.trim());
|
|
}
|
|
exports.getMultilineInput = getMultilineInput;
|
|
/**
|
|
* Gets the input value of the boolean type in the YAML 1.2 "core schema" specification.
|
|
* Support boolean input list: `true | True | TRUE | false | False | FALSE` .
|
|
* The return value is also in boolean type.
|
|
* ref: https://yaml.org/spec/1.2/spec.html#id2804923
|
|
*
|
|
* @param name name of the input to get
|
|
* @param options optional. See InputOptions.
|
|
* @returns boolean
|
|
*/
|
|
function getBooleanInput(name, options) {
|
|
const trueValue = ['true', 'True', 'TRUE'];
|
|
const falseValue = ['false', 'False', 'FALSE'];
|
|
const val = getInput(name, options);
|
|
if (trueValue.includes(val))
|
|
return true;
|
|
if (falseValue.includes(val))
|
|
return false;
|
|
throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` +
|
|
`Support boolean input list: \`true | True | TRUE | false | False | FALSE\``);
|
|
}
|
|
exports.getBooleanInput = getBooleanInput;
|
|
/**
|
|
* Sets the value of an output.
|
|
*
|
|
* @param name name of the output to set
|
|
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
|
|
*/
|
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
function setOutput(name, value) {
|
|
const filePath = process.env['GITHUB_OUTPUT'] || '';
|
|
if (filePath) {
|
|
return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));
|
|
}
|
|
process.stdout.write(os.EOL);
|
|
command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));
|
|
}
|
|
exports.setOutput = setOutput;
|
|
/**
|
|
* Enables or disables the echoing of commands into stdout for the rest of the step.
|
|
* Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
|
|
*
|
|
*/
|
|
function setCommandEcho(enabled) {
|
|
command_1.issue('echo', enabled ? 'on' : 'off');
|
|
}
|
|
exports.setCommandEcho = setCommandEcho;
|
|
//-----------------------------------------------------------------------
|
|
// Results
|
|
//-----------------------------------------------------------------------
|
|
/**
|
|
* Sets the action status to failed.
|
|
* When the action exits it will be with an exit code of 1
|
|
* @param message add error issue message
|
|
*/
|
|
function setFailed(message) {
|
|
process.exitCode = ExitCode.Failure;
|
|
error(message);
|
|
}
|
|
exports.setFailed = setFailed;
|
|
//-----------------------------------------------------------------------
|
|
// Logging Commands
|
|
//-----------------------------------------------------------------------
|
|
/**
|
|
* Gets whether Actions Step Debug is on or not
|
|
*/
|
|
function isDebug() {
|
|
return process.env['RUNNER_DEBUG'] === '1';
|
|
}
|
|
exports.isDebug = isDebug;
|
|
/**
|
|
* Writes debug message to user log
|
|
* @param message debug message
|
|
*/
|
|
function debug(message) {
|
|
command_1.issueCommand('debug', {}, message);
|
|
}
|
|
exports.debug = debug;
|
|
/**
|
|
* Adds an error issue
|
|
* @param message error issue message. Errors will be converted to string via toString()
|
|
* @param properties optional properties to add to the annotation.
|
|
*/
|
|
function error(message, properties = {}) {
|
|
command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
|
|
}
|
|
exports.error = error;
|
|
/**
|
|
* Adds a warning issue
|
|
* @param message warning issue message. Errors will be converted to string via toString()
|
|
* @param properties optional properties to add to the annotation.
|
|
*/
|
|
function warning(message, properties = {}) {
|
|
command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
|
|
}
|
|
exports.warning = warning;
|
|
/**
|
|
* Adds a notice issue
|
|
* @param message notice issue message. Errors will be converted to string via toString()
|
|
* @param properties optional properties to add to the annotation.
|
|
*/
|
|
function notice(message, properties = {}) {
|
|
command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
|
|
}
|
|
exports.notice = notice;
|
|
/**
|
|
* Writes info to log with console.log.
|
|
* @param message info message
|
|
*/
|
|
function info(message) {
|
|
process.stdout.write(message + os.EOL);
|
|
}
|
|
exports.info = info;
|
|
/**
|
|
* Begin an output group.
|
|
*
|
|
* Output until the next `groupEnd` will be foldable in this group
|
|
*
|
|
* @param name The name of the output group
|
|
*/
|
|
function startGroup(name) {
|
|
command_1.issue('group', name);
|
|
}
|
|
exports.startGroup = startGroup;
|
|
/**
|
|
* End an output group.
|
|
*/
|
|
function endGroup() {
|
|
command_1.issue('endgroup');
|
|
}
|
|
exports.endGroup = endGroup;
|
|
/**
|
|
* Wrap an asynchronous function call in a group.
|
|
*
|
|
* Returns the same type as the function itself.
|
|
*
|
|
* @param name The name of the group
|
|
* @param fn The function to wrap in the group
|
|
*/
|
|
function group(name, fn) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
startGroup(name);
|
|
let result;
|
|
try {
|
|
result = yield fn();
|
|
}
|
|
finally {
|
|
endGroup();
|
|
}
|
|
return result;
|
|
});
|
|
}
|
|
exports.group = group;
|
|
//-----------------------------------------------------------------------
|
|
// Wrapper action state
|
|
//-----------------------------------------------------------------------
|
|
/**
|
|
* Saves state for current action, the state can only be retrieved by this action's post job execution.
|
|
*
|
|
* @param name name of the state to store
|
|
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
|
|
*/
|
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
function saveState(name, value) {
|
|
const filePath = process.env['GITHUB_STATE'] || '';
|
|
if (filePath) {
|
|
return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));
|
|
}
|
|
command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));
|
|
}
|
|
exports.saveState = saveState;
|
|
/**
|
|
* Gets the value of a state set by this action's main execution.
|
|
*
|
|
* @param name name of the state to get
|
|
* @returns string
|
|
*/
|
|
function getState(name) {
|
|
return process.env[`STATE_${name}`] || '';
|
|
}
|
|
exports.getState = getState;
|
|
function getIDToken(aud) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return yield oidc_utils_1.OidcClient.getIDToken(aud);
|
|
});
|
|
}
|
|
exports.getIDToken = getIDToken;
|
|
/**
|
|
* Summary exports
|
|
*/
|
|
var summary_1 = __nccwpck_require__(1327);
|
|
Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } }));
|
|
/**
|
|
* @deprecated use core.summary
|
|
*/
|
|
var summary_2 = __nccwpck_require__(1327);
|
|
Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } }));
|
|
/**
|
|
* Path exports
|
|
*/
|
|
var path_utils_1 = __nccwpck_require__(2981);
|
|
Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } }));
|
|
Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } }));
|
|
Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } }));
|
|
//# sourceMappingURL=core.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 717:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
// For internal use, subject to change.
|
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
|
}) : (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
o[k2] = m[k];
|
|
}));
|
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
}) : function(o, v) {
|
|
o["default"] = v;
|
|
});
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
__setModuleDefault(result, mod);
|
|
return result;
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.prepareKeyValueMessage = exports.issueFileCommand = void 0;
|
|
// We use any as a valid input type
|
|
/* eslint-disable @typescript-eslint/no-explicit-any */
|
|
const fs = __importStar(__nccwpck_require__(7147));
|
|
const os = __importStar(__nccwpck_require__(2037));
|
|
const uuid_1 = __nccwpck_require__(5840);
|
|
const utils_1 = __nccwpck_require__(5278);
|
|
function issueFileCommand(command, message) {
|
|
const filePath = process.env[`GITHUB_${command}`];
|
|
if (!filePath) {
|
|
throw new Error(`Unable to find environment variable for file command ${command}`);
|
|
}
|
|
if (!fs.existsSync(filePath)) {
|
|
throw new Error(`Missing file at path: ${filePath}`);
|
|
}
|
|
fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {
|
|
encoding: 'utf8'
|
|
});
|
|
}
|
|
exports.issueFileCommand = issueFileCommand;
|
|
function prepareKeyValueMessage(key, value) {
|
|
const delimiter = `ghadelimiter_${uuid_1.v4()}`;
|
|
const convertedValue = utils_1.toCommandValue(value);
|
|
// These should realistically never happen, but just in case someone finds a
|
|
// way to exploit uuid generation let's not allow keys or values that contain
|
|
// the delimiter.
|
|
if (key.includes(delimiter)) {
|
|
throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
|
|
}
|
|
if (convertedValue.includes(delimiter)) {
|
|
throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
|
|
}
|
|
return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;
|
|
}
|
|
exports.prepareKeyValueMessage = prepareKeyValueMessage;
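// Illustrative shape of the message built above (MY_KEY and the value are hypothetical; the
// delimiter embeds a random UUID so user-supplied content cannot terminate the block early):
//   MY_KEY<<ghadelimiter_1b2c3d4e-....
//   some value
//   ghadelimiter_1b2c3d4e-....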
|
|
//# sourceMappingURL=file-command.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 8041:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
});
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.OidcClient = void 0;
|
|
const http_client_1 = __nccwpck_require__(6255);
|
|
const auth_1 = __nccwpck_require__(5526);
|
|
const core_1 = __nccwpck_require__(2186);
|
|
class OidcClient {
|
|
static createHttpClient(allowRetry = true, maxRetry = 10) {
|
|
const requestOptions = {
|
|
allowRetries: allowRetry,
|
|
maxRetries: maxRetry
|
|
};
|
|
return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);
|
|
}
|
|
static getRequestToken() {
|
|
const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];
|
|
if (!token) {
|
|
throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');
|
|
}
|
|
return token;
|
|
}
|
|
static getIDTokenUrl() {
|
|
const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];
|
|
if (!runtimeUrl) {
|
|
throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');
|
|
}
|
|
return runtimeUrl;
|
|
}
|
|
static getCall(id_token_url) {
|
|
var _a;
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
const httpclient = OidcClient.createHttpClient();
|
|
const res = yield httpclient
|
|
.getJson(id_token_url)
|
|
.catch(error => {
|
|
throw new Error(`Failed to get ID Token. \n
|
|
Error Code : ${error.statusCode}\n
|
|
Error Message: ${error.message}`);
|
|
});
|
|
const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;
|
|
if (!id_token) {
|
|
throw new Error('Response json body do not have ID Token field');
|
|
}
|
|
return id_token;
|
|
});
|
|
}
|
|
static getIDToken(audience) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
try {
|
|
// New ID Token is requested from action service
|
|
let id_token_url = OidcClient.getIDTokenUrl();
|
|
if (audience) {
|
|
const encodedAudience = encodeURIComponent(audience);
|
|
id_token_url = `${id_token_url}&audience=${encodedAudience}`;
|
|
}
|
|
core_1.debug(`ID token url is ${id_token_url}`);
|
|
const id_token = yield OidcClient.getCall(id_token_url);
|
|
core_1.setSecret(id_token);
|
|
return id_token;
|
|
}
|
|
catch (error) {
|
|
throw new Error(`Error message: ${error.message}`);
|
|
}
|
|
});
|
|
}
|
|
}
|
|
exports.OidcClient = OidcClient;
|
|
//# sourceMappingURL=oidc-utils.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2981:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
|
}) : (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
o[k2] = m[k];
|
|
}));
|
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
}) : function(o, v) {
|
|
o["default"] = v;
|
|
});
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
__setModuleDefault(result, mod);
|
|
return result;
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;
|
|
const path = __importStar(__nccwpck_require__(1017));
|
|
/**
|
|
* toPosixPath converts the given path to the posix form. On Windows, \\ will be
|
|
* replaced with /.
|
|
*
|
|
* @param pth. Path to transform.
|
|
* @return string Posix path.
|
|
*/
|
|
function toPosixPath(pth) {
|
|
return pth.replace(/[\\]/g, '/');
|
|
}
|
|
exports.toPosixPath = toPosixPath;
|
|
/**
|
|
* toWin32Path converts the given path to the win32 form. On Linux, / will be
|
|
* replaced with \\.
|
|
*
|
|
* @param pth. Path to transform.
|
|
* @return string Win32 path.
|
|
*/
|
|
function toWin32Path(pth) {
|
|
return pth.replace(/[/]/g, '\\');
|
|
}
|
|
exports.toWin32Path = toWin32Path;
|
|
/**
|
|
* toPlatformPath converts the given path to a platform-specific path. It does
|
|
* this by replacing instances of / and \ with the platform-specific path
|
|
* separator.
|
|
*
|
|
* @param pth The path to platformize.
|
|
* @return string The platform-specific path.
|
|
*/
|
|
function toPlatformPath(pth) {
|
|
return pth.replace(/[/\\]/g, path.sep);
|
|
}
|
|
exports.toPlatformPath = toPlatformPath;
|
|
//# sourceMappingURL=path-utils.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1327:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
});
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;
|
|
const os_1 = __nccwpck_require__(2037);
|
|
const fs_1 = __nccwpck_require__(7147);
|
|
const { access, appendFile, writeFile } = fs_1.promises;
|
|
exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';
|
|
exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';
|
|
class Summary {
|
|
constructor() {
|
|
this._buffer = '';
|
|
}
|
|
/**
|
|
* Finds the summary file path from the environment, rejects if env var is not found or file does not exist
|
|
* Also checks r/w permissions.
|
|
*
|
|
* @returns step summary file path
|
|
*/
|
|
filePath() {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
if (this._filePath) {
|
|
return this._filePath;
|
|
}
|
|
const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];
|
|
if (!pathFromEnv) {
|
|
throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
|
|
}
|
|
try {
|
|
yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
|
|
}
|
|
catch (_a) {
|
|
throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
|
|
}
|
|
this._filePath = pathFromEnv;
|
|
return this._filePath;
|
|
});
|
|
}
|
|
/**
|
|
* Wraps content in an HTML tag, adding any HTML attributes
|
|
*
|
|
* @param {string} tag HTML tag to wrap
|
|
* @param {string | null} content content within the tag
|
|
* @param {[attribute: string]: string} attrs key-value list of HTML attributes to add
|
|
*
|
|
* @returns {string} content wrapped in HTML element
|
|
*/
|
|
wrap(tag, content, attrs = {}) {
|
|
const htmlAttrs = Object.entries(attrs)
|
|
.map(([key, value]) => ` ${key}="${value}"`)
|
|
.join('');
|
|
if (!content) {
|
|
return `<${tag}${htmlAttrs}>`;
|
|
}
|
|
return `<${tag}${htmlAttrs}>${content}</${tag}>`;
|
|
}
|
|
/**
|
|
* Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.
|
|
*
|
|
* @param {SummaryWriteOptions} [options] (optional) options for write operation
|
|
*
|
|
* @returns {Promise<Summary>} summary instance
|
|
*/
|
|
write(options) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);
|
|
const filePath = yield this.filePath();
|
|
const writeFunc = overwrite ? writeFile : appendFile;
|
|
yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });
|
|
return this.emptyBuffer();
|
|
});
|
|
}
|
|
/**
|
|
* Clears the summary buffer and wipes the summary file
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
clear() {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return this.emptyBuffer().write({ overwrite: true });
|
|
});
|
|
}
|
|
/**
|
|
* Returns the current summary buffer as a string
|
|
*
|
|
* @returns {string} string of summary buffer
|
|
*/
|
|
stringify() {
|
|
return this._buffer;
|
|
}
|
|
/**
|
|
* If the summary buffer is empty
|
|
*
|
|
* @returns {boolean} true if the buffer is empty
|
|
*/
|
|
isEmptyBuffer() {
|
|
return this._buffer.length === 0;
|
|
}
|
|
/**
|
|
* Resets the summary buffer without writing to summary file
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
emptyBuffer() {
|
|
this._buffer = '';
|
|
return this;
|
|
}
|
|
/**
|
|
* Adds raw text to the summary buffer
|
|
*
|
|
* @param {string} text content to add
|
|
* @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
addRaw(text, addEOL = false) {
|
|
this._buffer += text;
|
|
return addEOL ? this.addEOL() : this;
|
|
}
|
|
/**
|
|
* Adds the operating system-specific end-of-line marker to the buffer
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
addEOL() {
|
|
return this.addRaw(os_1.EOL);
|
|
}
|
|
/**
|
|
* Adds an HTML codeblock to the summary buffer
|
|
*
|
|
* @param {string} code content to render within fenced code block
|
|
* @param {string} lang (optional) language to syntax highlight code
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
addCodeBlock(code, lang) {
|
|
const attrs = Object.assign({}, (lang && { lang }));
|
|
const element = this.wrap('pre', this.wrap('code', code), attrs);
|
|
return this.addRaw(element).addEOL();
|
|
}
|
|
/**
|
|
* Adds an HTML list to the summary buffer
|
|
*
|
|
* @param {string[]} items list of items to render
|
|
* @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
addList(items, ordered = false) {
|
|
const tag = ordered ? 'ol' : 'ul';
|
|
const listItems = items.map(item => this.wrap('li', item)).join('');
|
|
const element = this.wrap(tag, listItems);
|
|
return this.addRaw(element).addEOL();
|
|
}
|
|
/**
|
|
* Adds an HTML table to the summary buffer
|
|
*
|
|
* @param {SummaryTableCell[]} rows table rows
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
addTable(rows) {
|
|
const tableBody = rows
|
|
.map(row => {
|
|
const cells = row
|
|
.map(cell => {
|
|
if (typeof cell === 'string') {
|
|
return this.wrap('td', cell);
|
|
}
|
|
const { header, data, colspan, rowspan } = cell;
|
|
const tag = header ? 'th' : 'td';
|
|
const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));
|
|
return this.wrap(tag, data, attrs);
|
|
})
|
|
.join('');
|
|
return this.wrap('tr', cells);
|
|
})
|
|
.join('');
|
|
const element = this.wrap('table', tableBody);
|
|
return this.addRaw(element).addEOL();
|
|
}
|
|
/**
|
|
* Adds a collapsible HTML details element to the summary buffer
|
|
*
|
|
* @param {string} label text for the closed state
|
|
* @param {string} content collapsible content
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
addDetails(label, content) {
|
|
const element = this.wrap('details', this.wrap('summary', label) + content);
|
|
return this.addRaw(element).addEOL();
|
|
}
|
|
/**
|
|
* Adds an HTML image tag to the summary buffer
|
|
*
|
|
* @param {string} src path to the image you want to embed
|
|
* @param {string} alt text description of the image
|
|
* @param {SummaryImageOptions} options (optional) additional image attributes
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
addImage(src, alt, options) {
|
|
const { width, height } = options || {};
|
|
const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));
|
|
const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));
|
|
return this.addRaw(element).addEOL();
|
|
}
|
|
/**
|
|
* Adds an HTML section heading element
|
|
*
|
|
* @param {string} text heading text
|
|
* @param {number | string} [level=1] (optional) the heading level, default: 1
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
addHeading(text, level) {
|
|
const tag = `h${level}`;
|
|
const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)
|
|
? tag
|
|
: 'h1';
|
|
const element = this.wrap(allowedTag, text);
|
|
return this.addRaw(element).addEOL();
|
|
}
|
|
/**
|
|
* Adds an HTML thematic break (<hr>) to the summary buffer
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
addSeparator() {
|
|
const element = this.wrap('hr', null);
|
|
return this.addRaw(element).addEOL();
|
|
}
|
|
/**
|
|
* Adds an HTML line break (<br>) to the summary buffer
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
addBreak() {
|
|
const element = this.wrap('br', null);
|
|
return this.addRaw(element).addEOL();
|
|
}
|
|
/**
|
|
* Adds an HTML blockquote to the summary buffer
|
|
*
|
|
* @param {string} text quote text
|
|
* @param {string} cite (optional) citation url
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
addQuote(text, cite) {
|
|
const attrs = Object.assign({}, (cite && { cite }));
|
|
const element = this.wrap('blockquote', text, attrs);
|
|
return this.addRaw(element).addEOL();
|
|
}
|
|
/**
|
|
* Adds an HTML anchor tag to the summary buffer
|
|
*
|
|
* @param {string} text link text/content
|
|
* @param {string} href hyperlink
|
|
*
|
|
* @returns {Summary} summary instance
|
|
*/
|
|
addLink(text, href) {
|
|
const element = this.wrap('a', text, { href });
|
|
return this.addRaw(element).addEOL();
|
|
}
|
|
}
|
|
const _summary = new Summary();
|
|
/**
|
|
* @deprecated use `core.summary`
|
|
*/
|
|
exports.markdownSummary = _summary;
|
|
exports.summary = _summary;
|
|
//# sourceMappingURL=summary.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5278:
|
|
/***/ ((__unused_webpack_module, exports) => {
|
|
|
|
"use strict";
|
|
|
|
// We use any as a valid input type
|
|
/* eslint-disable @typescript-eslint/no-explicit-any */
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.toCommandProperties = exports.toCommandValue = void 0;
|
|
/**
|
|
* Sanitizes an input into a string so it can be passed into issueCommand safely
|
|
* @param input input to sanitize into a string
|
|
*/
|
|
function toCommandValue(input) {
|
|
if (input === null || input === undefined) {
|
|
return '';
|
|
}
|
|
else if (typeof input === 'string' || input instanceof String) {
|
|
return input;
|
|
}
|
|
return JSON.stringify(input);
|
|
}
|
|
exports.toCommandValue = toCommandValue;
|
|
/**
|
|
*
|
|
* @param annotationProperties
|
|
* @returns The command properties to send with the actual annotation command
|
|
* See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646
|
|
*/
|
|
function toCommandProperties(annotationProperties) {
|
|
if (!Object.keys(annotationProperties).length) {
|
|
return {};
|
|
}
|
|
return {
|
|
title: annotationProperties.title,
|
|
file: annotationProperties.file,
|
|
line: annotationProperties.startLine,
|
|
endLine: annotationProperties.endLine,
|
|
col: annotationProperties.startColumn,
|
|
endColumn: annotationProperties.endColumn
|
|
};
|
|
}
|
|
exports.toCommandProperties = toCommandProperties;
|
|
//# sourceMappingURL=utils.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5526:
|
|
/***/ (function(__unused_webpack_module, exports) {
|
|
|
|
"use strict";
|
|
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
});
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;
|
|
class BasicCredentialHandler {
|
|
constructor(username, password) {
|
|
this.username = username;
|
|
this.password = password;
|
|
}
|
|
prepareRequest(options) {
|
|
if (!options.headers) {
|
|
throw Error('The request has no headers');
|
|
}
|
|
options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;
|
|
}
|
|
// This handler cannot handle 401
|
|
canHandleAuthentication() {
|
|
return false;
|
|
}
|
|
handleAuthentication() {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
throw new Error('not implemented');
|
|
});
|
|
}
|
|
}
|
|
exports.BasicCredentialHandler = BasicCredentialHandler;
|
|
class BearerCredentialHandler {
|
|
constructor(token) {
|
|
this.token = token;
|
|
}
|
|
// currently implements pre-authorization
|
|
// TODO: support preAuth = false where it hooks on 401
|
|
prepareRequest(options) {
|
|
if (!options.headers) {
|
|
throw Error('The request has no headers');
|
|
}
|
|
options.headers['Authorization'] = `Bearer ${this.token}`;
|
|
}
|
|
// This handler cannot handle 401
|
|
canHandleAuthentication() {
|
|
return false;
|
|
}
|
|
handleAuthentication() {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
throw new Error('not implemented');
|
|
});
|
|
}
|
|
}
|
|
exports.BearerCredentialHandler = BearerCredentialHandler;
|
|
class PersonalAccessTokenCredentialHandler {
|
|
constructor(token) {
|
|
this.token = token;
|
|
}
|
|
// currently implements pre-authorization
|
|
// TODO: support preAuth = false where it hooks on 401
|
|
prepareRequest(options) {
|
|
if (!options.headers) {
|
|
throw Error('The request has no headers');
|
|
}
|
|
options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;
|
|
}
|
|
// This handler cannot handle 401
|
|
canHandleAuthentication() {
|
|
return false;
|
|
}
|
|
handleAuthentication() {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
throw new Error('not implemented');
|
|
});
|
|
}
|
|
}
|
|
exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;
|
|
//# sourceMappingURL=auth.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 6255:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
/* eslint-disable @typescript-eslint/no-explicit-any */
|
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
|
}) : (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
o[k2] = m[k];
|
|
}));
|
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
}) : function(o, v) {
|
|
o["default"] = v;
|
|
});
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
__setModuleDefault(result, mod);
|
|
return result;
|
|
};
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
});
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;
|
|
const http = __importStar(__nccwpck_require__(3685));
|
|
const https = __importStar(__nccwpck_require__(5687));
|
|
const pm = __importStar(__nccwpck_require__(9835));
|
|
const tunnel = __importStar(__nccwpck_require__(4294));
|
|
var HttpCodes;
|
|
(function (HttpCodes) {
|
|
HttpCodes[HttpCodes["OK"] = 200] = "OK";
|
|
HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
|
|
HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
|
|
HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
|
|
HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
|
|
HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
|
|
HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
|
|
HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
|
|
HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
|
|
HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
|
|
HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
|
|
HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
|
|
HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
|
|
HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
|
|
HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
|
|
HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
|
|
HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
|
|
HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
|
|
HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
|
|
HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
|
|
HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
|
|
HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
|
|
HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
|
|
HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
|
|
HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
|
|
HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
|
|
HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
|
|
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
|
|
var Headers;
|
|
(function (Headers) {
|
|
Headers["Accept"] = "accept";
|
|
Headers["ContentType"] = "content-type";
|
|
})(Headers = exports.Headers || (exports.Headers = {}));
|
|
var MediaTypes;
|
|
(function (MediaTypes) {
|
|
MediaTypes["ApplicationJson"] = "application/json";
|
|
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
|
|
/**
|
|
* Returns the proxy URL, depending upon the supplied url and proxy environment variables.
|
|
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
|
|
*/
|
|
function getProxyUrl(serverUrl) {
|
|
const proxyUrl = pm.getProxyUrl(new URL(serverUrl));
|
|
return proxyUrl ? proxyUrl.href : '';
|
|
}
|
|
exports.getProxyUrl = getProxyUrl;
|
|
const HttpRedirectCodes = [
|
|
HttpCodes.MovedPermanently,
|
|
HttpCodes.ResourceMoved,
|
|
HttpCodes.SeeOther,
|
|
HttpCodes.TemporaryRedirect,
|
|
HttpCodes.PermanentRedirect
|
|
];
|
|
const HttpResponseRetryCodes = [
|
|
HttpCodes.BadGateway,
|
|
HttpCodes.ServiceUnavailable,
|
|
HttpCodes.GatewayTimeout
|
|
];
|
|
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
|
|
const ExponentialBackoffCeiling = 10;
|
|
const ExponentialBackoffTimeSlice = 5;
|
|
class HttpClientError extends Error {
|
|
constructor(message, statusCode) {
|
|
super(message);
|
|
this.name = 'HttpClientError';
|
|
this.statusCode = statusCode;
|
|
Object.setPrototypeOf(this, HttpClientError.prototype);
|
|
}
|
|
}
|
|
exports.HttpClientError = HttpClientError;
|
|
class HttpClientResponse {
|
|
constructor(message) {
|
|
this.message = message;
|
|
}
|
|
readBody() {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
|
|
let output = Buffer.alloc(0);
|
|
this.message.on('data', (chunk) => {
|
|
output = Buffer.concat([output, chunk]);
|
|
});
|
|
this.message.on('end', () => {
|
|
resolve(output.toString());
|
|
});
|
|
}));
|
|
});
|
|
}
|
|
}
|
|
exports.HttpClientResponse = HttpClientResponse;
|
|
function isHttps(requestUrl) {
|
|
const parsedUrl = new URL(requestUrl);
|
|
return parsedUrl.protocol === 'https:';
|
|
}
|
|
exports.isHttps = isHttps;
|
|
class HttpClient {
|
|
constructor(userAgent, handlers, requestOptions) {
|
|
this._ignoreSslError = false;
|
|
this._allowRedirects = true;
|
|
this._allowRedirectDowngrade = false;
|
|
this._maxRedirects = 50;
|
|
this._allowRetries = false;
|
|
this._maxRetries = 1;
|
|
this._keepAlive = false;
|
|
this._disposed = false;
|
|
this.userAgent = userAgent;
|
|
this.handlers = handlers || [];
|
|
this.requestOptions = requestOptions;
|
|
if (requestOptions) {
|
|
if (requestOptions.ignoreSslError != null) {
|
|
this._ignoreSslError = requestOptions.ignoreSslError;
|
|
}
|
|
this._socketTimeout = requestOptions.socketTimeout;
|
|
if (requestOptions.allowRedirects != null) {
|
|
this._allowRedirects = requestOptions.allowRedirects;
|
|
}
|
|
if (requestOptions.allowRedirectDowngrade != null) {
|
|
this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
|
|
}
|
|
if (requestOptions.maxRedirects != null) {
|
|
this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
|
|
}
|
|
if (requestOptions.keepAlive != null) {
|
|
this._keepAlive = requestOptions.keepAlive;
|
|
}
|
|
if (requestOptions.allowRetries != null) {
|
|
this._allowRetries = requestOptions.allowRetries;
|
|
}
|
|
if (requestOptions.maxRetries != null) {
|
|
this._maxRetries = requestOptions.maxRetries;
|
|
}
|
|
}
|
|
}
|
|
options(requestUrl, additionalHeaders) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
|
|
});
|
|
}
|
|
get(requestUrl, additionalHeaders) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return this.request('GET', requestUrl, null, additionalHeaders || {});
|
|
});
|
|
}
|
|
del(requestUrl, additionalHeaders) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return this.request('DELETE', requestUrl, null, additionalHeaders || {});
|
|
});
|
|
}
|
|
post(requestUrl, data, additionalHeaders) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return this.request('POST', requestUrl, data, additionalHeaders || {});
|
|
});
|
|
}
|
|
patch(requestUrl, data, additionalHeaders) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return this.request('PATCH', requestUrl, data, additionalHeaders || {});
|
|
});
|
|
}
|
|
put(requestUrl, data, additionalHeaders) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return this.request('PUT', requestUrl, data, additionalHeaders || {});
|
|
});
|
|
}
|
|
head(requestUrl, additionalHeaders) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return this.request('HEAD', requestUrl, null, additionalHeaders || {});
|
|
});
|
|
}
|
|
sendStream(verb, requestUrl, stream, additionalHeaders) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return this.request(verb, requestUrl, stream, additionalHeaders);
|
|
});
|
|
}
|
|
/**
 * Gets a typed object from an endpoint.
 * Be aware that a 404 (not found) resolves with a null result. Other errors (4xx, 5xx) reject the promise.
 */
|
|
getJson(requestUrl, additionalHeaders = {}) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
|
|
const res = yield this.get(requestUrl, additionalHeaders);
|
|
return this._processResponse(res, this.requestOptions);
|
|
});
|
|
}
|
|
postJson(requestUrl, obj, additionalHeaders = {}) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
const data = JSON.stringify(obj, null, 2);
|
|
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
|
|
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
|
|
const res = yield this.post(requestUrl, data, additionalHeaders);
|
|
return this._processResponse(res, this.requestOptions);
|
|
});
|
|
}
|
|
putJson(requestUrl, obj, additionalHeaders = {}) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
const data = JSON.stringify(obj, null, 2);
|
|
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
|
|
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
|
|
const res = yield this.put(requestUrl, data, additionalHeaders);
|
|
return this._processResponse(res, this.requestOptions);
|
|
});
|
|
}
|
|
patchJson(requestUrl, obj, additionalHeaders = {}) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
const data = JSON.stringify(obj, null, 2);
|
|
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
|
|
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
|
|
const res = yield this.patch(requestUrl, data, additionalHeaders);
|
|
return this._processResponse(res, this.requestOptions);
|
|
});
|
|
}
|
|
/**
 * Makes a raw HTTP request.
 * All other verb helpers such as get, post, and patch ultimately call this.
 * Prefer get, del, post, and patch.
 */
|
|
request(verb, requestUrl, data, headers) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
if (this._disposed) {
|
|
throw new Error('Client has already been disposed.');
|
|
}
|
|
const parsedUrl = new URL(requestUrl);
|
|
let info = this._prepareRequest(verb, parsedUrl, headers);
|
|
// Only perform retries on reads since writes may not be idempotent.
|
|
const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)
|
|
? this._maxRetries + 1
|
|
: 1;
|
|
let numTries = 0;
|
|
let response;
|
|
do {
|
|
response = yield this.requestRaw(info, data);
|
|
// Check if it's an authentication challenge
|
|
if (response &&
|
|
response.message &&
|
|
response.message.statusCode === HttpCodes.Unauthorized) {
|
|
let authenticationHandler;
|
|
for (const handler of this.handlers) {
|
|
if (handler.canHandleAuthentication(response)) {
|
|
authenticationHandler = handler;
|
|
break;
|
|
}
|
|
}
|
|
if (authenticationHandler) {
|
|
return authenticationHandler.handleAuthentication(this, info, data);
|
|
}
|
|
else {
|
|
// We have received an unauthorized response but have no handlers to handle it.
|
|
// Let the response return to the caller.
|
|
return response;
|
|
}
|
|
}
|
|
let redirectsRemaining = this._maxRedirects;
|
|
while (response.message.statusCode &&
|
|
HttpRedirectCodes.includes(response.message.statusCode) &&
|
|
this._allowRedirects &&
|
|
redirectsRemaining > 0) {
|
|
const redirectUrl = response.message.headers['location'];
|
|
if (!redirectUrl) {
|
|
// if there's no location to redirect to, we won't
|
|
break;
|
|
}
|
|
const parsedRedirectUrl = new URL(redirectUrl);
|
|
if (parsedUrl.protocol === 'https:' &&
|
|
parsedUrl.protocol !== parsedRedirectUrl.protocol &&
|
|
!this._allowRedirectDowngrade) {
|
|
throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
|
|
}
|
|
// we need to finish reading the response before reassigning response
|
|
// which will leak the open socket.
|
|
yield response.readBody();
|
|
// strip authorization header if redirected to a different hostname
|
|
if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
|
|
for (const header in headers) {
|
|
// header names are case insensitive
|
|
if (header.toLowerCase() === 'authorization') {
|
|
delete headers[header];
|
|
}
|
|
}
|
|
}
|
|
// let's make the request with the new redirectUrl
|
|
info = this._prepareRequest(verb, parsedRedirectUrl, headers);
|
|
response = yield this.requestRaw(info, data);
|
|
redirectsRemaining--;
|
|
}
|
|
if (!response.message.statusCode ||
|
|
!HttpResponseRetryCodes.includes(response.message.statusCode)) {
|
|
// If not a retry code, return immediately instead of retrying
|
|
return response;
|
|
}
|
|
numTries += 1;
|
|
if (numTries < maxTries) {
|
|
yield response.readBody();
|
|
yield this._performExponentialBackoff(numTries);
|
|
}
|
|
} while (numTries < maxTries);
|
|
return response;
|
|
});
|
|
}
|
|
/**
|
|
* Needs to be called if keepAlive is set to true in request options.
|
|
*/
|
|
dispose() {
|
|
if (this._agent) {
|
|
this._agent.destroy();
|
|
}
|
|
this._disposed = true;
|
|
}
|
|
/**
|
|
* Raw request.
|
|
* @param info
|
|
* @param data
|
|
*/
|
|
requestRaw(info, data) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return new Promise((resolve, reject) => {
|
|
function callbackForResult(err, res) {
|
|
if (err) {
|
|
reject(err);
|
|
}
|
|
else if (!res) {
|
|
// If `err` is not passed, then `res` must be passed.
|
|
reject(new Error('Unknown error'));
|
|
}
|
|
else {
|
|
resolve(res);
|
|
}
|
|
}
|
|
this.requestRawWithCallback(info, data, callbackForResult);
|
|
});
|
|
});
|
|
}
|
|
/**
|
|
* Raw request with callback.
|
|
* @param info
|
|
* @param data
|
|
* @param onResult
|
|
*/
|
|
requestRawWithCallback(info, data, onResult) {
|
|
if (typeof data === 'string') {
|
|
if (!info.options.headers) {
|
|
info.options.headers = {};
|
|
}
|
|
info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
|
|
}
|
|
let callbackCalled = false;
|
|
function handleResult(err, res) {
|
|
if (!callbackCalled) {
|
|
callbackCalled = true;
|
|
onResult(err, res);
|
|
}
|
|
}
|
|
const req = info.httpModule.request(info.options, (msg) => {
|
|
const res = new HttpClientResponse(msg);
|
|
handleResult(undefined, res);
|
|
});
|
|
let socket;
|
|
req.on('socket', sock => {
|
|
socket = sock;
|
|
});
|
|
// If we ever get disconnected, we want the socket to timeout eventually
|
|
req.setTimeout(this._socketTimeout || 3 * 60000, () => {
|
|
if (socket) {
|
|
socket.end();
|
|
}
|
|
handleResult(new Error(`Request timeout: ${info.options.path}`));
|
|
});
|
|
req.on('error', function (err) {
|
|
// err has statusCode property
|
|
// res should have headers
|
|
handleResult(err);
|
|
});
|
|
if (data && typeof data === 'string') {
|
|
req.write(data, 'utf8');
|
|
}
|
|
if (data && typeof data !== 'string') {
|
|
data.on('close', function () {
|
|
req.end();
|
|
});
|
|
data.pipe(req);
|
|
}
|
|
else {
|
|
req.end();
|
|
}
|
|
}
|
|
/**
|
|
* Gets an http agent. This function is useful when you need an http agent that handles
|
|
* routing through a proxy server - depending upon the url and proxy environment variables.
|
|
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
|
|
*/
|
|
getAgent(serverUrl) {
|
|
const parsedUrl = new URL(serverUrl);
|
|
return this._getAgent(parsedUrl);
|
|
}
|
|
_prepareRequest(method, requestUrl, headers) {
|
|
const info = {};
|
|
info.parsedUrl = requestUrl;
|
|
const usingSsl = info.parsedUrl.protocol === 'https:';
|
|
info.httpModule = usingSsl ? https : http;
|
|
const defaultPort = usingSsl ? 443 : 80;
|
|
info.options = {};
|
|
info.options.host = info.parsedUrl.hostname;
|
|
info.options.port = info.parsedUrl.port
|
|
? parseInt(info.parsedUrl.port)
|
|
: defaultPort;
|
|
info.options.path =
|
|
(info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
|
|
info.options.method = method;
|
|
info.options.headers = this._mergeHeaders(headers);
|
|
if (this.userAgent != null) {
|
|
info.options.headers['user-agent'] = this.userAgent;
|
|
}
|
|
info.options.agent = this._getAgent(info.parsedUrl);
|
|
// gives handlers an opportunity to participate
|
|
if (this.handlers) {
|
|
for (const handler of this.handlers) {
|
|
handler.prepareRequest(info.options);
|
|
}
|
|
}
|
|
return info;
|
|
}
|
|
_mergeHeaders(headers) {
|
|
if (this.requestOptions && this.requestOptions.headers) {
|
|
return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));
|
|
}
|
|
return lowercaseKeys(headers || {});
|
|
}
|
|
_getExistingOrDefaultHeader(additionalHeaders, header, _default) {
|
|
let clientHeader;
|
|
if (this.requestOptions && this.requestOptions.headers) {
|
|
clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
|
|
}
|
|
return additionalHeaders[header] || clientHeader || _default;
|
|
}
|
|
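// Agent selection below: a cached keep-alive or proxy agent is reused when present;
// otherwise a tunnel agent is built when a proxy URL applies, a fresh keep-alive agent
// is created when keepAlive is set, and the Node global agent is the fallback.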
_getAgent(parsedUrl) {
|
|
let agent;
|
|
const proxyUrl = pm.getProxyUrl(parsedUrl);
|
|
const useProxy = proxyUrl && proxyUrl.hostname;
|
|
if (this._keepAlive && useProxy) {
|
|
agent = this._proxyAgent;
|
|
}
|
|
if (this._keepAlive && !useProxy) {
|
|
agent = this._agent;
|
|
}
|
|
// if agent is already assigned use that agent.
|
|
if (agent) {
|
|
return agent;
|
|
}
|
|
const usingSsl = parsedUrl.protocol === 'https:';
|
|
let maxSockets = 100;
|
|
if (this.requestOptions) {
|
|
maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
|
|
}
|
|
// This is `useProxy` again, but we need to check `proxyUrl` directly for TypeScript's flow analysis.
|
|
if (proxyUrl && proxyUrl.hostname) {
|
|
const agentOptions = {
|
|
maxSockets,
|
|
keepAlive: this._keepAlive,
|
|
proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {
|
|
proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`
|
|
})), { host: proxyUrl.hostname, port: proxyUrl.port })
|
|
};
|
|
let tunnelAgent;
|
|
const overHttps = proxyUrl.protocol === 'https:';
|
|
if (usingSsl) {
|
|
tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
|
|
}
|
|
else {
|
|
tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
|
|
}
|
|
agent = tunnelAgent(agentOptions);
|
|
this._proxyAgent = agent;
|
|
}
|
|
// if reusing agent across request and tunneling agent isn't assigned create a new agent
|
|
if (this._keepAlive && !agent) {
|
|
const options = { keepAlive: this._keepAlive, maxSockets };
|
|
agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
|
|
this._agent = agent;
|
|
}
|
|
// if not using private agent and tunnel agent isn't setup then use global agent
|
|
if (!agent) {
|
|
agent = usingSsl ? https.globalAgent : http.globalAgent;
|
|
}
|
|
if (usingSsl && this._ignoreSslError) {
|
|
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
|
|
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
|
|
// we have to cast it to any and change it directly
|
|
agent.options = Object.assign(agent.options || {}, {
|
|
rejectUnauthorized: false
|
|
});
|
|
}
|
|
return agent;
|
|
}
|
|
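// Retry delay is ExponentialBackoffTimeSlice * 2^retryNumber ms, with retryNumber capped at
// ExponentialBackoffCeiling (so with the constants above: 10 ms, 20 ms, ... up to 5120 ms).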
_performExponentialBackoff(retryNumber) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
|
|
const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
|
|
return new Promise(resolve => setTimeout(() => resolve(), ms));
|
|
});
|
|
}
|
|
_processResponse(res, options) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
|
|
const statusCode = res.message.statusCode || 0;
|
|
const response = {
|
|
statusCode,
|
|
result: null,
|
|
headers: {}
|
|
};
|
|
// not found leads to null obj returned
|
|
if (statusCode === HttpCodes.NotFound) {
|
|
resolve(response);
|
|
}
|
|
// get the result from the body
|
|
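// JSON.parse reviver used when `deserializeDates` is set: any string value that parses
// as a valid Date is converted to a Date object; everything else is returned unchanged.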
function dateTimeDeserializer(key, value) {
|
|
if (typeof value === 'string') {
|
|
const a = new Date(value);
|
|
if (!isNaN(a.valueOf())) {
|
|
return a;
|
|
}
|
|
}
|
|
return value;
|
|
}
|
|
let obj;
|
|
let contents;
|
|
try {
|
|
contents = yield res.readBody();
|
|
if (contents && contents.length > 0) {
|
|
if (options && options.deserializeDates) {
|
|
obj = JSON.parse(contents, dateTimeDeserializer);
|
|
}
|
|
else {
|
|
obj = JSON.parse(contents);
|
|
}
|
|
response.result = obj;
|
|
}
|
|
response.headers = res.message.headers;
|
|
}
|
|
catch (err) {
|
|
// Invalid resource (contents not json); leaving result obj null
|
|
}
|
|
// note that 3xx redirects are handled by the http layer.
|
|
if (statusCode > 299) {
|
|
let msg;
|
|
// if exception/error in body, attempt to get better error
|
|
if (obj && obj.message) {
|
|
msg = obj.message;
|
|
}
|
|
else if (contents && contents.length > 0) {
|
|
// it may be the case that the exception is in the body message as string
|
|
msg = contents;
|
|
}
|
|
else {
|
|
msg = `Failed request: (${statusCode})`;
|
|
}
|
|
const err = new HttpClientError(msg, statusCode);
|
|
err.result = response.result;
|
|
reject(err);
|
|
}
|
|
else {
|
|
resolve(response);
|
|
}
|
|
}));
|
|
});
|
|
}
|
|
}
|
|
exports.HttpClient = HttpClient;
|
|
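// Copies an object with every key lowercased so headers can be merged and looked up case-insensitively.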
const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
|
|
//# sourceMappingURL=index.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 9835:
/***/ ((__unused_webpack_module, exports) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.checkBypass = exports.getProxyUrl = void 0;
function getProxyUrl(reqUrl) {
    const usingSsl = reqUrl.protocol === 'https:';
    if (checkBypass(reqUrl)) {
        return undefined;
    }
    const proxyVar = (() => {
        if (usingSsl) {
            return process.env['https_proxy'] || process.env['HTTPS_PROXY'];
        }
        else {
            return process.env['http_proxy'] || process.env['HTTP_PROXY'];
        }
    })();
    if (proxyVar) {
        return new URL(proxyVar);
    }
    else {
        return undefined;
    }
}
exports.getProxyUrl = getProxyUrl;
function checkBypass(reqUrl) {
    if (!reqUrl.hostname) {
        return false;
    }
    const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
    if (!noProxy) {
        return false;
    }
    // Determine the request port
    let reqPort;
    if (reqUrl.port) {
        reqPort = Number(reqUrl.port);
    }
    else if (reqUrl.protocol === 'http:') {
        reqPort = 80;
    }
    else if (reqUrl.protocol === 'https:') {
        reqPort = 443;
    }
    // Format the request hostname and hostname with port
    const upperReqHosts = [reqUrl.hostname.toUpperCase()];
    if (typeof reqPort === 'number') {
        upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
    }
    // Compare request host against noproxy
    for (const upperNoProxyItem of noProxy
        .split(',')
        .map(x => x.trim().toUpperCase())
        .filter(x => x)) {
        if (upperReqHosts.some(x => x === upperNoProxyItem)) {
            return true;
        }
    }
    return false;
}
exports.checkBypass = checkBypass;
//# sourceMappingURL=proxy.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 9417:
/***/ ((module) => {

"use strict";

module.exports = balanced;
function balanced(a, b, str) {
  if (a instanceof RegExp) a = maybeMatch(a, str);
  if (b instanceof RegExp) b = maybeMatch(b, str);

  var r = range(a, b, str);

  return r && {
    start: r[0],
    end: r[1],
    pre: str.slice(0, r[0]),
    body: str.slice(r[0] + a.length, r[1]),
    post: str.slice(r[1] + b.length)
  };
}

function maybeMatch(reg, str) {
  var m = str.match(reg);
  return m ? m[0] : null;
}

balanced.range = range;
function range(a, b, str) {
  var begs, beg, left, right, result;
  var ai = str.indexOf(a);
  var bi = str.indexOf(b, ai + 1);
  var i = ai;

  if (ai >= 0 && bi > 0) {
    begs = [];
    left = str.length;

    while (i >= 0 && !result) {
      if (i == ai) {
        begs.push(i);
        ai = str.indexOf(a, i + 1);
      } else if (begs.length == 1) {
        result = [ begs.pop(), bi ];
      } else {
        beg = begs.pop();
        if (beg < left) {
          left = beg;
          right = bi;
        }

        bi = str.indexOf(b, i + 1);
      }

      i = ai < bi && ai >= 0 ? ai : bi;
    }

    if (begs.length) {
      result = [ left, right ];
    }
  }

  return result;
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1046:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
var balanced = __nccwpck_require__(9417);
|
|
|
|
module.exports = expandTop;
|
|
|
|
var escSlash = '\0SLASH'+Math.random()+'\0';
|
|
var escOpen = '\0OPEN'+Math.random()+'\0';
|
|
var escClose = '\0CLOSE'+Math.random()+'\0';
|
|
var escComma = '\0COMMA'+Math.random()+'\0';
|
|
var escPeriod = '\0PERIOD'+Math.random()+'\0';
|
|
|
|
function numeric(str) {
|
|
return parseInt(str, 10) == str
|
|
? parseInt(str, 10)
|
|
: str.charCodeAt(0);
|
|
}
|
|
|
|
function escapeBraces(str) {
|
|
return str.split('\\\\').join(escSlash)
|
|
.split('\\{').join(escOpen)
|
|
.split('\\}').join(escClose)
|
|
.split('\\,').join(escComma)
|
|
.split('\\.').join(escPeriod);
|
|
}
|
|
|
|
function unescapeBraces(str) {
|
|
return str.split(escSlash).join('\\')
|
|
.split(escOpen).join('{')
|
|
.split(escClose).join('}')
|
|
.split(escComma).join(',')
|
|
.split(escPeriod).join('.');
|
|
}
|
|
|
|
|
|
// Basically just str.split(","), but handling cases
|
|
// where we have nested braced sections, which should be
|
|
// treated as individual members, like {a,{b,c},d}
|
|
function parseCommaParts(str) {
|
|
if (!str)
|
|
return [''];
|
|
|
|
var parts = [];
|
|
var m = balanced('{', '}', str);
|
|
|
|
if (!m)
|
|
return str.split(',');
|
|
|
|
var pre = m.pre;
|
|
var body = m.body;
|
|
var post = m.post;
|
|
var p = pre.split(',');
|
|
|
|
p[p.length-1] += '{' + body + '}';
|
|
var postParts = parseCommaParts(post);
|
|
if (post.length) {
|
|
p[p.length-1] += postParts.shift();
|
|
p.push.apply(p, postParts);
|
|
}
|
|
|
|
parts.push.apply(parts, p);
|
|
|
|
return parts;
|
|
}
|
|
|
|
function expandTop(str) {
|
|
if (!str)
|
|
return [];
|
|
|
|
// I don't know why Bash 4.3 does this, but it does.
|
|
// Anything starting with {} will have the first two bytes preserved
|
|
// but *only* at the top level, so {},a}b will not expand to anything,
|
|
// but a{},b}c will be expanded to [a}c,abc].
|
|
// One could argue that this is a bug in Bash, but since the goal of
|
|
// this module is to match Bash's rules, we escape a leading {}
|
|
if (str.substr(0, 2) === '{}') {
|
|
str = '\\{\\}' + str.substr(2);
|
|
}
|
|
|
|
return expand(escapeBraces(str), true).map(unescapeBraces);
|
|
}
|
|
|
|
function embrace(str) {
|
|
return '{' + str + '}';
|
|
}
|
|
function isPadded(el) {
|
|
return /^-?0\d/.test(el);
|
|
}
|
|
|
|
function lte(i, y) {
|
|
return i <= y;
|
|
}
|
|
function gte(i, y) {
|
|
return i >= y;
|
|
}
|
|
|
|
function expand(str, isTop) {
|
|
var expansions = [];
|
|
|
|
var m = balanced('{', '}', str);
|
|
if (!m) return [str];
|
|
|
|
// no need to expand pre, since it is guaranteed to be free of brace-sets
|
|
var pre = m.pre;
|
|
var post = m.post.length
|
|
? expand(m.post, false)
|
|
: [''];
|
|
|
|
if (/\$$/.test(m.pre)) {
|
|
for (var k = 0; k < post.length; k++) {
|
|
var expansion = pre+ '{' + m.body + '}' + post[k];
|
|
expansions.push(expansion);
|
|
}
|
|
} else {
|
|
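// Classify the brace body: a sequence like {1..5..2} or {a..e}, or an options list like {a,b,c}.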
var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
|
|
var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
|
|
var isSequence = isNumericSequence || isAlphaSequence;
|
|
var isOptions = m.body.indexOf(',') >= 0;
|
|
if (!isSequence && !isOptions) {
|
|
// {a},b}
|
|
if (m.post.match(/,.*\}/)) {
|
|
str = m.pre + '{' + m.body + escClose + m.post;
|
|
return expand(str);
|
|
}
|
|
return [str];
|
|
}
|
|
|
|
var n;
|
|
if (isSequence) {
|
|
n = m.body.split(/\.\./);
|
|
} else {
|
|
n = parseCommaParts(m.body);
|
|
if (n.length === 1) {
|
|
// x{{a,b}}y ==> x{a}y x{b}y
|
|
n = expand(n[0], false).map(embrace);
|
|
if (n.length === 1) {
|
|
return post.map(function(p) {
|
|
return m.pre + n[0] + p;
|
|
});
|
|
}
|
|
}
|
|
}
|
|
|
|
// at this point, n is the parts, and we know it's not a comma set
|
|
// with a single entry.
|
|
var N;
|
|
|
|
if (isSequence) {
|
|
var x = numeric(n[0]);
|
|
var y = numeric(n[1]);
|
|
var width = Math.max(n[0].length, n[1].length)
|
|
var incr = n.length == 3
|
|
? Math.abs(numeric(n[2]))
|
|
: 1;
|
|
var test = lte;
|
|
var reverse = y < x;
|
|
if (reverse) {
|
|
incr *= -1;
|
|
test = gte;
|
|
}
|
|
var pad = n.some(isPadded);
|
|
|
|
N = [];
|
|
|
|
for (var i = x; test(i, y); i += incr) {
|
|
var c;
|
|
if (isAlphaSequence) {
|
|
c = String.fromCharCode(i);
|
|
if (c === '\\')
|
|
c = '';
|
|
} else {
|
|
c = String(i);
|
|
if (pad) {
|
|
var need = width - c.length;
|
|
if (need > 0) {
|
|
var z = new Array(need + 1).join('0');
|
|
if (i < 0)
|
|
c = '-' + z + c.slice(1);
|
|
else
|
|
c = z + c;
|
|
}
|
|
}
|
|
}
|
|
N.push(c);
|
|
}
|
|
} else {
|
|
N = [];
|
|
|
|
for (var j = 0; j < n.length; j++) {
|
|
N.push.apply(N, expand(n[j], false));
|
|
}
|
|
}
|
|
|
|
for (var j = 0; j < N.length; j++) {
|
|
for (var k = 0; k < post.length; k++) {
|
|
var expansion = pre + N[j] + post[k];
|
|
if (!isTop || isSequence || expansion)
|
|
expansions.push(expansion);
|
|
}
|
|
}
|
|
}
|
|
|
|
return expansions;
|
|
}
|
|
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 4294:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

module.exports = __nccwpck_require__(4219);
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 4219:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
var net = __nccwpck_require__(1808);
|
|
var tls = __nccwpck_require__(4404);
|
|
var http = __nccwpck_require__(3685);
|
|
var https = __nccwpck_require__(5687);
|
|
var events = __nccwpck_require__(2361);
|
|
var assert = __nccwpck_require__(9491);
|
|
var util = __nccwpck_require__(3837);
|
|
|
|
|
|
exports.httpOverHttp = httpOverHttp;
|
|
exports.httpsOverHttp = httpsOverHttp;
|
|
exports.httpOverHttps = httpOverHttps;
|
|
exports.httpsOverHttps = httpsOverHttps;
|
|
|
|
|
|
function httpOverHttp(options) {
|
|
var agent = new TunnelingAgent(options);
|
|
agent.request = http.request;
|
|
return agent;
|
|
}
|
|
|
|
function httpsOverHttp(options) {
|
|
var agent = new TunnelingAgent(options);
|
|
agent.request = http.request;
|
|
agent.createSocket = createSecureSocket;
|
|
agent.defaultPort = 443;
|
|
return agent;
|
|
}
|
|
|
|
function httpOverHttps(options) {
|
|
var agent = new TunnelingAgent(options);
|
|
agent.request = https.request;
|
|
return agent;
|
|
}
|
|
|
|
function httpsOverHttps(options) {
|
|
var agent = new TunnelingAgent(options);
|
|
agent.request = https.request;
|
|
agent.createSocket = createSecureSocket;
|
|
agent.defaultPort = 443;
|
|
return agent;
|
|
}
|
|
|
|
|
|
function TunnelingAgent(options) {
|
|
var self = this;
|
|
self.options = options || {};
|
|
self.proxyOptions = self.options.proxy || {};
|
|
self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;
|
|
self.requests = [];
|
|
self.sockets = [];
|
|
|
|
self.on('free', function onFree(socket, host, port, localAddress) {
|
|
var options = toOptions(host, port, localAddress);
|
|
for (var i = 0, len = self.requests.length; i < len; ++i) {
|
|
var pending = self.requests[i];
|
|
if (pending.host === options.host && pending.port === options.port) {
|
|
// Detect the request to connect same origin server,
|
|
// reuse the connection.
|
|
self.requests.splice(i, 1);
|
|
pending.request.onSocket(socket);
|
|
return;
|
|
}
|
|
}
|
|
socket.destroy();
|
|
self.removeSocket(socket);
|
|
});
|
|
}
|
|
util.inherits(TunnelingAgent, events.EventEmitter);
|
|
|
|
TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {
|
|
var self = this;
|
|
var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));
|
|
|
|
if (self.sockets.length >= this.maxSockets) {
|
|
// We are over limit so we'll add it to the queue.
|
|
self.requests.push(options);
|
|
return;
|
|
}
|
|
|
|
// If we are under maxSockets create a new one.
|
|
self.createSocket(options, function(socket) {
|
|
socket.on('free', onFree);
|
|
socket.on('close', onCloseOrRemove);
|
|
socket.on('agentRemove', onCloseOrRemove);
|
|
req.onSocket(socket);
|
|
|
|
function onFree() {
|
|
self.emit('free', socket, options);
|
|
}
|
|
|
|
function onCloseOrRemove(err) {
|
|
self.removeSocket(socket);
|
|
socket.removeListener('free', onFree);
|
|
socket.removeListener('close', onCloseOrRemove);
|
|
socket.removeListener('agentRemove', onCloseOrRemove);
|
|
}
|
|
});
|
|
};
|
|
|
|
TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
|
|
var self = this;
|
|
var placeholder = {};
|
|
self.sockets.push(placeholder);
|
|
|
|
var connectOptions = mergeOptions({}, self.proxyOptions, {
|
|
method: 'CONNECT',
|
|
path: options.host + ':' + options.port,
|
|
agent: false,
|
|
headers: {
|
|
host: options.host + ':' + options.port
|
|
}
|
|
});
|
|
if (options.localAddress) {
|
|
connectOptions.localAddress = options.localAddress;
|
|
}
|
|
if (connectOptions.proxyAuth) {
|
|
connectOptions.headers = connectOptions.headers || {};
|
|
connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
|
|
new Buffer(connectOptions.proxyAuth).toString('base64');
|
|
}
|
|
|
|
debug('making CONNECT request');
|
|
var connectReq = self.request(connectOptions);
|
|
connectReq.useChunkedEncodingByDefault = false; // for v0.6
|
|
connectReq.once('response', onResponse); // for v0.6
|
|
connectReq.once('upgrade', onUpgrade); // for v0.6
|
|
connectReq.once('connect', onConnect); // for v0.7 or later
|
|
connectReq.once('error', onError);
|
|
connectReq.end();
|
|
|
|
function onResponse(res) {
|
|
// Very hacky. This is necessary to avoid http-parser leaks.
|
|
res.upgrade = true;
|
|
}
|
|
|
|
function onUpgrade(res, socket, head) {
|
|
// Hacky.
|
|
process.nextTick(function() {
|
|
onConnect(res, socket, head);
|
|
});
|
|
}
|
|
|
|
function onConnect(res, socket, head) {
|
|
connectReq.removeAllListeners();
|
|
socket.removeAllListeners();
|
|
|
|
if (res.statusCode !== 200) {
|
|
debug('tunneling socket could not be established, statusCode=%d',
|
|
res.statusCode);
|
|
socket.destroy();
|
|
var error = new Error('tunneling socket could not be established, ' +
|
|
'statusCode=' + res.statusCode);
|
|
error.code = 'ECONNRESET';
|
|
options.request.emit('error', error);
|
|
self.removeSocket(placeholder);
|
|
return;
|
|
}
|
|
if (head.length > 0) {
|
|
debug('got illegal response body from proxy');
|
|
socket.destroy();
|
|
var error = new Error('got illegal response body from proxy');
|
|
error.code = 'ECONNRESET';
|
|
options.request.emit('error', error);
|
|
self.removeSocket(placeholder);
|
|
return;
|
|
}
|
|
debug('tunneling connection has established');
|
|
self.sockets[self.sockets.indexOf(placeholder)] = socket;
|
|
return cb(socket);
|
|
}
|
|
|
|
function onError(cause) {
|
|
connectReq.removeAllListeners();
|
|
|
|
debug('tunneling socket could not be established, cause=%s\n',
|
|
cause.message, cause.stack);
|
|
var error = new Error('tunneling socket could not be established, ' +
|
|
'cause=' + cause.message);
|
|
error.code = 'ECONNRESET';
|
|
options.request.emit('error', error);
|
|
self.removeSocket(placeholder);
|
|
}
|
|
};
|
|
|
|
TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
|
|
var pos = this.sockets.indexOf(socket)
|
|
if (pos === -1) {
|
|
return;
|
|
}
|
|
this.sockets.splice(pos, 1);
|
|
|
|
var pending = this.requests.shift();
|
|
if (pending) {
|
|
// If we have pending requests and a socket gets closed a new one
|
|
// needs to be created to take over in the pool for the one that closed.
|
|
this.createSocket(pending, function(socket) {
|
|
pending.request.onSocket(socket);
|
|
});
|
|
}
|
|
};
|
|
|
|
function createSecureSocket(options, cb) {
|
|
var self = this;
|
|
TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {
|
|
var hostHeader = options.request.getHeader('host');
|
|
var tlsOptions = mergeOptions({}, self.options, {
|
|
socket: socket,
|
|
servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host
|
|
});
|
|
|
|
// 0 is dummy port for v0.6
|
|
var secureSocket = tls.connect(0, tlsOptions);
|
|
self.sockets[self.sockets.indexOf(socket)] = secureSocket;
|
|
cb(secureSocket);
|
|
});
|
|
}
|
|
|
|
|
|
function toOptions(host, port, localAddress) {
|
|
if (typeof host === 'string') { // since v0.10
|
|
return {
|
|
host: host,
|
|
port: port,
|
|
localAddress: localAddress
|
|
};
|
|
}
|
|
return host; // for v0.11 or later
|
|
}
|
|
|
|
function mergeOptions(target) {
|
|
for (var i = 1, len = arguments.length; i < len; ++i) {
|
|
var overrides = arguments[i];
|
|
if (typeof overrides === 'object') {
|
|
var keys = Object.keys(overrides);
|
|
for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {
|
|
var k = keys[j];
|
|
if (overrides[k] !== undefined) {
|
|
target[k] = overrides[k];
|
|
}
|
|
}
|
|
}
|
|
}
|
|
return target;
|
|
}
|
|
|
|
|
|
var debug;
|
|
if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
|
|
debug = function() {
|
|
var args = Array.prototype.slice.call(arguments);
|
|
if (typeof args[0] === 'string') {
|
|
args[0] = 'TUNNEL: ' + args[0];
|
|
} else {
|
|
args.unshift('TUNNEL:');
|
|
}
|
|
console.error.apply(console, args);
|
|
}
|
|
} else {
|
|
debug = function() {};
|
|
}
|
|
exports.debug = debug; // for test
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5840:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({
|
|
value: true
|
|
}));
|
|
Object.defineProperty(exports, "v1", ({
|
|
enumerable: true,
|
|
get: function () {
|
|
return _v.default;
|
|
}
|
|
}));
|
|
Object.defineProperty(exports, "v3", ({
|
|
enumerable: true,
|
|
get: function () {
|
|
return _v2.default;
|
|
}
|
|
}));
|
|
Object.defineProperty(exports, "v4", ({
|
|
enumerable: true,
|
|
get: function () {
|
|
return _v3.default;
|
|
}
|
|
}));
|
|
Object.defineProperty(exports, "v5", ({
|
|
enumerable: true,
|
|
get: function () {
|
|
return _v4.default;
|
|
}
|
|
}));
|
|
Object.defineProperty(exports, "NIL", ({
|
|
enumerable: true,
|
|
get: function () {
|
|
return _nil.default;
|
|
}
|
|
}));
|
|
Object.defineProperty(exports, "version", ({
|
|
enumerable: true,
|
|
get: function () {
|
|
return _version.default;
|
|
}
|
|
}));
|
|
Object.defineProperty(exports, "validate", ({
|
|
enumerable: true,
|
|
get: function () {
|
|
return _validate.default;
|
|
}
|
|
}));
|
|
Object.defineProperty(exports, "stringify", ({
|
|
enumerable: true,
|
|
get: function () {
|
|
return _stringify.default;
|
|
}
|
|
}));
|
|
Object.defineProperty(exports, "parse", ({
|
|
enumerable: true,
|
|
get: function () {
|
|
return _parse.default;
|
|
}
|
|
}));
|
|
|
|
var _v = _interopRequireDefault(__nccwpck_require__(8628));
|
|
|
|
var _v2 = _interopRequireDefault(__nccwpck_require__(6409));
|
|
|
|
var _v3 = _interopRequireDefault(__nccwpck_require__(5122));
|
|
|
|
var _v4 = _interopRequireDefault(__nccwpck_require__(9120));
|
|
|
|
var _nil = _interopRequireDefault(__nccwpck_require__(5332));
|
|
|
|
var _version = _interopRequireDefault(__nccwpck_require__(1595));
|
|
|
|
var _validate = _interopRequireDefault(__nccwpck_require__(6900));
|
|
|
|
var _stringify = _interopRequireDefault(__nccwpck_require__(8950));
|
|
|
|
var _parse = _interopRequireDefault(__nccwpck_require__(2746));
|
|
|
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
|
|
|
/***/ }),
|
|
|
|
/***/ 4569:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";


Object.defineProperty(exports, "__esModule", ({
  value: true
}));
exports["default"] = void 0;

var _crypto = _interopRequireDefault(__nccwpck_require__(6113));

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

function md5(bytes) {
  if (Array.isArray(bytes)) {
    bytes = Buffer.from(bytes);
  } else if (typeof bytes === 'string') {
    bytes = Buffer.from(bytes, 'utf8');
  }

  return _crypto.default.createHash('md5').update(bytes).digest();
}

var _default = md5;
exports["default"] = _default;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5332:
/***/ ((__unused_webpack_module, exports) => {

"use strict";


Object.defineProperty(exports, "__esModule", ({
  value: true
}));
exports["default"] = void 0;
var _default = '00000000-0000-0000-0000-000000000000';
exports["default"] = _default;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2746:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({
|
|
value: true
|
|
}));
|
|
exports["default"] = void 0;
|
|
|
|
var _validate = _interopRequireDefault(__nccwpck_require__(6900));
|
|
|
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
|
|
|
function parse(uuid) {
|
|
if (!(0, _validate.default)(uuid)) {
|
|
throw TypeError('Invalid UUID');
|
|
}
|
|
|
|
let v;
|
|
const arr = new Uint8Array(16); // Parse ########-....-....-....-............
|
|
|
|
arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;
|
|
arr[1] = v >>> 16 & 0xff;
|
|
arr[2] = v >>> 8 & 0xff;
|
|
arr[3] = v & 0xff; // Parse ........-####-....-....-............
|
|
|
|
arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;
|
|
arr[5] = v & 0xff; // Parse ........-....-####-....-............
|
|
|
|
arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;
|
|
arr[7] = v & 0xff; // Parse ........-....-....-####-............
|
|
|
|
arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;
|
|
arr[9] = v & 0xff; // Parse ........-....-....-....-############
|
|
// (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes)
|
|
|
|
arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;
|
|
arr[11] = v / 0x100000000 & 0xff;
|
|
arr[12] = v >>> 24 & 0xff;
|
|
arr[13] = v >>> 16 & 0xff;
|
|
arr[14] = v >>> 8 & 0xff;
|
|
arr[15] = v & 0xff;
|
|
return arr;
|
|
}
|
|
|
|
var _default = parse;
|
|
exports["default"] = _default;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 814:
/***/ ((__unused_webpack_module, exports) => {

"use strict";


Object.defineProperty(exports, "__esModule", ({
  value: true
}));
exports["default"] = void 0;
var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;
exports["default"] = _default;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 807:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";


Object.defineProperty(exports, "__esModule", ({
  value: true
}));
exports["default"] = rng;

var _crypto = _interopRequireDefault(__nccwpck_require__(6113));

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate

let poolPtr = rnds8Pool.length;

function rng() {
  if (poolPtr > rnds8Pool.length - 16) {
    _crypto.default.randomFillSync(rnds8Pool);

    poolPtr = 0;
  }

  return rnds8Pool.slice(poolPtr, poolPtr += 16);
}
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5274:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";


Object.defineProperty(exports, "__esModule", ({
  value: true
}));
exports["default"] = void 0;

var _crypto = _interopRequireDefault(__nccwpck_require__(6113));

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

function sha1(bytes) {
  if (Array.isArray(bytes)) {
    bytes = Buffer.from(bytes);
  } else if (typeof bytes === 'string') {
    bytes = Buffer.from(bytes, 'utf8');
  }

  return _crypto.default.createHash('sha1').update(bytes).digest();
}

var _default = sha1;
exports["default"] = _default;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 8950:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({
|
|
value: true
|
|
}));
|
|
exports["default"] = void 0;
|
|
|
|
var _validate = _interopRequireDefault(__nccwpck_require__(6900));
|
|
|
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
|
|
|
/**
|
|
* Convert array of 16 byte values to UUID string format of the form:
|
|
* XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
|
|
*/
|
|
const byteToHex = [];
|
|
|
|
for (let i = 0; i < 256; ++i) {
|
|
byteToHex.push((i + 0x100).toString(16).substr(1));
|
|
}
|
|
|
|
function stringify(arr, offset = 0) {
|
|
// Note: Be careful editing this code! It's been tuned for performance
|
|
// and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434
|
|
const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one
|
|
// of the following:
|
|
// - One or more input array values don't map to a hex octet (leading to
|
|
// "undefined" in the uuid)
|
|
// - Invalid input values for the RFC `version` or `variant` fields
|
|
|
|
if (!(0, _validate.default)(uuid)) {
|
|
throw TypeError('Stringified UUID is invalid');
|
|
}
|
|
|
|
return uuid;
|
|
}
|
|
|
|
var _default = stringify;
|
|
exports["default"] = _default;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 8628:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({
|
|
value: true
|
|
}));
|
|
exports["default"] = void 0;
|
|
|
|
var _rng = _interopRequireDefault(__nccwpck_require__(807));
|
|
|
|
var _stringify = _interopRequireDefault(__nccwpck_require__(8950));
|
|
|
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
|
|
|
// **`v1()` - Generate time-based UUID**
|
|
//
|
|
// Inspired by https://github.com/LiosK/UUID.js
|
|
// and http://docs.python.org/library/uuid.html
|
|
let _nodeId;
|
|
|
|
let _clockseq; // Previous uuid creation time
|
|
|
|
|
|
let _lastMSecs = 0;
|
|
let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details
|
|
|
|
function v1(options, buf, offset) {
|
|
let i = buf && offset || 0;
|
|
const b = buf || new Array(16);
|
|
options = options || {};
|
|
let node = options.node || _nodeId;
|
|
let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not
|
|
// specified. We do this lazily to minimize issues related to insufficient
|
|
// system entropy. See #189
|
|
|
|
if (node == null || clockseq == null) {
|
|
const seedBytes = options.random || (options.rng || _rng.default)();
|
|
|
|
if (node == null) {
|
|
// Per 4.5, create a 48-bit node id (47 random bits + multicast bit = 1)
|
|
node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];
|
|
}
|
|
|
|
if (clockseq == null) {
|
|
// Per 4.2.2, randomize (14 bit) clockseq
|
|
clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;
|
|
}
|
|
} // UUID timestamps are 100 nano-second units since the Gregorian epoch,
|
|
// (1582-10-15 00:00). JSNumbers aren't precise enough for this, so
|
|
// time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
|
|
// (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
|
|
|
|
|
|
let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock
|
|
// cycle to simulate higher resolution clock
|
|
|
|
let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)
|
|
|
|
const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression
|
|
|
|
if (dt < 0 && options.clockseq === undefined) {
|
|
clockseq = clockseq + 1 & 0x3fff;
|
|
} // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
|
|
// time interval
|
|
|
|
|
|
if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {
|
|
nsecs = 0;
|
|
} // Per 4.2.1.2 Throw error if too many uuids are requested
|
|
|
|
|
|
if (nsecs >= 10000) {
|
|
throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");
|
|
}
|
|
|
|
_lastMSecs = msecs;
|
|
_lastNSecs = nsecs;
|
|
_clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch
|
|
|
|
msecs += 12219292800000; // `time_low`
|
|
|
|
const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;
|
|
b[i++] = tl >>> 24 & 0xff;
|
|
b[i++] = tl >>> 16 & 0xff;
|
|
b[i++] = tl >>> 8 & 0xff;
|
|
b[i++] = tl & 0xff; // `time_mid`
|
|
|
|
const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;
|
|
b[i++] = tmh >>> 8 & 0xff;
|
|
b[i++] = tmh & 0xff; // `time_high_and_version`
|
|
|
|
b[i++] = tmh >>> 24 & 0xf | 0x10; // include version
|
|
|
|
b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
|
|
|
|
b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`
|
|
|
|
b[i++] = clockseq & 0xff; // `node`
|
|
|
|
for (let n = 0; n < 6; ++n) {
|
|
b[i + n] = node[n];
|
|
}
|
|
|
|
return buf || (0, _stringify.default)(b);
|
|
}
|
|
|
|
var _default = v1;
|
|
exports["default"] = _default;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 6409:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";


Object.defineProperty(exports, "__esModule", ({
  value: true
}));
exports["default"] = void 0;

var _v = _interopRequireDefault(__nccwpck_require__(5998));

var _md = _interopRequireDefault(__nccwpck_require__(4569));

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

const v3 = (0, _v.default)('v3', 0x30, _md.default);
var _default = v3;
exports["default"] = _default;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5998:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({
|
|
value: true
|
|
}));
|
|
exports["default"] = _default;
|
|
exports.URL = exports.DNS = void 0;
|
|
|
|
var _stringify = _interopRequireDefault(__nccwpck_require__(8950));
|
|
|
|
var _parse = _interopRequireDefault(__nccwpck_require__(2746));
|
|
|
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
|
|
|
function stringToBytes(str) {
|
|
str = unescape(encodeURIComponent(str)); // UTF8 escape
|
|
|
|
const bytes = [];
|
|
|
|
for (let i = 0; i < str.length; ++i) {
|
|
bytes.push(str.charCodeAt(i));
|
|
}
|
|
|
|
return bytes;
|
|
}
|
|
|
|
const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';
|
|
exports.DNS = DNS;
|
|
const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';
|
|
exports.URL = URL;
|
|
|
|
function _default(name, version, hashfunc) {
|
|
function generateUUID(value, namespace, buf, offset) {
|
|
if (typeof value === 'string') {
|
|
value = stringToBytes(value);
|
|
}
|
|
|
|
if (typeof namespace === 'string') {
|
|
namespace = (0, _parse.default)(namespace);
|
|
}
|
|
|
|
if (namespace.length !== 16) {
|
|
throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');
|
|
} // Compute hash of namespace and value, Per 4.3
|
|
// Future: Use spread syntax when supported on all platforms, e.g. `bytes =
|
|
// hashfunc([...namespace, ... value])`
|
|
|
|
|
|
let bytes = new Uint8Array(16 + value.length);
|
|
bytes.set(namespace);
|
|
bytes.set(value, namespace.length);
|
|
bytes = hashfunc(bytes);
|
|
bytes[6] = bytes[6] & 0x0f | version;
|
|
bytes[8] = bytes[8] & 0x3f | 0x80;
|
|
|
|
if (buf) {
|
|
offset = offset || 0;
|
|
|
|
for (let i = 0; i < 16; ++i) {
|
|
buf[offset + i] = bytes[i];
|
|
}
|
|
|
|
return buf;
|
|
}
|
|
|
|
return (0, _stringify.default)(bytes);
|
|
} // Function#name is not settable on some platforms (#270)
|
|
|
|
|
|
try {
|
|
generateUUID.name = name; // eslint-disable-next-line no-empty
|
|
} catch (err) {} // For CommonJS default export support
|
|
|
|
|
|
generateUUID.DNS = DNS;
|
|
generateUUID.URL = URL;
|
|
return generateUUID;
|
|
}
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5122:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";


Object.defineProperty(exports, "__esModule", ({
  value: true
}));
exports["default"] = void 0;

var _rng = _interopRequireDefault(__nccwpck_require__(807));

var _stringify = _interopRequireDefault(__nccwpck_require__(8950));

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

function v4(options, buf, offset) {
  options = options || {};

  const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`


  rnds[6] = rnds[6] & 0x0f | 0x40;
  rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided

  if (buf) {
    offset = offset || 0;

    for (let i = 0; i < 16; ++i) {
      buf[offset + i] = rnds[i];
    }

    return buf;
  }

  return (0, _stringify.default)(rnds);
}

var _default = v4;
exports["default"] = _default;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 9120:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";


Object.defineProperty(exports, "__esModule", ({
  value: true
}));
exports["default"] = void 0;

var _v = _interopRequireDefault(__nccwpck_require__(5998));

var _sha = _interopRequireDefault(__nccwpck_require__(5274));

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

const v5 = (0, _v.default)('v5', 0x50, _sha.default);
var _default = v5;
exports["default"] = _default;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 6900:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";


Object.defineProperty(exports, "__esModule", ({
  value: true
}));
exports["default"] = void 0;

var _regex = _interopRequireDefault(__nccwpck_require__(814));

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

function validate(uuid) {
  return typeof uuid === 'string' && _regex.default.test(uuid);
}

var _default = validate;
exports["default"] = _default;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1595:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";


Object.defineProperty(exports, "__esModule", ({
  value: true
}));
exports["default"] = void 0;

var _validate = _interopRequireDefault(__nccwpck_require__(6900));

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

function version(uuid) {
  if (!(0, _validate.default)(uuid)) {
    throw TypeError('Invalid UUID');
  }

  return parseInt(uuid.substr(14, 1), 16);
}

var _default = version;
exports["default"] = _default;
|
|
|
|
/***/ }),
|
|
|
|
/***/ 9491:
/***/ ((module) => {

"use strict";
module.exports = require("assert");

/***/ }),

/***/ 6113:
/***/ ((module) => {

"use strict";
module.exports = require("crypto");

/***/ }),

/***/ 2361:
/***/ ((module) => {

"use strict";
module.exports = require("events");

/***/ }),

/***/ 7147:
/***/ ((module) => {

"use strict";
module.exports = require("fs");

/***/ }),

/***/ 3292:
/***/ ((module) => {

"use strict";
module.exports = require("fs/promises");

/***/ }),

/***/ 3685:
/***/ ((module) => {

"use strict";
module.exports = require("http");

/***/ }),

/***/ 5687:
/***/ ((module) => {

"use strict";
module.exports = require("https");

/***/ }),

/***/ 1808:
/***/ ((module) => {

"use strict";
module.exports = require("net");

/***/ }),

/***/ 5673:
/***/ ((module) => {

"use strict";
module.exports = require("node:events");

/***/ }),

/***/ 4492:
/***/ ((module) => {

"use strict";
module.exports = require("node:stream");

/***/ }),

/***/ 6915:
/***/ ((module) => {

"use strict";
module.exports = require("node:string_decoder");

/***/ }),

/***/ 2037:
/***/ ((module) => {

"use strict";
module.exports = require("os");

/***/ }),

/***/ 1017:
/***/ ((module) => {

"use strict";
module.exports = require("path");

/***/ }),

/***/ 4404:
/***/ ((module) => {

"use strict";
module.exports = require("tls");

/***/ }),

/***/ 7310:
/***/ ((module) => {

"use strict";
module.exports = require("url");

/***/ }),

/***/ 3837:
/***/ ((module) => {

"use strict";
module.exports = require("util");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2487:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.Glob = void 0;
|
|
const minimatch_1 = __nccwpck_require__(266);
|
|
const path_scurry_1 = __nccwpck_require__(9569);
|
|
const url_1 = __nccwpck_require__(7310);
|
|
const pattern_js_1 = __nccwpck_require__(6866);
|
|
const walker_js_1 = __nccwpck_require__(153);
|
|
// if no process global, just call it linux.
|
|
// so we default to case-sensitive, / separators
|
|
const defaultPlatform = typeof process === 'object' &&
|
|
process &&
|
|
typeof process.platform === 'string'
|
|
? process.platform
|
|
: 'linux';
|
|
/**
|
|
* An object that can perform glob pattern traversals.
|
|
*/
|
|
class Glob {
|
|
absolute;
|
|
cwd;
|
|
root;
|
|
dot;
|
|
dotRelative;
|
|
follow;
|
|
ignore;
|
|
magicalBraces;
|
|
mark;
|
|
matchBase;
|
|
maxDepth;
|
|
nobrace;
|
|
nocase;
|
|
nodir;
|
|
noext;
|
|
noglobstar;
|
|
pattern;
|
|
platform;
|
|
realpath;
|
|
scurry;
|
|
stat;
|
|
signal;
|
|
windowsPathsNoEscape;
|
|
withFileTypes;
|
|
/**
|
|
* The options provided to the constructor.
|
|
*/
|
|
opts;
|
|
/**
|
|
* An array of parsed immutable {@link Pattern} objects.
|
|
*/
|
|
patterns;
|
|
/**
|
|
* All options are stored as properties on the `Glob` object.
|
|
*
|
|
* See {@link GlobOptions} for full options descriptions.
|
|
*
|
|
* Note that a previous `Glob` object can be passed as the
|
|
* `GlobOptions` to another `Glob` instantiation to re-use settings
|
|
* and caches with a new pattern.
|
|
*
|
|
* Traversal functions can be called multiple times to run the walk
|
|
* again.
|
|
*/
|
|
constructor(pattern, opts) {
|
|
/* c8 ignore start */
|
|
if (!opts)
|
|
throw new TypeError('glob options required');
|
|
/* c8 ignore stop */
|
|
this.withFileTypes = !!opts.withFileTypes;
|
|
this.signal = opts.signal;
|
|
this.follow = !!opts.follow;
|
|
this.dot = !!opts.dot;
|
|
this.dotRelative = !!opts.dotRelative;
|
|
this.nodir = !!opts.nodir;
|
|
this.mark = !!opts.mark;
|
|
if (!opts.cwd) {
|
|
this.cwd = '';
|
|
}
|
|
else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {
|
|
opts.cwd = (0, url_1.fileURLToPath)(opts.cwd);
|
|
}
|
|
this.cwd = opts.cwd || '';
|
|
this.root = opts.root;
|
|
this.magicalBraces = !!opts.magicalBraces;
|
|
this.nobrace = !!opts.nobrace;
|
|
this.noext = !!opts.noext;
|
|
this.realpath = !!opts.realpath;
|
|
this.absolute = opts.absolute;
|
|
this.noglobstar = !!opts.noglobstar;
|
|
this.matchBase = !!opts.matchBase;
|
|
this.maxDepth =
|
|
typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity;
|
|
this.stat = !!opts.stat;
|
|
this.ignore = opts.ignore;
|
|
if (this.withFileTypes && this.absolute !== undefined) {
|
|
throw new Error('cannot set absolute and withFileTypes:true');
|
|
}
|
|
if (typeof pattern === 'string') {
|
|
pattern = [pattern];
|
|
}
|
|
this.windowsPathsNoEscape =
|
|
!!opts.windowsPathsNoEscape ||
|
|
opts.allowWindowsEscape === false;
|
|
if (this.windowsPathsNoEscape) {
|
|
pattern = pattern.map(p => p.replace(/\\/g, '/'));
|
|
}
|
|
if (this.matchBase) {
|
|
if (opts.noglobstar) {
|
|
throw new TypeError('base matching requires globstar');
|
|
}
|
|
pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`));
|
|
}
|
|
this.pattern = pattern;
|
|
this.platform = opts.platform || defaultPlatform;
|
|
this.opts = { ...opts, platform: this.platform };
|
|
if (opts.scurry) {
|
|
this.scurry = opts.scurry;
|
|
if (opts.nocase !== undefined &&
|
|
opts.nocase !== opts.scurry.nocase) {
|
|
throw new Error('nocase option contradicts provided scurry option');
|
|
}
|
|
}
|
|
else {
|
|
const Scurry = opts.platform === 'win32'
|
|
? path_scurry_1.PathScurryWin32
|
|
: opts.platform === 'darwin'
|
|
? path_scurry_1.PathScurryDarwin
|
|
: opts.platform
|
|
? path_scurry_1.PathScurryPosix
|
|
: path_scurry_1.PathScurry;
|
|
this.scurry = new Scurry(this.cwd, {
|
|
nocase: opts.nocase,
|
|
fs: opts.fs,
|
|
});
|
|
}
|
|
this.nocase = this.scurry.nocase;
|
|
// If you do nocase:true on a case-sensitive file system, then
|
|
// we need to use regexps instead of strings for non-magic
|
|
// path portions, because statting `aBc` won't return results
|
|
// for the file `AbC` for example.
|
|
const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32';
|
|
const mmo = {
|
|
// default nocase based on platform
|
|
...opts,
|
|
dot: this.dot,
|
|
matchBase: this.matchBase,
|
|
nobrace: this.nobrace,
|
|
nocase: this.nocase,
|
|
nocaseMagicOnly,
|
|
nocomment: true,
|
|
noext: this.noext,
|
|
nonegate: true,
|
|
optimizationLevel: 2,
|
|
platform: this.platform,
|
|
windowsPathsNoEscape: this.windowsPathsNoEscape,
|
|
debug: !!this.opts.debug,
|
|
};
|
|
const mms = this.pattern.map(p => new minimatch_1.Minimatch(p, mmo));
|
|
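// Flatten every Minimatch instance into two parallel arrays: the expanded match sets
// and their corresponding glob parts, which are paired up into Pattern objects below.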
const [matchSet, globParts] = mms.reduce((set, m) => {
|
|
set[0].push(...m.set);
|
|
set[1].push(...m.globParts);
|
|
return set;
|
|
}, [[], []]);
|
|
this.patterns = matchSet.map((set, i) => {
|
|
const g = globParts[i];
|
|
/* c8 ignore start */
|
|
if (!g)
|
|
throw new Error('invalid pattern object');
|
|
/* c8 ignore stop */
|
|
return new pattern_js_1.Pattern(set, g, 0, this.platform);
|
|
});
|
|
}
|
|
async walk() {
|
|
// Walkers always return array of Path objects, so we just have to
|
|
// coerce them into the right shape. It will have already called
|
|
// realpath() if the option was set to do so, so we know that's cached.
|
|
// start out knowing the cwd, at least
|
|
return [
|
|
...(await new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, {
|
|
...this.opts,
|
|
maxDepth: this.maxDepth !== Infinity
|
|
? this.maxDepth + this.scurry.cwd.depth()
|
|
: Infinity,
|
|
platform: this.platform,
|
|
nocase: this.nocase,
|
|
}).walk()),
|
|
];
|
|
}
|
|
walkSync() {
|
|
return [
|
|
...new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, {
|
|
...this.opts,
|
|
maxDepth: this.maxDepth !== Infinity
|
|
? this.maxDepth + this.scurry.cwd.depth()
|
|
: Infinity,
|
|
platform: this.platform,
|
|
nocase: this.nocase,
|
|
}).walkSync(),
|
|
];
|
|
}
|
|
stream() {
|
|
return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, {
|
|
...this.opts,
|
|
maxDepth: this.maxDepth !== Infinity
|
|
? this.maxDepth + this.scurry.cwd.depth()
|
|
: Infinity,
|
|
platform: this.platform,
|
|
nocase: this.nocase,
|
|
}).stream();
|
|
}
|
|
streamSync() {
|
|
return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, {
|
|
...this.opts,
|
|
maxDepth: this.maxDepth !== Infinity
|
|
? this.maxDepth + this.scurry.cwd.depth()
|
|
: Infinity,
|
|
platform: this.platform,
|
|
nocase: this.nocase,
|
|
}).streamSync();
|
|
}
|
|
    /**
     * Default sync iteration function. Returns a Generator that
     * iterates over the results.
     */
    iterateSync() {
        return this.streamSync()[Symbol.iterator]();
    }
    [Symbol.iterator]() {
        return this.iterateSync();
    }
    /**
     * Default async iteration function. Returns an AsyncGenerator that
     * iterates over the results.
     */
    iterate() {
        return this.stream()[Symbol.asyncIterator]();
    }
    [Symbol.asyncIterator]() {
        return this.iterate();
    }
}
exports.Glob = Glob;
//# sourceMappingURL=glob.js.map
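// Illustrative note (not part of the bundled glob library): because the Glob
// class wires [Symbol.iterator] to streamSync() and [Symbol.asyncIterator] to
// stream(), an instance can be consumed lazily instead of collecting every
// match first. A minimal sketch, assuming this bundle is loaded as a module:
//
//   const g = new Glob('src/**/*.js', {});
//   for await (const file of g) {
//     console.log(file); // paths arrive as the walk produces them
//   }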
|
|
|
|
/***/ }),
|
|
|
|
/***/ 3133:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.hasMagic = void 0;
const minimatch_1 = __nccwpck_require__(266);
/**
 * Return true if the patterns provided contain any magic glob characters,
 * given the options provided.
 *
 * Brace expansion is not considered "magic" unless the `magicalBraces` option
 * is set, as brace expansion just turns one string into an array of strings.
 * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and
 * `'xby'` both do not contain any magic glob characters, and it's treated the
 * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true`
 * is in the options, brace expansion _is_ treated as a pattern having magic.
 */
const hasMagic = (pattern, options = {}) => {
    if (!Array.isArray(pattern)) {
        pattern = [pattern];
    }
    for (const p of pattern) {
        if (new minimatch_1.Minimatch(p, options).hasMagic())
            return true;
    }
    return false;
};
exports.hasMagic = hasMagic;
//# sourceMappingURL=has-magic.js.map
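// Illustrative sketch of the behaviour documented above (assumes hasMagic is
// imported from this module):
//
//   hasMagic('x{a,b}y');                           // false: braces alone are not magic
//   hasMagic('x{a,b}y', { magicalBraces: true });  // true: braces count as magic here
//   hasMagic('x*y');                               // true: '*' is a magic character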
|
|
|
|
/***/ }),
|
|
|
|
/***/ 9703:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
// give it a pattern, and it'll be able to tell you if
|
|
// a given path should be ignored.
|
|
// Ignoring a path ignores its children if the pattern ends in /**
|
|
// Ignores are always parsed in dot:true mode
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.Ignore = void 0;
|
|
const minimatch_1 = __nccwpck_require__(266);
|
|
const pattern_js_1 = __nccwpck_require__(6866);
|
|
const defaultPlatform = typeof process === 'object' &&
|
|
process &&
|
|
typeof process.platform === 'string'
|
|
? process.platform
|
|
: 'linux';
|
|
/**
|
|
* Class used to process ignored patterns
|
|
*/
|
|
class Ignore {
|
|
relative;
|
|
relativeChildren;
|
|
absolute;
|
|
absoluteChildren;
|
|
constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) {
|
|
this.relative = [];
|
|
this.absolute = [];
|
|
this.relativeChildren = [];
|
|
this.absoluteChildren = [];
|
|
const mmopts = {
|
|
dot: true,
|
|
nobrace,
|
|
nocase,
|
|
noext,
|
|
noglobstar,
|
|
optimizationLevel: 2,
|
|
platform,
|
|
nocomment: true,
|
|
nonegate: true,
|
|
};
|
|
// this is a little weird, but it gives us a clean set of optimized
|
|
// minimatch matchers, without getting tripped up if one of them
|
|
// ends in /** inside a brace section, and it's only inefficient at
|
|
// the start of the walk, not along it.
|
|
// It'd be nice if the Pattern class just had a .test() method, but
|
|
// handling globstars is a bit of a pita, and that code already lives
|
|
// in minimatch anyway.
|
|
// Another way would be if maybe Minimatch could take its set/globParts
|
|
// as an option, and then we could at least just use Pattern to test
|
|
// for absolute-ness.
|
|
// Yet another way, Minimatch could take an array of glob strings, and
|
|
// a cwd option, and do the right thing.
|
|
for (const ign of ignored) {
|
|
const mm = new minimatch_1.Minimatch(ign, mmopts);
|
|
for (let i = 0; i < mm.set.length; i++) {
|
|
const parsed = mm.set[i];
|
|
const globParts = mm.globParts[i];
|
|
/* c8 ignore start */
|
|
if (!parsed || !globParts) {
|
|
throw new Error('invalid pattern object');
|
|
}
|
|
/* c8 ignore stop */
|
|
const p = new pattern_js_1.Pattern(parsed, globParts, 0, platform);
|
|
const m = new minimatch_1.Minimatch(p.globString(), mmopts);
|
|
const children = globParts[globParts.length - 1] === '**';
|
|
const absolute = p.isAbsolute();
|
|
if (absolute)
|
|
this.absolute.push(m);
|
|
else
|
|
this.relative.push(m);
|
|
if (children) {
|
|
if (absolute)
|
|
this.absoluteChildren.push(m);
|
|
else
|
|
this.relativeChildren.push(m);
|
|
}
|
|
}
|
|
}
|
|
}
|
|
    ignored(p) {
        const fullpath = p.fullpath();
        const fullpaths = `${fullpath}/`;
        const relative = p.relative() || '.';
        const relatives = `${relative}/`;
        for (const m of this.relative) {
            if (m.match(relative) || m.match(relatives))
                return true;
        }
        for (const m of this.absolute) {
            if (m.match(fullpath) || m.match(fullpaths))
                return true;
        }
        return false;
    }
    childrenIgnored(p) {
        const fullpath = p.fullpath() + '/';
        const relative = (p.relative() || '.') + '/';
        for (const m of this.relativeChildren) {
            if (m.match(relative))
                return true;
        }
        for (const m of this.absoluteChildren) {
            if (m.match(fullpath))
                return true;
        }
        return false;
    }
}
exports.Ignore = Ignore;
//# sourceMappingURL=ignore.js.map
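// Illustrative sketch (not part of the library): the Ignore class above backs
// the walker's `ignore` option, and a pattern ending in /** also ignores the
// directory's children, as noted at the top of this module. Assuming the
// glob() export from this bundle:
//
//   await glob('**/*.js', { ignore: ['node_modules/**'] });
//   // -> matches *.js files, but nothing inside node_modules/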
|
|
|
|
/***/ }),
|
|
|
|
/***/ 8211:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.glob = exports.hasMagic = exports.Glob = exports.unescape = exports.escape = exports.sync = exports.iterate = exports.iterateSync = exports.stream = exports.streamSync = exports.globIterate = exports.globIterateSync = exports.globSync = exports.globStream = exports.globStreamSync = void 0;
const minimatch_1 = __nccwpck_require__(266);
const glob_js_1 = __nccwpck_require__(2487);
const has_magic_js_1 = __nccwpck_require__(3133);
function globStreamSync(pattern, options = {}) {
    return new glob_js_1.Glob(pattern, options).streamSync();
}
exports.globStreamSync = globStreamSync;
function globStream(pattern, options = {}) {
    return new glob_js_1.Glob(pattern, options).stream();
}
exports.globStream = globStream;
function globSync(pattern, options = {}) {
    return new glob_js_1.Glob(pattern, options).walkSync();
}
exports.globSync = globSync;
async function glob_(pattern, options = {}) {
    return new glob_js_1.Glob(pattern, options).walk();
}
function globIterateSync(pattern, options = {}) {
    return new glob_js_1.Glob(pattern, options).iterateSync();
}
exports.globIterateSync = globIterateSync;
function globIterate(pattern, options = {}) {
    return new glob_js_1.Glob(pattern, options).iterate();
}
exports.globIterate = globIterate;
// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc
exports.streamSync = globStreamSync;
exports.stream = Object.assign(globStream, { sync: globStreamSync });
exports.iterateSync = globIterateSync;
exports.iterate = Object.assign(globIterate, {
    sync: globIterateSync,
});
exports.sync = Object.assign(globSync, {
    stream: globStreamSync,
    iterate: globIterateSync,
});
/* c8 ignore start */
var minimatch_2 = __nccwpck_require__(266);
Object.defineProperty(exports, "escape", ({ enumerable: true, get: function () { return minimatch_2.escape; } }));
Object.defineProperty(exports, "unescape", ({ enumerable: true, get: function () { return minimatch_2.unescape; } }));
var glob_js_2 = __nccwpck_require__(2487);
Object.defineProperty(exports, "Glob", ({ enumerable: true, get: function () { return glob_js_2.Glob; } }));
var has_magic_js_2 = __nccwpck_require__(3133);
Object.defineProperty(exports, "hasMagic", ({ enumerable: true, get: function () { return has_magic_js_2.hasMagic; } }));
/* c8 ignore stop */
exports.glob = Object.assign(glob_, {
    glob: glob_,
    globSync,
    sync: exports.sync,
    globStream,
    stream: exports.stream,
    globStreamSync,
    streamSync: exports.streamSync,
    globIterate,
    iterate: exports.iterate,
    globIterateSync,
    iterateSync: exports.iterateSync,
    Glob: glob_js_1.Glob,
    hasMagic: has_magic_js_1.hasMagic,
    escape: minimatch_1.escape,
    unescape: minimatch_1.unescape,
});
exports.glob.glob = exports.glob;
//# sourceMappingURL=index.js.map
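// Illustrative usage of the aliased entry points exported above (a sketch,
// assuming this bundle is required as the `glob` package):
//
//   const { glob, globSync, globStream } = require('glob');
//   const files = await glob('*.md');        // async, resolves to an array
//   const now = globSync('*.md');            // synchronous array
//   globStream('*.md').on('data', f => {});  // streaming results
//   // the aliases compose too: glob.sync('*.md'), glob.stream.sync('*.md')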
|
|
|
|
/***/ }),
|
|
|
|
/***/ 6866:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
// this is just a very light wrapper around 2 arrays with an offset index
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.Pattern = void 0;
|
|
const minimatch_1 = __nccwpck_require__(266);
|
|
const isPatternList = (pl) => pl.length >= 1;
|
|
const isGlobList = (gl) => gl.length >= 1;
|
|
/**
|
|
* An immutable-ish view on an array of glob parts and their parsed
|
|
* results
|
|
*/
|
|
class Pattern {
|
|
#patternList;
|
|
#globList;
|
|
#index;
|
|
length;
|
|
#platform;
|
|
#rest;
|
|
#globString;
|
|
#isDrive;
|
|
#isUNC;
|
|
#isAbsolute;
|
|
#followGlobstar = true;
|
|
constructor(patternList, globList, index, platform) {
|
|
if (!isPatternList(patternList)) {
|
|
throw new TypeError('empty pattern list');
|
|
}
|
|
if (!isGlobList(globList)) {
|
|
throw new TypeError('empty glob list');
|
|
}
|
|
if (globList.length !== patternList.length) {
|
|
throw new TypeError('mismatched pattern list and glob list lengths');
|
|
}
|
|
this.length = patternList.length;
|
|
if (index < 0 || index >= this.length) {
|
|
throw new TypeError('index out of range');
|
|
}
|
|
this.#patternList = patternList;
|
|
this.#globList = globList;
|
|
this.#index = index;
|
|
this.#platform = platform;
|
|
// normalize root entries of absolute patterns on initial creation.
|
|
if (this.#index === 0) {
|
|
// c: => ['c:/']
|
|
// C:/ => ['C:/']
|
|
// C:/x => ['C:/', 'x']
|
|
// //host/share => ['//host/share/']
|
|
// //host/share/ => ['//host/share/']
|
|
// //host/share/x => ['//host/share/', 'x']
|
|
// /etc => ['/', 'etc']
|
|
// / => ['/']
|
|
if (this.isUNC()) {
|
|
// '' / '' / 'host' / 'share'
|
|
const [p0, p1, p2, p3, ...prest] = this.#patternList;
|
|
const [g0, g1, g2, g3, ...grest] = this.#globList;
|
|
if (prest[0] === '') {
|
|
// ends in /
|
|
prest.shift();
|
|
grest.shift();
|
|
}
|
|
const p = [p0, p1, p2, p3, ''].join('/');
|
|
const g = [g0, g1, g2, g3, ''].join('/');
|
|
this.#patternList = [p, ...prest];
|
|
this.#globList = [g, ...grest];
|
|
this.length = this.#patternList.length;
|
|
}
|
|
else if (this.isDrive() || this.isAbsolute()) {
|
|
const [p1, ...prest] = this.#patternList;
|
|
const [g1, ...grest] = this.#globList;
|
|
if (prest[0] === '') {
|
|
// ends in /
|
|
prest.shift();
|
|
grest.shift();
|
|
}
|
|
const p = p1 + '/';
|
|
const g = g1 + '/';
|
|
this.#patternList = [p, ...prest];
|
|
this.#globList = [g, ...grest];
|
|
this.length = this.#patternList.length;
|
|
}
|
|
}
|
|
}
|
|
/**
|
|
* The first entry in the parsed list of patterns
|
|
*/
|
|
pattern() {
|
|
return this.#patternList[this.#index];
|
|
}
|
|
/**
|
|
* true if pattern() returns a string
|
|
*/
|
|
isString() {
|
|
return typeof this.#patternList[this.#index] === 'string';
|
|
}
|
|
/**
|
|
* true if pattern() returns GLOBSTAR
|
|
*/
|
|
isGlobstar() {
|
|
return this.#patternList[this.#index] === minimatch_1.GLOBSTAR;
|
|
}
|
|
/**
|
|
* true if pattern() returns a regexp
|
|
*/
|
|
isRegExp() {
|
|
return this.#patternList[this.#index] instanceof RegExp;
|
|
}
|
|
/**
|
|
* The /-joined set of glob parts that make up this pattern
|
|
*/
|
|
globString() {
|
|
return (this.#globString =
|
|
this.#globString ||
|
|
(this.#index === 0
|
|
? this.isAbsolute()
|
|
? this.#globList[0] + this.#globList.slice(1).join('/')
|
|
: this.#globList.join('/')
|
|
: this.#globList.slice(this.#index).join('/')));
|
|
}
|
|
/**
|
|
* true if there are more pattern parts after this one
|
|
*/
|
|
hasMore() {
|
|
return this.length > this.#index + 1;
|
|
}
|
|
/**
|
|
* The rest of the pattern after this part, or null if this is the end
|
|
*/
|
|
rest() {
|
|
if (this.#rest !== undefined)
|
|
return this.#rest;
|
|
if (!this.hasMore())
|
|
return (this.#rest = null);
|
|
this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform);
|
|
this.#rest.#isAbsolute = this.#isAbsolute;
|
|
this.#rest.#isUNC = this.#isUNC;
|
|
this.#rest.#isDrive = this.#isDrive;
|
|
return this.#rest;
|
|
}
|
|
/**
|
|
* true if the pattern represents a //unc/path/ on windows
|
|
*/
|
|
isUNC() {
|
|
const pl = this.#patternList;
|
|
return this.#isUNC !== undefined
|
|
? this.#isUNC
|
|
: (this.#isUNC =
|
|
this.#platform === 'win32' &&
|
|
this.#index === 0 &&
|
|
pl[0] === '' &&
|
|
pl[1] === '' &&
|
|
typeof pl[2] === 'string' &&
|
|
!!pl[2] &&
|
|
typeof pl[3] === 'string' &&
|
|
!!pl[3]);
|
|
}
|
|
// pattern like C:/...
|
|
// split = ['C:', ...]
|
|
// XXX: would be nice to handle patterns like `c:*` to test the cwd
|
|
// in c: for *, but I don't know of a way to even figure out what that
|
|
// cwd is without actually chdir'ing into it?
|
|
/**
|
|
* True if the pattern starts with a drive letter on Windows
|
|
*/
|
|
isDrive() {
|
|
const pl = this.#patternList;
|
|
return this.#isDrive !== undefined
|
|
? this.#isDrive
|
|
: (this.#isDrive =
|
|
this.#platform === 'win32' &&
|
|
this.#index === 0 &&
|
|
this.length > 1 &&
|
|
typeof pl[0] === 'string' &&
|
|
/^[a-z]:$/i.test(pl[0]));
|
|
}
|
|
// pattern = '/' or '/...' or '/x/...'
|
|
// split = ['', ''] or ['', ...] or ['', 'x', ...]
|
|
// Drive and UNC both considered absolute on windows
|
|
/**
|
|
* True if the pattern is rooted on an absolute path
|
|
*/
|
|
isAbsolute() {
|
|
const pl = this.#patternList;
|
|
return this.#isAbsolute !== undefined
|
|
? this.#isAbsolute
|
|
: (this.#isAbsolute =
|
|
(pl[0] === '' && pl.length > 1) ||
|
|
this.isDrive() ||
|
|
this.isUNC());
|
|
}
|
|
/**
|
|
* consume the root of the pattern, and return it
|
|
*/
|
|
root() {
|
|
const p = this.#patternList[0];
|
|
return typeof p === 'string' && this.isAbsolute() && this.#index === 0
|
|
? p
|
|
: '';
|
|
}
|
|
/**
|
|
* Check to see if the current globstar pattern is allowed to follow
|
|
* a symbolic link.
|
|
*/
|
|
checkFollowGlobstar() {
|
|
return !(this.#index === 0 ||
|
|
!this.isGlobstar() ||
|
|
!this.#followGlobstar);
|
|
}
|
|
/**
|
|
* Mark that the current globstar pattern is following a symbolic link
|
|
*/
|
|
markFollowGlobstar() {
|
|
if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar)
|
|
return false;
|
|
this.#followGlobstar = false;
|
|
return true;
|
|
}
|
|
}
|
|
exports.Pattern = Pattern;
|
|
//# sourceMappingURL=pattern.js.map
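// Illustrative note (not part of the library): a Pattern is a read-only cursor
// over one expanded minimatch set plus its glob parts. For a pattern such as
// 'C:/x/*.js' on win32, the root normalization in the constructor above turns
// the parts into ['C:/', 'x', <RegExp for *.js>], so root() returns 'C:/',
// isDrive() and isAbsolute() return true, and rest() steps the cursor to 'x'.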
|
|
|
|
/***/ }),
|
|
|
|
/***/ 4628:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
// synchronous utility for filtering entries and calculating subwalks
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.Processor = exports.SubWalks = exports.MatchRecord = exports.HasWalkedCache = void 0;
|
|
const minimatch_1 = __nccwpck_require__(266);
|
|
/**
|
|
* A cache of which patterns have been processed for a given Path
|
|
*/
|
|
class HasWalkedCache {
|
|
store;
|
|
constructor(store = new Map()) {
|
|
this.store = store;
|
|
}
|
|
copy() {
|
|
return new HasWalkedCache(new Map(this.store));
|
|
}
|
|
hasWalked(target, pattern) {
|
|
return this.store.get(target.fullpath())?.has(pattern.globString());
|
|
}
|
|
storeWalked(target, pattern) {
|
|
const fullpath = target.fullpath();
|
|
const cached = this.store.get(fullpath);
|
|
if (cached)
|
|
cached.add(pattern.globString());
|
|
else
|
|
this.store.set(fullpath, new Set([pattern.globString()]));
|
|
}
|
|
}
|
|
exports.HasWalkedCache = HasWalkedCache;
|
|
/**
 * A record of which paths have been matched in a given walk step,
 * whether they are only considered a match if they are a directory,
 * and whether their absolute or relative path should be returned.
 */
class MatchRecord {
    store = new Map();
    add(target, absolute, ifDir) {
        const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0);
        const current = this.store.get(target);
        this.store.set(target, current === undefined ? n : n & current);
    }
    // match, absolute, ifdir
    entries() {
        return [...this.store.entries()].map(([path, n]) => [
            path,
            !!(n & 2),
            !!(n & 1),
        ]);
    }
}
exports.MatchRecord = MatchRecord;
|
|
/**
|
|
* A collection of patterns that must be processed in a subsequent step
|
|
* for a given path.
|
|
*/
|
|
class SubWalks {
|
|
store = new Map();
|
|
add(target, pattern) {
|
|
if (!target.canReaddir()) {
|
|
return;
|
|
}
|
|
const subs = this.store.get(target);
|
|
if (subs) {
|
|
if (!subs.find(p => p.globString() === pattern.globString())) {
|
|
subs.push(pattern);
|
|
}
|
|
}
|
|
else
|
|
this.store.set(target, [pattern]);
|
|
}
|
|
get(target) {
|
|
const subs = this.store.get(target);
|
|
/* c8 ignore start */
|
|
if (!subs) {
|
|
throw new Error('attempting to walk unknown path');
|
|
}
|
|
/* c8 ignore stop */
|
|
return subs;
|
|
}
|
|
entries() {
|
|
return this.keys().map(k => [k, this.store.get(k)]);
|
|
}
|
|
keys() {
|
|
return [...this.store.keys()].filter(t => t.canReaddir());
|
|
}
|
|
}
|
|
exports.SubWalks = SubWalks;
|
|
/**
|
|
* The class that processes patterns for a given path.
|
|
*
|
|
* Handles child entry filtering, and determining whether a path's
|
|
* directory contents must be read.
|
|
*/
|
|
class Processor {
|
|
hasWalkedCache;
|
|
matches = new MatchRecord();
|
|
subwalks = new SubWalks();
|
|
patterns;
|
|
follow;
|
|
dot;
|
|
opts;
|
|
constructor(opts, hasWalkedCache) {
|
|
this.opts = opts;
|
|
this.follow = !!opts.follow;
|
|
this.dot = !!opts.dot;
|
|
this.hasWalkedCache = hasWalkedCache
|
|
? hasWalkedCache.copy()
|
|
: new HasWalkedCache();
|
|
}
|
|
processPatterns(target, patterns) {
|
|
this.patterns = patterns;
|
|
const processingSet = patterns.map(p => [target, p]);
|
|
// map of paths to the magic-starting subwalks they need to walk
|
|
// first item in patterns is the filter
|
|
for (let [t, pattern] of processingSet) {
|
|
this.hasWalkedCache.storeWalked(t, pattern);
|
|
const root = pattern.root();
|
|
const absolute = pattern.isAbsolute() && this.opts.absolute !== false;
|
|
// start absolute patterns at root
|
|
if (root) {
|
|
t = t.resolve(root === '/' && this.opts.root !== undefined
|
|
? this.opts.root
|
|
: root);
|
|
const rest = pattern.rest();
|
|
if (!rest) {
|
|
this.matches.add(t, true, false);
|
|
continue;
|
|
}
|
|
else {
|
|
pattern = rest;
|
|
}
|
|
}
|
|
if (t.isENOENT())
|
|
continue;
|
|
let p;
|
|
let rest;
|
|
let changed = false;
|
|
while (typeof (p = pattern.pattern()) === 'string' &&
|
|
(rest = pattern.rest())) {
|
|
const c = t.resolve(p);
|
|
t = c;
|
|
pattern = rest;
|
|
changed = true;
|
|
}
|
|
p = pattern.pattern();
|
|
rest = pattern.rest();
|
|
if (changed) {
|
|
if (this.hasWalkedCache.hasWalked(t, pattern))
|
|
continue;
|
|
this.hasWalkedCache.storeWalked(t, pattern);
|
|
}
|
|
// now we have either a final string for a known entry,
|
|
// more strings for an unknown entry,
|
|
// or a pattern starting with magic, mounted on t.
|
|
if (typeof p === 'string') {
|
|
// must not be final entry, otherwise we would have
|
|
// concatenated it earlier.
|
|
const ifDir = p === '..' || p === '' || p === '.';
|
|
this.matches.add(t.resolve(p), absolute, ifDir);
|
|
continue;
|
|
}
|
|
else if (p === minimatch_1.GLOBSTAR) {
|
|
// if no rest, match and subwalk pattern
|
|
// if rest, process rest and subwalk pattern
|
|
// if it's a symlink, but we didn't get here by way of a
|
|
// globstar match (meaning it's the first time THIS globstar
|
|
// has traversed a symlink), then we follow it. Otherwise, stop.
|
|
if (!t.isSymbolicLink() ||
|
|
this.follow ||
|
|
pattern.checkFollowGlobstar()) {
|
|
this.subwalks.add(t, pattern);
|
|
}
|
|
const rp = rest?.pattern();
|
|
const rrest = rest?.rest();
|
|
if (!rest || ((rp === '' || rp === '.') && !rrest)) {
|
|
// only HAS to be a dir if it ends in **/ or **/.
|
|
// but ending in ** will match files as well.
|
|
this.matches.add(t, absolute, rp === '' || rp === '.');
|
|
}
|
|
else {
|
|
if (rp === '..') {
|
|
// this would mean you're matching **/.. at the fs root,
|
|
// and no thanks, I'm not gonna test that specific case.
|
|
/* c8 ignore start */
|
|
const tp = t.parent || t;
|
|
/* c8 ignore stop */
|
|
if (!rrest)
|
|
this.matches.add(tp, absolute, true);
|
|
else if (!this.hasWalkedCache.hasWalked(tp, rrest)) {
|
|
this.subwalks.add(tp, rrest);
|
|
}
|
|
}
|
|
}
|
|
}
|
|
else if (p instanceof RegExp) {
|
|
this.subwalks.add(t, pattern);
|
|
}
|
|
}
|
|
return this;
|
|
}
|
|
subwalkTargets() {
|
|
return this.subwalks.keys();
|
|
}
|
|
child() {
|
|
return new Processor(this.opts, this.hasWalkedCache);
|
|
}
|
|
// return a new Processor containing the subwalks for each
|
|
// child entry, and a set of matches, and
|
|
// a hasWalkedCache that's a copy of this one
|
|
// then we're going to call
|
|
filterEntries(parent, entries) {
|
|
const patterns = this.subwalks.get(parent);
|
|
// put matches and entry walks into the results processor
|
|
const results = this.child();
|
|
for (const e of entries) {
|
|
for (const pattern of patterns) {
|
|
const absolute = pattern.isAbsolute();
|
|
const p = pattern.pattern();
|
|
const rest = pattern.rest();
|
|
if (p === minimatch_1.GLOBSTAR) {
|
|
results.testGlobstar(e, pattern, rest, absolute);
|
|
}
|
|
else if (p instanceof RegExp) {
|
|
results.testRegExp(e, p, rest, absolute);
|
|
}
|
|
else {
|
|
results.testString(e, p, rest, absolute);
|
|
}
|
|
}
|
|
}
|
|
return results;
|
|
}
|
|
testGlobstar(e, pattern, rest, absolute) {
|
|
if (this.dot || !e.name.startsWith('.')) {
|
|
if (!pattern.hasMore()) {
|
|
this.matches.add(e, absolute, false);
|
|
}
|
|
if (e.canReaddir()) {
|
|
// if we're in follow mode or it's not a symlink, just keep
|
|
// testing the same pattern. If there's more after the globstar,
|
|
// then this symlink consumes the globstar. If not, then we can
|
|
// follow at most ONE symlink along the way, so we mark it, which
|
|
// also checks to ensure that it wasn't already marked.
|
|
if (this.follow || !e.isSymbolicLink()) {
|
|
this.subwalks.add(e, pattern);
|
|
}
|
|
else if (e.isSymbolicLink()) {
|
|
if (rest && pattern.checkFollowGlobstar()) {
|
|
this.subwalks.add(e, rest);
|
|
}
|
|
else if (pattern.markFollowGlobstar()) {
|
|
this.subwalks.add(e, pattern);
|
|
}
|
|
}
|
|
}
|
|
}
|
|
// if the NEXT thing matches this entry, then also add
|
|
// the rest.
|
|
if (rest) {
|
|
const rp = rest.pattern();
|
|
if (typeof rp === 'string' &&
|
|
// dots and empty were handled already
|
|
rp !== '..' &&
|
|
rp !== '' &&
|
|
rp !== '.') {
|
|
this.testString(e, rp, rest.rest(), absolute);
|
|
}
|
|
else if (rp === '..') {
|
|
/* c8 ignore start */
|
|
const ep = e.parent || e;
|
|
/* c8 ignore stop */
|
|
this.subwalks.add(ep, rest);
|
|
}
|
|
else if (rp instanceof RegExp) {
|
|
this.testRegExp(e, rp, rest.rest(), absolute);
|
|
}
|
|
}
|
|
}
|
|
testRegExp(e, p, rest, absolute) {
|
|
if (!p.test(e.name))
|
|
return;
|
|
if (!rest) {
|
|
this.matches.add(e, absolute, false);
|
|
}
|
|
else {
|
|
this.subwalks.add(e, rest);
|
|
}
|
|
}
|
|
testString(e, p, rest, absolute) {
|
|
// should never happen?
|
|
if (!e.isNamed(p))
|
|
return;
|
|
if (!rest) {
|
|
this.matches.add(e, absolute, false);
|
|
}
|
|
else {
|
|
this.subwalks.add(e, rest);
|
|
}
|
|
}
|
|
}
|
|
exports.Processor = Processor;
|
|
//# sourceMappingURL=processor.js.map
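// Illustrative note on the MatchRecord bit flags used above (a sketch, not
// library code): each matched Path is stored with
// n = (absolute ? 2 : 0) | (ifDir ? 1 : 0), and repeated adds AND the flags
// together, so a path only keeps "absolute" / "only-if-directory" if every
// pattern that matched it agreed. For example, add(p, true, true) followed by
// add(p, true, false) stores 3 & 2 = 2, and entries() reports [p, true, false].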
|
|
|
|
/***/ }),
|
|
|
|
/***/ 153:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.GlobStream = exports.GlobWalker = exports.GlobUtil = void 0;
|
|
/**
|
|
* Single-use utility classes to provide functionality to the {@link Glob}
|
|
* methods.
|
|
*
|
|
* @module
|
|
*/
|
|
const minipass_1 = __nccwpck_require__(8865);
|
|
const ignore_js_1 = __nccwpck_require__(9703);
|
|
const processor_js_1 = __nccwpck_require__(4628);
|
|
const makeIgnore = (ignore, opts) => typeof ignore === 'string'
|
|
? new ignore_js_1.Ignore([ignore], opts)
|
|
: Array.isArray(ignore)
|
|
? new ignore_js_1.Ignore(ignore, opts)
|
|
: ignore;
|
|
/**
|
|
* basic walking utilities that all the glob walker types use
|
|
*/
|
|
class GlobUtil {
|
|
path;
|
|
patterns;
|
|
opts;
|
|
seen = new Set();
|
|
paused = false;
|
|
aborted = false;
|
|
#onResume = [];
|
|
#ignore;
|
|
#sep;
|
|
signal;
|
|
maxDepth;
|
|
constructor(patterns, path, opts) {
|
|
this.patterns = patterns;
|
|
this.path = path;
|
|
this.opts = opts;
|
|
this.#sep = !opts.posix && opts.platform === 'win32' ? '\\' : '/';
|
|
if (opts.ignore) {
|
|
this.#ignore = makeIgnore(opts.ignore, opts);
|
|
}
|
|
// ignore, always set with maxDepth, but it's optional on the
|
|
// GlobOptions type
|
|
/* c8 ignore start */
|
|
this.maxDepth = opts.maxDepth || Infinity;
|
|
/* c8 ignore stop */
|
|
if (opts.signal) {
|
|
this.signal = opts.signal;
|
|
this.signal.addEventListener('abort', () => {
|
|
this.#onResume.length = 0;
|
|
});
|
|
}
|
|
}
|
|
#ignored(path) {
|
|
return this.seen.has(path) || !!this.#ignore?.ignored?.(path);
|
|
}
|
|
#childrenIgnored(path) {
|
|
return !!this.#ignore?.childrenIgnored?.(path);
|
|
}
|
|
// backpressure mechanism
|
|
pause() {
|
|
this.paused = true;
|
|
}
|
|
resume() {
|
|
/* c8 ignore start */
|
|
if (this.signal?.aborted)
|
|
return;
|
|
/* c8 ignore stop */
|
|
this.paused = false;
|
|
let fn = undefined;
|
|
while (!this.paused && (fn = this.#onResume.shift())) {
|
|
fn();
|
|
}
|
|
}
|
|
onResume(fn) {
|
|
if (this.signal?.aborted)
|
|
return;
|
|
/* c8 ignore start */
|
|
if (!this.paused) {
|
|
fn();
|
|
}
|
|
else {
|
|
/* c8 ignore stop */
|
|
this.#onResume.push(fn);
|
|
}
|
|
}
|
|
// do the requisite realpath/stat checking, and return the path
|
|
// to add or undefined to filter it out.
|
|
async matchCheck(e, ifDir) {
|
|
if (ifDir && this.opts.nodir)
|
|
return undefined;
|
|
let rpc;
|
|
if (this.opts.realpath) {
|
|
rpc = e.realpathCached() || (await e.realpath());
|
|
if (!rpc)
|
|
return undefined;
|
|
e = rpc;
|
|
}
|
|
const needStat = e.isUnknown() || this.opts.stat;
|
|
return this.matchCheckTest(needStat ? await e.lstat() : e, ifDir);
|
|
}
|
|
matchCheckTest(e, ifDir) {
|
|
return e &&
|
|
(this.maxDepth === Infinity || e.depth() <= this.maxDepth) &&
|
|
(!ifDir || e.canReaddir()) &&
|
|
(!this.opts.nodir || !e.isDirectory()) &&
|
|
!this.#ignored(e)
|
|
? e
|
|
: undefined;
|
|
}
|
|
matchCheckSync(e, ifDir) {
|
|
if (ifDir && this.opts.nodir)
|
|
return undefined;
|
|
let rpc;
|
|
if (this.opts.realpath) {
|
|
rpc = e.realpathCached() || e.realpathSync();
|
|
if (!rpc)
|
|
return undefined;
|
|
e = rpc;
|
|
}
|
|
const needStat = e.isUnknown() || this.opts.stat;
|
|
return this.matchCheckTest(needStat ? e.lstatSync() : e, ifDir);
|
|
}
|
|
matchFinish(e, absolute) {
|
|
if (this.#ignored(e))
|
|
return;
|
|
const abs = this.opts.absolute === undefined ? absolute : this.opts.absolute;
|
|
this.seen.add(e);
|
|
const mark = this.opts.mark && e.isDirectory() ? this.#sep : '';
|
|
// ok, we have what we need!
|
|
if (this.opts.withFileTypes) {
|
|
this.matchEmit(e);
|
|
}
|
|
else if (abs) {
|
|
const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath();
|
|
this.matchEmit(abs + mark);
|
|
}
|
|
else {
|
|
const rel = this.opts.posix ? e.relativePosix() : e.relative();
|
|
const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep)
|
|
? '.' + this.#sep
|
|
: '';
|
|
this.matchEmit(!rel ? '.' + mark : pre + rel + mark);
|
|
}
|
|
}
|
|
async match(e, absolute, ifDir) {
|
|
const p = await this.matchCheck(e, ifDir);
|
|
if (p)
|
|
this.matchFinish(p, absolute);
|
|
}
|
|
matchSync(e, absolute, ifDir) {
|
|
const p = this.matchCheckSync(e, ifDir);
|
|
if (p)
|
|
this.matchFinish(p, absolute);
|
|
}
|
|
walkCB(target, patterns, cb) {
|
|
/* c8 ignore start */
|
|
if (this.signal?.aborted)
|
|
cb();
|
|
/* c8 ignore stop */
|
|
this.walkCB2(target, patterns, new processor_js_1.Processor(this.opts), cb);
|
|
}
|
|
walkCB2(target, patterns, processor, cb) {
|
|
if (this.#childrenIgnored(target))
|
|
return cb();
|
|
if (this.signal?.aborted)
|
|
cb();
|
|
if (this.paused) {
|
|
this.onResume(() => this.walkCB2(target, patterns, processor, cb));
|
|
return;
|
|
}
|
|
processor.processPatterns(target, patterns);
|
|
// done processing. all of the above is sync, can be abstracted out.
|
|
// subwalks is a map of paths to the entry filters they need
|
|
// matches is a map of paths to [absolute, ifDir] tuples.
|
|
let tasks = 1;
|
|
const next = () => {
|
|
if (--tasks === 0)
|
|
cb();
|
|
};
|
|
for (const [m, absolute, ifDir] of processor.matches.entries()) {
|
|
if (this.#ignored(m))
|
|
continue;
|
|
tasks++;
|
|
this.match(m, absolute, ifDir).then(() => next());
|
|
}
|
|
for (const t of processor.subwalkTargets()) {
|
|
if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {
|
|
continue;
|
|
}
|
|
tasks++;
|
|
const childrenCached = t.readdirCached();
|
|
if (t.calledReaddir())
|
|
this.walkCB3(t, childrenCached, processor, next);
|
|
else {
|
|
t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true);
|
|
}
|
|
}
|
|
next();
|
|
}
|
|
walkCB3(target, entries, processor, cb) {
|
|
processor = processor.filterEntries(target, entries);
|
|
let tasks = 1;
|
|
const next = () => {
|
|
if (--tasks === 0)
|
|
cb();
|
|
};
|
|
for (const [m, absolute, ifDir] of processor.matches.entries()) {
|
|
if (this.#ignored(m))
|
|
continue;
|
|
tasks++;
|
|
this.match(m, absolute, ifDir).then(() => next());
|
|
}
|
|
for (const [target, patterns] of processor.subwalks.entries()) {
|
|
tasks++;
|
|
this.walkCB2(target, patterns, processor.child(), next);
|
|
}
|
|
next();
|
|
}
|
|
walkCBSync(target, patterns, cb) {
|
|
/* c8 ignore start */
|
|
if (this.signal?.aborted)
|
|
cb();
|
|
/* c8 ignore stop */
|
|
this.walkCB2Sync(target, patterns, new processor_js_1.Processor(this.opts), cb);
|
|
}
|
|
walkCB2Sync(target, patterns, processor, cb) {
|
|
if (this.#childrenIgnored(target))
|
|
return cb();
|
|
if (this.signal?.aborted)
|
|
cb();
|
|
if (this.paused) {
|
|
this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb));
|
|
return;
|
|
}
|
|
processor.processPatterns(target, patterns);
|
|
// done processing. all of the above is sync, can be abstracted out.
|
|
// subwalks is a map of paths to the entry filters they need
|
|
// matches is a map of paths to [absolute, ifDir] tuples.
|
|
let tasks = 1;
|
|
const next = () => {
|
|
if (--tasks === 0)
|
|
cb();
|
|
};
|
|
for (const [m, absolute, ifDir] of processor.matches.entries()) {
|
|
if (this.#ignored(m))
|
|
continue;
|
|
this.matchSync(m, absolute, ifDir);
|
|
}
|
|
for (const t of processor.subwalkTargets()) {
|
|
if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {
|
|
continue;
|
|
}
|
|
tasks++;
|
|
const children = t.readdirSync();
|
|
this.walkCB3Sync(t, children, processor, next);
|
|
}
|
|
next();
|
|
}
|
|
walkCB3Sync(target, entries, processor, cb) {
|
|
processor = processor.filterEntries(target, entries);
|
|
let tasks = 1;
|
|
const next = () => {
|
|
if (--tasks === 0)
|
|
cb();
|
|
};
|
|
for (const [m, absolute, ifDir] of processor.matches.entries()) {
|
|
if (this.#ignored(m))
|
|
continue;
|
|
this.matchSync(m, absolute, ifDir);
|
|
}
|
|
for (const [target, patterns] of processor.subwalks.entries()) {
|
|
tasks++;
|
|
this.walkCB2Sync(target, patterns, processor.child(), next);
|
|
}
|
|
next();
|
|
}
|
|
}
|
|
exports.GlobUtil = GlobUtil;
|
|
class GlobWalker extends GlobUtil {
|
|
matches;
|
|
constructor(patterns, path, opts) {
|
|
super(patterns, path, opts);
|
|
this.matches = new Set();
|
|
}
|
|
matchEmit(e) {
|
|
this.matches.add(e);
|
|
}
|
|
async walk() {
|
|
if (this.signal?.aborted)
|
|
throw this.signal.reason;
|
|
if (this.path.isUnknown()) {
|
|
await this.path.lstat();
|
|
}
|
|
await new Promise((res, rej) => {
|
|
this.walkCB(this.path, this.patterns, () => {
|
|
if (this.signal?.aborted) {
|
|
rej(this.signal.reason);
|
|
}
|
|
else {
|
|
res(this.matches);
|
|
}
|
|
});
|
|
});
|
|
return this.matches;
|
|
}
|
|
walkSync() {
|
|
if (this.signal?.aborted)
|
|
throw this.signal.reason;
|
|
if (this.path.isUnknown()) {
|
|
this.path.lstatSync();
|
|
}
|
|
// nothing for the callback to do, because this never pauses
|
|
this.walkCBSync(this.path, this.patterns, () => {
|
|
if (this.signal?.aborted)
|
|
throw this.signal.reason;
|
|
});
|
|
return this.matches;
|
|
}
|
|
}
|
|
exports.GlobWalker = GlobWalker;
|
|
class GlobStream extends GlobUtil {
|
|
results;
|
|
constructor(patterns, path, opts) {
|
|
super(patterns, path, opts);
|
|
this.results = new minipass_1.Minipass({
|
|
signal: this.signal,
|
|
objectMode: true,
|
|
});
|
|
this.results.on('drain', () => this.resume());
|
|
this.results.on('resume', () => this.resume());
|
|
}
|
|
matchEmit(e) {
|
|
this.results.write(e);
|
|
if (!this.results.flowing)
|
|
this.pause();
|
|
}
|
|
stream() {
|
|
const target = this.path;
|
|
if (target.isUnknown()) {
|
|
target.lstat().then(() => {
|
|
this.walkCB(target, this.patterns, () => this.results.end());
|
|
});
|
|
}
|
|
else {
|
|
this.walkCB(target, this.patterns, () => this.results.end());
|
|
}
|
|
return this.results;
|
|
}
|
|
streamSync() {
|
|
if (this.path.isUnknown()) {
|
|
this.path.lstatSync();
|
|
}
|
|
this.walkCBSync(this.path, this.patterns, () => this.results.end());
|
|
return this.results;
|
|
}
|
|
}
|
|
exports.GlobStream = GlobStream;
|
|
//# sourceMappingURL=walker.js.map
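// Illustrative note (not part of the library): GlobStream pauses the walk in
// matchEmit() whenever the underlying Minipass stops flowing and resumes on
// 'drain'/'resume', so slow consumers apply backpressure. A minimal sketch,
// assuming globStream from this bundle:
//
//   const stream = globStream('**/*.log', { nodir: true });
//   stream.on('data', path => process.stdout.write(path + '\n'));
//   stream.on('end', () => console.log('walk complete'));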
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5934:
/***/ ((__unused_webpack_module, exports) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.assertValidPattern = void 0;
const MAX_PATTERN_LENGTH = 1024 * 64;
const assertValidPattern = (pattern) => {
    if (typeof pattern !== 'string') {
        throw new TypeError('invalid pattern');
    }
    if (pattern.length > MAX_PATTERN_LENGTH) {
        throw new TypeError('pattern is too long');
    }
};
exports.assertValidPattern = assertValidPattern;
//# sourceMappingURL=assert-valid-pattern.js.map
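// Illustrative sketch of the guard above (hypothetical calls, assuming the
// function is imported from this module):
//
//   assertValidPattern('src/**');           // ok
//   assertValidPattern(42);                 // throws TypeError('invalid pattern')
//   assertValidPattern('a'.repeat(65537));  // throws TypeError('pattern is too long')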
|
|
|
|
/***/ }),
|
|
|
|
/***/ 7642:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
// parse a single path portion
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.AST = void 0;
|
|
const brace_expressions_js_1 = __nccwpck_require__(314);
|
|
const unescape_js_1 = __nccwpck_require__(9820);
|
|
const types = new Set(['!', '?', '+', '*', '@']);
|
|
const isExtglobType = (c) => types.has(c);
|
|
// Patterns that get prepended to bind to the start of either the
|
|
// entire string, or just a single path portion, to prevent dots
|
|
// and/or traversal patterns, when needed.
|
|
// Exts don't need the ^ or / bit, because the root binds that already.
|
|
const startNoTraversal = '(?!\\.\\.?(?:$|/))';
|
|
const startNoDot = '(?!\\.)';
|
|
// characters that indicate a start of pattern needs the "no dots" bit,
|
|
// because a dot *might* be matched. ( is not in the list, because in
|
|
// the case of a child extglob, it will handle the prevention itself.
|
|
const addPatternStart = new Set(['[', '.']);
|
|
// cases where traversal is A-OK, no dot prevention needed
|
|
const justDots = new Set(['..', '.']);
|
|
const reSpecials = new Set('().*{}+?[]^$\\!');
|
|
const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
|
|
// any single thing other than /
|
|
const qmark = '[^/]';
|
|
// * => any number of characters
|
|
const star = qmark + '*?';
|
|
// use + when we need to ensure that *something* matches, because the * is
|
|
// the only thing in the path portion.
|
|
const starNoEmpty = qmark + '+?';
|
|
// remove the \ chars that we added if we end up doing a nonmagic compare
|
|
// const deslash = (s: string) => s.replace(/\\(.)/g, '$1')
|
|
class AST {
|
|
type;
|
|
#root;
|
|
#hasMagic;
|
|
#uflag = false;
|
|
#parts = [];
|
|
#parent;
|
|
#parentIndex;
|
|
#negs;
|
|
#filledNegs = false;
|
|
#options;
|
|
#toString;
|
|
// set to true if it's an extglob with no children
|
|
// (which really means one child of '')
|
|
#emptyExt = false;
|
|
constructor(type, parent, options = {}) {
|
|
this.type = type;
|
|
// extglobs are inherently magical
|
|
if (type)
|
|
this.#hasMagic = true;
|
|
this.#parent = parent;
|
|
this.#root = this.#parent ? this.#parent.#root : this;
|
|
this.#options = this.#root === this ? options : this.#root.#options;
|
|
this.#negs = this.#root === this ? [] : this.#root.#negs;
|
|
if (type === '!' && !this.#root.#filledNegs)
|
|
this.#negs.push(this);
|
|
this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
|
|
}
|
|
get hasMagic() {
|
|
/* c8 ignore start */
|
|
if (this.#hasMagic !== undefined)
|
|
return this.#hasMagic;
|
|
/* c8 ignore stop */
|
|
for (const p of this.#parts) {
|
|
if (typeof p === 'string')
|
|
continue;
|
|
if (p.type || p.hasMagic)
|
|
return (this.#hasMagic = true);
|
|
}
|
|
// note: will be undefined until we generate the regexp src and find out
|
|
return this.#hasMagic;
|
|
}
|
|
// reconstructs the pattern
|
|
toString() {
|
|
if (this.#toString !== undefined)
|
|
return this.#toString;
|
|
if (!this.type) {
|
|
return (this.#toString = this.#parts.map(p => String(p)).join(''));
|
|
}
|
|
else {
|
|
return (this.#toString =
|
|
this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')');
|
|
}
|
|
}
|
|
#fillNegs() {
|
|
/* c8 ignore start */
|
|
if (this !== this.#root)
|
|
throw new Error('should only call on root');
|
|
if (this.#filledNegs)
|
|
return this;
|
|
/* c8 ignore stop */
|
|
// call toString() once to fill this out
|
|
this.toString();
|
|
this.#filledNegs = true;
|
|
let n;
|
|
while ((n = this.#negs.pop())) {
|
|
if (n.type !== '!')
|
|
continue;
|
|
// walk up the tree, appending everything that comes AFTER parentIndex
|
|
let p = n;
|
|
let pp = p.#parent;
|
|
while (pp) {
|
|
for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
|
|
for (const part of n.#parts) {
|
|
/* c8 ignore start */
|
|
if (typeof part === 'string') {
|
|
throw new Error('string part in extglob AST??');
|
|
}
|
|
/* c8 ignore stop */
|
|
part.copyIn(pp.#parts[i]);
|
|
}
|
|
}
|
|
p = pp;
|
|
pp = p.#parent;
|
|
}
|
|
}
|
|
return this;
|
|
}
|
|
push(...parts) {
|
|
for (const p of parts) {
|
|
if (p === '')
|
|
continue;
|
|
/* c8 ignore start */
|
|
if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
|
|
throw new Error('invalid part: ' + p);
|
|
}
|
|
/* c8 ignore stop */
|
|
this.#parts.push(p);
|
|
}
|
|
}
|
|
toJSON() {
|
|
const ret = this.type === null
|
|
? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON()))
|
|
: [this.type, ...this.#parts.map(p => p.toJSON())];
|
|
if (this.isStart() && !this.type)
|
|
ret.unshift([]);
|
|
if (this.isEnd() &&
|
|
(this === this.#root ||
|
|
(this.#root.#filledNegs && this.#parent?.type === '!'))) {
|
|
ret.push({});
|
|
}
|
|
return ret;
|
|
}
|
|
isStart() {
|
|
if (this.#root === this)
|
|
return true;
|
|
// if (this.type) return !!this.#parent?.isStart()
|
|
if (!this.#parent?.isStart())
|
|
return false;
|
|
if (this.#parentIndex === 0)
|
|
return true;
|
|
// if everything AHEAD of this is a negation, then it's still the "start"
|
|
const p = this.#parent;
|
|
for (let i = 0; i < this.#parentIndex; i++) {
|
|
const pp = p.#parts[i];
|
|
if (!(pp instanceof AST && pp.type === '!')) {
|
|
return false;
|
|
}
|
|
}
|
|
return true;
|
|
}
|
|
isEnd() {
|
|
if (this.#root === this)
|
|
return true;
|
|
if (this.#parent?.type === '!')
|
|
return true;
|
|
if (!this.#parent?.isEnd())
|
|
return false;
|
|
if (!this.type)
|
|
return this.#parent?.isEnd();
|
|
// if not root, it'll always have a parent
|
|
/* c8 ignore start */
|
|
const pl = this.#parent ? this.#parent.#parts.length : 0;
|
|
/* c8 ignore stop */
|
|
return this.#parentIndex === pl - 1;
|
|
}
|
|
copyIn(part) {
|
|
if (typeof part === 'string')
|
|
this.push(part);
|
|
else
|
|
this.push(part.clone(this));
|
|
}
|
|
clone(parent) {
|
|
const c = new AST(this.type, parent);
|
|
for (const p of this.#parts) {
|
|
c.copyIn(p);
|
|
}
|
|
return c;
|
|
}
|
|
static #parseAST(str, ast, pos, opt) {
|
|
let escaping = false;
|
|
let inBrace = false;
|
|
let braceStart = -1;
|
|
let braceNeg = false;
|
|
if (ast.type === null) {
|
|
// outside of a extglob, append until we find a start
|
|
let i = pos;
|
|
let acc = '';
|
|
while (i < str.length) {
|
|
const c = str.charAt(i++);
|
|
// still accumulate escapes at this point, but we do ignore
|
|
// starts that are escaped
|
|
if (escaping || c === '\\') {
|
|
escaping = !escaping;
|
|
acc += c;
|
|
continue;
|
|
}
|
|
if (inBrace) {
|
|
if (i === braceStart + 1) {
|
|
if (c === '^' || c === '!') {
|
|
braceNeg = true;
|
|
}
|
|
}
|
|
else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
|
|
inBrace = false;
|
|
}
|
|
acc += c;
|
|
continue;
|
|
}
|
|
else if (c === '[') {
|
|
inBrace = true;
|
|
braceStart = i;
|
|
braceNeg = false;
|
|
acc += c;
|
|
continue;
|
|
}
|
|
if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
|
|
ast.push(acc);
|
|
acc = '';
|
|
const ext = new AST(c, ast);
|
|
i = AST.#parseAST(str, ext, i, opt);
|
|
ast.push(ext);
|
|
continue;
|
|
}
|
|
acc += c;
|
|
}
|
|
ast.push(acc);
|
|
return i;
|
|
}
|
|
// some kind of extglob, pos is at the (
|
|
// find the next | or )
|
|
let i = pos + 1;
|
|
let part = new AST(null, ast);
|
|
const parts = [];
|
|
let acc = '';
|
|
while (i < str.length) {
|
|
const c = str.charAt(i++);
|
|
// still accumulate escapes at this point, but we do ignore
|
|
// starts that are escaped
|
|
if (escaping || c === '\\') {
|
|
escaping = !escaping;
|
|
acc += c;
|
|
continue;
|
|
}
|
|
if (inBrace) {
|
|
if (i === braceStart + 1) {
|
|
if (c === '^' || c === '!') {
|
|
braceNeg = true;
|
|
}
|
|
}
|
|
else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
|
|
inBrace = false;
|
|
}
|
|
acc += c;
|
|
continue;
|
|
}
|
|
else if (c === '[') {
|
|
inBrace = true;
|
|
braceStart = i;
|
|
braceNeg = false;
|
|
acc += c;
|
|
continue;
|
|
}
|
|
if (isExtglobType(c) && str.charAt(i) === '(') {
|
|
part.push(acc);
|
|
acc = '';
|
|
const ext = new AST(c, part);
|
|
part.push(ext);
|
|
i = AST.#parseAST(str, ext, i, opt);
|
|
continue;
|
|
}
|
|
if (c === '|') {
|
|
part.push(acc);
|
|
acc = '';
|
|
parts.push(part);
|
|
part = new AST(null, ast);
|
|
continue;
|
|
}
|
|
if (c === ')') {
|
|
if (acc === '' && ast.#parts.length === 0) {
|
|
ast.#emptyExt = true;
|
|
}
|
|
part.push(acc);
|
|
acc = '';
|
|
ast.push(...parts, part);
|
|
return i;
|
|
}
|
|
acc += c;
|
|
}
|
|
// unfinished extglob
|
|
// if we got here, it was a malformed extglob! not an extglob, but
|
|
// maybe something else in there.
|
|
ast.type = null;
|
|
ast.#hasMagic = undefined;
|
|
ast.#parts = [str.substring(pos - 1)];
|
|
return i;
|
|
}
|
|
static fromGlob(pattern, options = {}) {
|
|
const ast = new AST(null, undefined, options);
|
|
AST.#parseAST(pattern, ast, 0, options);
|
|
return ast;
|
|
}
|
|
// returns the regular expression if there's magic, or the unescaped
|
|
// string if not.
|
|
toMMPattern() {
|
|
// should only be called on root
|
|
/* c8 ignore start */
|
|
if (this !== this.#root)
|
|
return this.#root.toMMPattern();
|
|
/* c8 ignore stop */
|
|
const glob = this.toString();
|
|
const [re, body, hasMagic, uflag] = this.toRegExpSource();
|
|
// if we're in nocase mode, and not nocaseMagicOnly, then we do
|
|
// still need a regular expression if we have to case-insensitively
|
|
// match capital/lowercase characters.
|
|
const anyMagic = hasMagic ||
|
|
this.#hasMagic ||
|
|
(this.#options.nocase &&
|
|
!this.#options.nocaseMagicOnly &&
|
|
glob.toUpperCase() !== glob.toLowerCase());
|
|
if (!anyMagic) {
|
|
return body;
|
|
}
|
|
const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '');
|
|
return Object.assign(new RegExp(`^${re}$`, flags), {
|
|
_src: re,
|
|
_glob: glob,
|
|
});
|
|
}
|
|
// returns the string match, the regexp source, whether there's magic
|
|
// in the regexp (so a regular expression is required) and whether or
|
|
// not the uflag is needed for the regular expression (for posix classes)
|
|
// TODO: instead of injecting the start/end at this point, just return
|
|
// the BODY of the regexp, along with the start/end portions suitable
|
|
// for binding the start/end in either a joined full-path makeRe context
|
|
// (where we bind to (^|/), or a standalone matchPart context (where
|
|
// we bind to ^, and not /). Otherwise slashes get duped!
|
|
//
|
|
// In part-matching mode, the start is:
|
|
// - if not isStart: nothing
|
|
// - if traversal possible, but not allowed: ^(?!\.\.?$)
|
|
// - if dots allowed or not possible: ^
|
|
// - if dots possible and not allowed: ^(?!\.)
|
|
// end is:
|
|
// - if not isEnd(): nothing
|
|
// - else: $
|
|
//
|
|
// In full-path matching mode, we put the slash at the START of the
|
|
// pattern, so start is:
|
|
// - if first pattern: same as part-matching mode
|
|
// - if not isStart(): nothing
|
|
// - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
|
|
// - if dots allowed or not possible: /
|
|
// - if dots possible and not allowed: /(?!\.)
|
|
// end is:
|
|
// - if last pattern, same as part-matching mode
|
|
// - else nothing
|
|
//
|
|
// Always put the (?:$|/) on negated tails, though, because that has to be
|
|
// there to bind the end of the negated pattern portion, and it's easier to
|
|
// just stick it in now rather than try to inject it later in the middle of
|
|
// the pattern.
|
|
//
|
|
// We can just always return the same end, and leave it up to the caller
|
|
// to know whether it's going to be used joined or in parts.
|
|
// And, if the start is adjusted slightly, can do the same there:
|
|
// - if not isStart: nothing
|
|
// - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
|
|
// - if dots allowed or not possible: (?:/|^)
|
|
// - if dots possible and not allowed: (?:/|^)(?!\.)
|
|
//
|
|
// But it's better to have a simpler binding without a conditional, for
|
|
// performance, so probably better to return both start options.
|
|
//
|
|
// Then the caller just ignores the end if it's not the first pattern,
|
|
// and the start always gets applied.
|
|
//
|
|
// But that's always going to be $ if it's the ending pattern, or nothing,
|
|
// so the caller can just attach $ at the end of the pattern when building.
|
|
//
|
|
// So the todo is:
|
|
// - better detect what kind of start is needed
|
|
// - return both flavors of starting pattern
|
|
// - attach $ at the end of the pattern when creating the actual RegExp
|
|
//
|
|
// Ah, but wait, no, that all only applies to the root when the first pattern
|
|
// is not an extglob. If the first pattern IS an extglob, then we need all
|
|
// that dot prevention biz to live in the extglob portions, because eg
|
|
// +(*|.x*) can match .xy but not .yx.
|
|
//
|
|
// So, return the two flavors if it's #root and the first child is not an
|
|
// AST, otherwise leave it to the child AST to handle it, and there,
|
|
// use the (?:^|/) style of start binding.
|
|
//
|
|
// Even simplified further:
|
|
// - Since the start for a join is eg /(?!\.) and the start for a part
|
|
// is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
|
|
// or start or whatever) and prepend ^ or / at the Regexp construction.
|
|
toRegExpSource() {
|
|
if (this.#root === this)
|
|
this.#fillNegs();
|
|
if (!this.type) {
|
|
const noEmpty = this.isStart() && this.isEnd();
|
|
const src = this.#parts
|
|
.map(p => {
|
|
const [re, _, hasMagic, uflag] = typeof p === 'string'
|
|
? AST.#parseGlob(p, this.#hasMagic, noEmpty)
|
|
: p.toRegExpSource();
|
|
this.#hasMagic = this.#hasMagic || hasMagic;
|
|
this.#uflag = this.#uflag || uflag;
|
|
return re;
|
|
})
|
|
.join('');
|
|
let start = '';
|
|
if (this.isStart()) {
|
|
if (typeof this.#parts[0] === 'string') {
|
|
// this is the string that will match the start of the pattern,
|
|
// so we need to protect against dots and such.
|
|
// '.' and '..' cannot match unless the pattern is that exactly,
|
|
// even if it starts with . or dot:true is set.
|
|
const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
|
|
if (!dotTravAllowed) {
|
|
const aps = addPatternStart;
|
|
// check if we have a possibility of matching . or ..,
|
|
// and prevent that.
|
|
const needNoTrav =
|
|
// dots are allowed, and the pattern starts with [ or .
|
|
(this.#options.dot && aps.has(src.charAt(0))) ||
|
|
// the pattern starts with \., and then [ or .
|
|
(src.startsWith('\\.') && aps.has(src.charAt(2))) ||
|
|
// the pattern starts with \.\., and then [ or .
|
|
(src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
|
|
// no need to prevent dots if it can't match a dot, or if a
|
|
// sub-pattern will be preventing it anyway.
|
|
const needNoDot = !this.#options.dot && aps.has(src.charAt(0));
|
|
start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
|
|
}
|
|
}
|
|
}
|
|
// append the "end of path portion" pattern to negation tails
|
|
let end = '';
|
|
if (this.isEnd() &&
|
|
this.#root.#filledNegs &&
|
|
this.#parent?.type === '!') {
|
|
end = '(?:$|\\/)';
|
|
}
|
|
const final = start + src + end;
|
|
return [
|
|
final,
|
|
(0, unescape_js_1.unescape)(src),
|
|
(this.#hasMagic = !!this.#hasMagic),
|
|
this.#uflag,
|
|
];
|
|
}
|
|
// some kind of extglob
|
|
const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
|
|
const body = this.#parts
|
|
.map(p => {
|
|
// extglob ASTs should only contain parent ASTs
|
|
/* c8 ignore start */
|
|
if (typeof p === 'string') {
|
|
throw new Error('string type in extglob ast??');
|
|
}
|
|
/* c8 ignore stop */
|
|
// can ignore hasMagic, because extglobs are already always magic
|
|
const [re, _, _hasMagic, uflag] = p.toRegExpSource();
|
|
this.#uflag = this.#uflag || uflag;
|
|
return re;
|
|
})
|
|
.filter(p => !(this.isStart() && this.isEnd()) || !!p)
|
|
.join('|');
|
|
if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
|
|
// invalid extglob, has to at least be *something* present, if it's
|
|
// the entire path portion.
|
|
const s = this.toString();
|
|
this.#parts = [s];
|
|
this.type = null;
|
|
this.#hasMagic = undefined;
|
|
return [s, (0, unescape_js_1.unescape)(this.toString()), false, false];
|
|
}
|
|
// an empty !() is exactly equivalent to a starNoEmpty
|
|
let final = '';
|
|
if (this.type === '!' && this.#emptyExt) {
|
|
final =
|
|
(this.isStart() && !this.#options.dot ? startNoDot : '') + starNoEmpty;
|
|
}
|
|
else {
|
|
const close = this.type === '!'
|
|
? // !() must match something, but !(x) can match ''
|
|
'))' +
|
|
(this.isStart() && !this.#options.dot ? startNoDot : '') +
|
|
star +
|
|
')'
|
|
: this.type === '@'
|
|
? ')'
|
|
: `)${this.type}`;
|
|
final = start + body + close;
|
|
}
|
|
return [
|
|
final,
|
|
(0, unescape_js_1.unescape)(body),
|
|
(this.#hasMagic = !!this.#hasMagic),
|
|
this.#uflag,
|
|
];
|
|
}
|
|
static #parseGlob(glob, hasMagic, noEmpty = false) {
|
|
let escaping = false;
|
|
let re = '';
|
|
let uflag = false;
|
|
for (let i = 0; i < glob.length; i++) {
|
|
const c = glob.charAt(i);
|
|
if (escaping) {
|
|
escaping = false;
|
|
re += (reSpecials.has(c) ? '\\' : '') + c;
|
|
continue;
|
|
}
|
|
if (c === '\\') {
|
|
if (i === glob.length - 1) {
|
|
re += '\\\\';
|
|
}
|
|
else {
|
|
escaping = true;
|
|
}
|
|
continue;
|
|
}
|
|
if (c === '[') {
|
|
const [src, needUflag, consumed, magic] = (0, brace_expressions_js_1.parseClass)(glob, i);
|
|
if (consumed) {
|
|
re += src;
|
|
uflag = uflag || needUflag;
|
|
i += consumed - 1;
|
|
hasMagic = hasMagic || magic;
|
|
continue;
|
|
}
|
|
}
|
|
if (c === '*') {
|
|
if (noEmpty && glob === '*')
|
|
re += starNoEmpty;
|
|
else
|
|
re += star;
|
|
hasMagic = true;
|
|
continue;
|
|
}
|
|
if (c === '?') {
|
|
re += qmark;
|
|
hasMagic = true;
|
|
continue;
|
|
}
|
|
re += regExpEscape(c);
|
|
}
|
|
return [re, (0, unescape_js_1.unescape)(glob), !!hasMagic, uflag];
|
|
}
|
|
}
|
|
exports.AST = AST;
|
|
//# sourceMappingURL=ast.js.map
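// Illustrative note (not part of the library): toMMPattern() above returns a
// plain string when the pattern has no magic, and a RegExp (annotated with
// _src and _glob) when it does. A sketch, assuming AST is imported from this
// module:
//
//   AST.fromGlob('foo.txt', {}).toMMPattern();  // -> 'foo.txt' (literal string)
//   AST.fromGlob('*.txt', {}).toMMPattern();    // -> a ^...$-anchored RegExp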
|
|
|
|
/***/ }),
|
|
|
|
/***/ 314:
|
|
/***/ ((__unused_webpack_module, exports) => {
|
|
|
|
"use strict";
|
|
|
|
// translate the various posix character classes into unicode properties
|
|
// this works across all unicode locales
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.parseClass = void 0;
|
|
// { <posix class>: [<translation>, /u flag required, negated]
|
|
const posixClasses = {
|
|
'[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true],
|
|
'[:alpha:]': ['\\p{L}\\p{Nl}', true],
|
|
'[:ascii:]': ['\\x' + '00-\\x' + '7f', false],
|
|
'[:blank:]': ['\\p{Zs}\\t', true],
|
|
'[:cntrl:]': ['\\p{Cc}', true],
|
|
'[:digit:]': ['\\p{Nd}', true],
|
|
'[:graph:]': ['\\p{Z}\\p{C}', true, true],
|
|
'[:lower:]': ['\\p{Ll}', true],
|
|
'[:print:]': ['\\p{C}', true],
|
|
'[:punct:]': ['\\p{P}', true],
|
|
'[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true],
|
|
'[:upper:]': ['\\p{Lu}', true],
|
|
'[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true],
|
|
'[:xdigit:]': ['A-Fa-f0-9', false],
|
|
};
|
|
// only need to escape a few things inside of brace expressions
|
|
// escapes: [ \ ] -
|
|
const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&');
|
|
// escape all regexp magic characters
|
|
const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
|
|
// everything has already been escaped, we just have to join
|
|
const rangesToString = (ranges) => ranges.join('');
|
|
// takes a glob string at a posix brace expression, and returns
|
|
// an equivalent regular expression source, and boolean indicating
|
|
// whether the /u flag needs to be applied, and the number of chars
|
|
// consumed to parse the character class.
|
|
// This also removes out of order ranges, and returns ($.) if the
|
|
// entire class is just no good.
|
|
const parseClass = (glob, position) => {
|
|
const pos = position;
|
|
/* c8 ignore start */
|
|
if (glob.charAt(pos) !== '[') {
|
|
throw new Error('not in a brace expression');
|
|
}
|
|
/* c8 ignore stop */
|
|
const ranges = [];
|
|
const negs = [];
|
|
let i = pos + 1;
|
|
let sawStart = false;
|
|
let uflag = false;
|
|
let escaping = false;
|
|
let negate = false;
|
|
let endPos = pos;
|
|
let rangeStart = '';
|
|
WHILE: while (i < glob.length) {
|
|
const c = glob.charAt(i);
|
|
if ((c === '!' || c === '^') && i === pos + 1) {
|
|
negate = true;
|
|
i++;
|
|
continue;
|
|
}
|
|
if (c === ']' && sawStart && !escaping) {
|
|
endPos = i + 1;
|
|
break;
|
|
}
|
|
sawStart = true;
|
|
if (c === '\\') {
|
|
if (!escaping) {
|
|
escaping = true;
|
|
i++;
|
|
continue;
|
|
}
|
|
// escaped \ char, fall through and treat like normal char
|
|
}
|
|
if (c === '[' && !escaping) {
|
|
// either a posix class, a collation equivalent, or just a [
|
|
for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
|
|
if (glob.startsWith(cls, i)) {
|
|
// invalid, [a-[] is fine, but not [a-[:alpha:]]
|
|
if (rangeStart) {
|
|
return ['$.', false, glob.length - pos, true];
|
|
}
|
|
i += cls.length;
|
|
if (neg)
|
|
negs.push(unip);
|
|
else
|
|
ranges.push(unip);
|
|
uflag = uflag || u;
|
|
continue WHILE;
|
|
}
|
|
}
|
|
}
|
|
// now it's just a normal character, effectively
|
|
escaping = false;
|
|
if (rangeStart) {
|
|
// throw this range away if it's not valid, but others
|
|
// can still match.
|
|
if (c > rangeStart) {
|
|
ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));
|
|
}
|
|
else if (c === rangeStart) {
|
|
ranges.push(braceEscape(c));
|
|
}
|
|
rangeStart = '';
|
|
i++;
|
|
continue;
|
|
}
|
|
// now might be the start of a range.
|
|
// can be either c-d or c-] or c<more...>] or c] at this point
|
|
if (glob.startsWith('-]', i + 1)) {
|
|
ranges.push(braceEscape(c + '-'));
|
|
i += 2;
|
|
continue;
|
|
}
|
|
if (glob.startsWith('-', i + 1)) {
|
|
rangeStart = c;
|
|
i += 2;
|
|
continue;
|
|
}
|
|
// not the start of a range, just a single character
|
|
ranges.push(braceEscape(c));
|
|
i++;
|
|
}
|
|
if (endPos < i) {
|
|
// didn't see the end of the class, not a valid class,
|
|
// but might still be valid as a literal match.
|
|
return ['', false, 0, false];
|
|
}
|
|
// if we got no ranges and no negates, then we have a range that
|
|
// cannot possibly match anything, and that poisons the whole glob
|
|
if (!ranges.length && !negs.length) {
|
|
return ['$.', false, glob.length - pos, true];
|
|
}
|
|
// if we got one positive range, and it's a single character, then that's
|
|
// not actually a magic pattern, it's just that one literal character.
|
|
// we should not treat that as "magic", we should just return the literal
|
|
// character. [_] is a perfectly valid way to escape glob magic chars.
|
|
if (negs.length === 0 &&
|
|
ranges.length === 1 &&
|
|
/^\\?.$/.test(ranges[0]) &&
|
|
!negate) {
|
|
const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
|
|
return [regexpEscape(r), false, endPos - pos, false];
|
|
}
|
|
const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';
|
|
const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';
|
|
const comb = ranges.length && negs.length
|
|
? '(' + sranges + '|' + snegs + ')'
|
|
: ranges.length
|
|
? sranges
|
|
: snegs;
|
|
return [comb, uflag, endPos - pos, true];
|
|
};
|
|
exports.parseClass = parseClass;
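// For example (illustrative): the returned tuple is [source, needUflag, consumed, magic]:
//   parseClass('[a-c]', 0) // => ['[a-c]', false, 5, true]
//   parseClass('[!a]', 0)  // => ['[^a]', false, 4, true]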
//# sourceMappingURL=brace-expressions.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1477:
|
|
/***/ ((__unused_webpack_module, exports) => {
|
|
|
|
"use strict";
|
|
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.escape = void 0;
|
|
/**
|
|
* Escape all magic characters in a glob pattern.
|
|
*
|
|
* If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
|
|
* option is used, then characters are escaped by wrapping in `[]`, because
|
|
* a magic character wrapped in a character class can only be satisfied by
|
|
* that exact character. In this mode, `\` is _not_ escaped, because it is
|
|
* not interpreted as a magic character, but instead as a path separator.
|
|
*/
|
|
const escape = (s, { windowsPathsNoEscape = false, } = {}) => {
|
|
// don't need to escape +@! because we escape the parens
|
|
// that make those magic, and escaping ! as [!] isn't valid,
|
|
// because [!]] is a valid glob class meaning not ']'.
|
|
return windowsPathsNoEscape
|
|
? s.replace(/[?*()[\]]/g, '[$&]')
|
|
: s.replace(/[?*()[\]\\]/g, '\\$&');
|
|
};
|
|
exports.escape = escape;
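// For example (illustrative): escape('*.txt') === '\\*.txt', while
// escape('*.txt', { windowsPathsNoEscape: true }) === '[*].txt'.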
//# sourceMappingURL=escape.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 266:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.unescape = exports.escape = exports.AST = exports.Minimatch = exports.match = exports.makeRe = exports.braceExpand = exports.defaults = exports.filter = exports.GLOBSTAR = exports.sep = exports.minimatch = void 0;
|
|
const brace_expansion_1 = __importDefault(__nccwpck_require__(1046));
|
|
const assert_valid_pattern_js_1 = __nccwpck_require__(5934);
|
|
const ast_js_1 = __nccwpck_require__(7642);
|
|
const escape_js_1 = __nccwpck_require__(1477);
|
|
const unescape_js_1 = __nccwpck_require__(9820);
|
|
const minimatch = (p, pattern, options = {}) => {
|
|
(0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
|
|
// shortcut: comments match nothing.
|
|
if (!options.nocomment && pattern.charAt(0) === '#') {
|
|
return false;
|
|
}
|
|
return new Minimatch(pattern, options).match(p);
|
|
};
|
|
exports.minimatch = minimatch;
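// For example (illustrative): minimatch('bar.foo', '*.foo') is true, while
// minimatch('.hidden.foo', '*.foo') is false unless { dot: true } is passed,
// because a leading dot is never matched by * by default.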
// Optimized checking for the most common glob patterns.
|
|
const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
|
|
const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);
|
|
const starDotExtTestDot = (ext) => (f) => f.endsWith(ext);
|
|
const starDotExtTestNocase = (ext) => {
|
|
ext = ext.toLowerCase();
|
|
return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);
|
|
};
|
|
const starDotExtTestNocaseDot = (ext) => {
|
|
ext = ext.toLowerCase();
|
|
return (f) => f.toLowerCase().endsWith(ext);
|
|
};
|
|
const starDotStarRE = /^\*+\.\*+$/;
|
|
const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');
|
|
const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');
|
|
const dotStarRE = /^\.\*+$/;
|
|
const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');
|
|
const starRE = /^\*+$/;
|
|
const starTest = (f) => f.length !== 0 && !f.startsWith('.');
|
|
const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..';
|
|
const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
|
|
const qmarksTestNocase = ([$0, ext = '']) => {
|
|
const noext = qmarksTestNoExt([$0]);
|
|
if (!ext)
|
|
return noext;
|
|
ext = ext.toLowerCase();
|
|
return (f) => noext(f) && f.toLowerCase().endsWith(ext);
|
|
};
|
|
const qmarksTestNocaseDot = ([$0, ext = '']) => {
|
|
const noext = qmarksTestNoExtDot([$0]);
|
|
if (!ext)
|
|
return noext;
|
|
ext = ext.toLowerCase();
|
|
return (f) => noext(f) && f.toLowerCase().endsWith(ext);
|
|
};
|
|
const qmarksTestDot = ([$0, ext = '']) => {
|
|
const noext = qmarksTestNoExtDot([$0]);
|
|
return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
|
|
};
|
|
const qmarksTest = ([$0, ext = '']) => {
|
|
const noext = qmarksTestNoExt([$0]);
|
|
return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
|
|
};
|
|
const qmarksTestNoExt = ([$0]) => {
|
|
const len = $0.length;
|
|
return (f) => f.length === len && !f.startsWith('.');
|
|
};
|
|
const qmarksTestNoExtDot = ([$0]) => {
|
|
const len = $0.length;
|
|
return (f) => f.length === len && f !== '.' && f !== '..';
|
|
};
|
|
/* c8 ignore start */
|
|
const defaultPlatform = (typeof process === 'object' && process
|
|
? (typeof process.env === 'object' &&
|
|
process.env &&
|
|
process.env.__MINIMATCH_TESTING_PLATFORM__) ||
|
|
process.platform
|
|
: 'posix');
|
|
const path = {
|
|
win32: { sep: '\\' },
|
|
posix: { sep: '/' },
|
|
};
|
|
/* c8 ignore stop */
|
|
exports.sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep;
|
|
exports.minimatch.sep = exports.sep;
|
|
exports.GLOBSTAR = Symbol('globstar **');
|
|
exports.minimatch.GLOBSTAR = exports.GLOBSTAR;
|
|
// any single thing other than /
|
|
// don't need to escape / when using new RegExp()
|
|
const qmark = '[^/]';
|
|
// * => any number of characters
|
|
const star = qmark + '*?';
|
|
// ** when dots are allowed. Anything goes, except .. and .
|
|
// not (^ or / followed by one or two dots followed by $ or /),
|
|
// followed by anything, any number of times.
|
|
const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?';
|
|
// not a ^ or / followed by a dot,
|
|
// followed by anything, any number of times.
|
|
const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?';
|
|
const filter = (pattern, options = {}) => (p) => (0, exports.minimatch)(p, pattern, options);
|
|
exports.filter = filter;
|
|
exports.minimatch.filter = exports.filter;
|
|
const ext = (a, b = {}) => Object.assign({}, a, b);
|
|
const defaults = (def) => {
|
|
if (!def || typeof def !== 'object' || !Object.keys(def).length) {
|
|
return exports.minimatch;
|
|
}
|
|
const orig = exports.minimatch;
|
|
const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
|
|
return Object.assign(m, {
|
|
Minimatch: class Minimatch extends orig.Minimatch {
|
|
constructor(pattern, options = {}) {
|
|
super(pattern, ext(def, options));
|
|
}
|
|
static defaults(options) {
|
|
return orig.defaults(ext(def, options)).Minimatch;
|
|
}
|
|
},
|
|
AST: class AST extends orig.AST {
|
|
/* c8 ignore start */
|
|
constructor(type, parent, options = {}) {
|
|
super(type, parent, ext(def, options));
|
|
}
|
|
/* c8 ignore stop */
|
|
static fromGlob(pattern, options = {}) {
|
|
return orig.AST.fromGlob(pattern, ext(def, options));
|
|
}
|
|
},
|
|
unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
|
|
escape: (s, options = {}) => orig.escape(s, ext(def, options)),
|
|
filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
|
|
defaults: (options) => orig.defaults(ext(def, options)),
|
|
makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
|
|
braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
|
|
match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
|
|
sep: orig.sep,
|
|
GLOBSTAR: exports.GLOBSTAR,
|
|
});
|
|
};
|
|
exports.defaults = defaults;
|
|
exports.minimatch.defaults = exports.defaults;
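// For example (illustrative): a matcher with baked-in options can be derived
// once and reused:
//   const insensitive = minimatch.defaults({ nocase: true });
//   insensitive('README.TXT', '*.txt') // => true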
// Brace expansion:
|
|
// a{b,c}d -> abd acd
|
|
// a{b,}c -> abc ac
|
|
// a{0..3}d -> a0d a1d a2d a3d
|
|
// a{b,c{d,e}f}g -> abg acdfg acefg
|
|
// a{b,c}d{e,f}g -> abdeg acdeg abdfg acdfg
|
|
//
|
|
// Invalid sets are not expanded.
|
|
// a{2..}b -> a{2..}b
|
|
// a{b}c -> a{b}c
|
|
const braceExpand = (pattern, options = {}) => {
|
|
(0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
|
|
// Thanks to Yeting Li <https://github.com/yetingli> for
|
|
// improving this regexp to avoid a ReDOS vulnerability.
|
|
if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
|
|
// shortcut. no need to expand.
|
|
return [pattern];
|
|
}
|
|
return (0, brace_expansion_1.default)(pattern);
|
|
};
|
|
exports.braceExpand = braceExpand;
|
|
exports.minimatch.braceExpand = exports.braceExpand;
|
|
// parse a component of the expanded set.
|
|
// At this point, no pattern may contain "/" in it
|
|
// so we're going to return a 2d array, where each entry is the full
|
|
// pattern, split on '/', and then turned into a regular expression.
|
|
// A regexp is made at the end which joins each array with an
|
|
// escaped /, and another full one which joins each regexp with |.
|
|
//
|
|
// Following the lead of Bash 4.1, note that "**" only has special meaning
|
|
// when it is the *only* thing in a path portion. Otherwise, any series
|
|
// of * is equivalent to a single *. Globstar behavior is enabled by
|
|
// default, and can be disabled by setting options.noglobstar.
|
|
const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
|
|
exports.makeRe = makeRe;
|
|
exports.minimatch.makeRe = exports.makeRe;
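// For example (illustrative): makeRe('*.js') returns a RegExp (or false when the
// pattern yields no valid set, e.g. a '#comment' pattern); the result tests true
// for 'index.js' but false for 'lib/index.js', since * never crosses a path separator.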
const match = (list, pattern, options = {}) => {
|
|
const mm = new Minimatch(pattern, options);
|
|
list = list.filter(f => mm.match(f));
|
|
if (mm.options.nonull && !list.length) {
|
|
list.push(pattern);
|
|
}
|
|
return list;
|
|
};
|
|
exports.match = match;
|
|
exports.minimatch.match = exports.match;
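// For example (illustrative):
//   match(['a.js', 'b.ts', 'c.js'], '*.js') // => ['a.js', 'c.js']
//   ['a.js', 'b.ts'].filter(filter('*.js')) // => ['a.js']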
// replace stuff like \* with *
|
|
const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
|
|
const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
|
|
class Minimatch {
|
|
options;
|
|
set;
|
|
pattern;
|
|
windowsPathsNoEscape;
|
|
nonegate;
|
|
negate;
|
|
comment;
|
|
empty;
|
|
preserveMultipleSlashes;
|
|
partial;
|
|
globSet;
|
|
globParts;
|
|
nocase;
|
|
isWindows;
|
|
platform;
|
|
windowsNoMagicRoot;
|
|
regexp;
|
|
constructor(pattern, options = {}) {
|
|
(0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
|
|
options = options || {};
|
|
this.options = options;
|
|
this.pattern = pattern;
|
|
this.platform = options.platform || defaultPlatform;
|
|
this.isWindows = this.platform === 'win32';
|
|
this.windowsPathsNoEscape =
|
|
!!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
|
|
if (this.windowsPathsNoEscape) {
|
|
this.pattern = this.pattern.replace(/\\/g, '/');
|
|
}
|
|
this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
|
|
this.regexp = null;
|
|
this.negate = false;
|
|
this.nonegate = !!options.nonegate;
|
|
this.comment = false;
|
|
this.empty = false;
|
|
this.partial = !!options.partial;
|
|
this.nocase = !!this.options.nocase;
|
|
this.windowsNoMagicRoot =
|
|
options.windowsNoMagicRoot !== undefined
|
|
? options.windowsNoMagicRoot
|
|
: !!(this.isWindows && this.nocase);
|
|
this.globSet = [];
|
|
this.globParts = [];
|
|
this.set = [];
|
|
// make the set of regexps etc.
|
|
this.make();
|
|
}
|
|
hasMagic() {
|
|
if (this.options.magicalBraces && this.set.length > 1) {
|
|
return true;
|
|
}
|
|
for (const pattern of this.set) {
|
|
for (const part of pattern) {
|
|
if (typeof part !== 'string')
|
|
return true;
|
|
}
|
|
}
|
|
return false;
|
|
}
|
|
debug(..._) { }
|
|
make() {
|
|
const pattern = this.pattern;
|
|
const options = this.options;
|
|
// empty patterns and comments match nothing.
|
|
if (!options.nocomment && pattern.charAt(0) === '#') {
|
|
this.comment = true;
|
|
return;
|
|
}
|
|
if (!pattern) {
|
|
this.empty = true;
|
|
return;
|
|
}
|
|
// step 1: figure out negation, etc.
|
|
this.parseNegate();
|
|
// step 2: expand braces
|
|
this.globSet = [...new Set(this.braceExpand())];
|
|
if (options.debug) {
|
|
this.debug = (...args) => console.error(...args);
|
|
}
|
|
this.debug(this.pattern, this.globSet);
|
|
// step 3: now we have a set, so turn each one into a series of
|
|
// path-portion matching patterns.
|
|
// These will be regexps, except in the case of "**", which is
|
|
// set to the GLOBSTAR object for globstar behavior,
|
|
// and will not contain any / characters
|
|
//
|
|
// First, we preprocess to make the glob pattern sets a bit simpler
|
|
// and deduped. There are some perf-killing patterns that can cause
|
|
// problems with a glob walk, but we can simplify them down a bit.
|
|
const rawGlobParts = this.globSet.map(s => this.slashSplit(s));
|
|
this.globParts = this.preprocess(rawGlobParts);
|
|
this.debug(this.pattern, this.globParts);
|
|
// glob --> regexps
|
|
let set = this.globParts.map((s, _, __) => {
|
|
if (this.isWindows && this.windowsNoMagicRoot) {
|
|
// check if it's a drive or unc path.
|
|
const isUNC = s[0] === '' &&
|
|
s[1] === '' &&
|
|
(s[2] === '?' || !globMagic.test(s[2])) &&
|
|
!globMagic.test(s[3]);
|
|
const isDrive = /^[a-z]:/i.test(s[0]);
|
|
if (isUNC) {
|
|
return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];
|
|
}
|
|
else if (isDrive) {
|
|
return [s[0], ...s.slice(1).map(ss => this.parse(ss))];
|
|
}
|
|
}
|
|
return s.map(ss => this.parse(ss));
|
|
});
|
|
this.debug(this.pattern, set);
|
|
// filter out everything that didn't compile properly.
|
|
this.set = set.filter(s => s.indexOf(false) === -1);
|
|
// do not treat the ? in UNC paths as magic
|
|
if (this.isWindows) {
|
|
for (let i = 0; i < this.set.length; i++) {
|
|
const p = this.set[i];
|
|
if (p[0] === '' &&
|
|
p[1] === '' &&
|
|
this.globParts[i][2] === '?' &&
|
|
typeof p[3] === 'string' &&
|
|
/^[a-z]:$/i.test(p[3])) {
|
|
p[2] = '?';
|
|
}
|
|
}
|
|
}
|
|
this.debug(this.pattern, this.set);
|
|
}
|
|
// various transforms to equivalent pattern sets that are
|
|
// faster to process in a filesystem walk. The goal is to
|
|
// eliminate what we can, and push all ** patterns as far
|
|
// to the right as possible, even if it increases the number
|
|
// of patterns that we have to process.
|
|
preprocess(globParts) {
|
|
// if we're not in globstar mode, then turn all ** into *
|
|
if (this.options.noglobstar) {
|
|
for (let i = 0; i < globParts.length; i++) {
|
|
for (let j = 0; j < globParts[i].length; j++) {
|
|
if (globParts[i][j] === '**') {
|
|
globParts[i][j] = '*';
|
|
}
|
|
}
|
|
}
|
|
}
|
|
const { optimizationLevel = 1 } = this.options;
|
|
if (optimizationLevel >= 2) {
|
|
// aggressive optimization for the purpose of fs walking
|
|
globParts = this.firstPhasePreProcess(globParts);
|
|
globParts = this.secondPhasePreProcess(globParts);
|
|
}
|
|
else if (optimizationLevel >= 1) {
|
|
// just basic optimizations to remove some .. parts
|
|
globParts = this.levelOneOptimize(globParts);
|
|
}
|
|
else {
|
|
globParts = this.adjascentGlobstarOptimize(globParts);
|
|
}
|
|
return globParts;
|
|
}
|
|
// just get rid of adjacent ** portions
|
|
adjascentGlobstarOptimize(globParts) {
|
|
return globParts.map(parts => {
|
|
let gs = -1;
|
|
while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
|
|
let i = gs;
|
|
while (parts[i + 1] === '**') {
|
|
i++;
|
|
}
|
|
if (i !== gs) {
|
|
parts.splice(gs, i - gs);
|
|
}
|
|
}
|
|
return parts;
|
|
});
|
|
}
|
|
// get rid of adjacent ** and resolve .. portions
|
|
levelOneOptimize(globParts) {
|
|
return globParts.map(parts => {
|
|
parts = parts.reduce((set, part) => {
|
|
const prev = set[set.length - 1];
|
|
if (part === '**' && prev === '**') {
|
|
return set;
|
|
}
|
|
if (part === '..') {
|
|
if (prev && prev !== '..' && prev !== '.' && prev !== '**') {
|
|
set.pop();
|
|
return set;
|
|
}
|
|
}
|
|
set.push(part);
|
|
return set;
|
|
}, []);
|
|
return parts.length === 0 ? [''] : parts;
|
|
});
|
|
}
|
|
levelTwoFileOptimize(parts) {
|
|
if (!Array.isArray(parts)) {
|
|
parts = this.slashSplit(parts);
|
|
}
|
|
let didSomething = false;
|
|
do {
|
|
didSomething = false;
|
|
// <pre>/<e>/<rest> -> <pre>/<rest>
|
|
if (!this.preserveMultipleSlashes) {
|
|
for (let i = 1; i < parts.length - 1; i++) {
|
|
const p = parts[i];
|
|
// don't squeeze out UNC patterns
|
|
if (i === 1 && p === '' && parts[0] === '')
|
|
continue;
|
|
if (p === '.' || p === '') {
|
|
didSomething = true;
|
|
parts.splice(i, 1);
|
|
i--;
|
|
}
|
|
}
|
|
if (parts[0] === '.' &&
|
|
parts.length === 2 &&
|
|
(parts[1] === '.' || parts[1] === '')) {
|
|
didSomething = true;
|
|
parts.pop();
|
|
}
|
|
}
|
|
// <pre>/<p>/../<rest> -> <pre>/<rest>
|
|
let dd = 0;
|
|
while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
|
|
const p = parts[dd - 1];
|
|
if (p && p !== '.' && p !== '..' && p !== '**') {
|
|
didSomething = true;
|
|
parts.splice(dd - 1, 2);
|
|
dd -= 2;
|
|
}
|
|
}
|
|
} while (didSomething);
|
|
return parts.length === 0 ? [''] : parts;
|
|
}
|
|
// First phase: single-pattern processing
|
|
// <pre> is 1 or more portions
|
|
// <rest> is 1 or more portions
|
|
// <p> is any portion other than ., .., '', or **
|
|
// <e> is . or ''
|
|
//
|
|
// **/.. is *brutal* for filesystem walking performance, because
|
|
// it effectively resets the recursive walk each time it occurs,
|
|
// and ** cannot be reduced out by a .. pattern part like a regexp
|
|
// or most strings (other than .., ., and '') can be.
|
|
//
|
|
// <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
|
|
// <pre>/<e>/<rest> -> <pre>/<rest>
|
|
// <pre>/<p>/../<rest> -> <pre>/<rest>
|
|
// **/**/<rest> -> **/<rest>
|
|
//
|
|
// **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
|
|
// this WOULD be allowed if ** did follow symlinks, or * didn't
|
|
firstPhasePreProcess(globParts) {
|
|
let didSomething = false;
|
|
do {
|
|
didSomething = false;
|
|
// <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
|
|
for (let parts of globParts) {
|
|
let gs = -1;
|
|
while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
|
|
let gss = gs;
|
|
while (parts[gss + 1] === '**') {
|
|
// <pre>/**/**/<rest> -> <pre>/**/<rest>
|
|
gss++;
|
|
}
|
|
// eg, if gs is 2 and gss is 4, that means we have 3 **
|
|
// parts, and can remove 2 of them.
|
|
if (gss > gs) {
|
|
parts.splice(gs + 1, gss - gs);
|
|
}
|
|
let next = parts[gs + 1];
|
|
const p = parts[gs + 2];
|
|
const p2 = parts[gs + 3];
|
|
if (next !== '..')
|
|
continue;
|
|
if (!p ||
|
|
p === '.' ||
|
|
p === '..' ||
|
|
!p2 ||
|
|
p2 === '.' ||
|
|
p2 === '..') {
|
|
continue;
|
|
}
|
|
didSomething = true;
|
|
// edit parts in place, and push the new one
|
|
parts.splice(gs, 1);
|
|
const other = parts.slice(0);
|
|
other[gs] = '**';
|
|
globParts.push(other);
|
|
gs--;
|
|
}
|
|
// <pre>/<e>/<rest> -> <pre>/<rest>
|
|
if (!this.preserveMultipleSlashes) {
|
|
for (let i = 1; i < parts.length - 1; i++) {
|
|
const p = parts[i];
|
|
// don't squeeze out UNC patterns
|
|
if (i === 1 && p === '' && parts[0] === '')
|
|
continue;
|
|
if (p === '.' || p === '') {
|
|
didSomething = true;
|
|
parts.splice(i, 1);
|
|
i--;
|
|
}
|
|
}
|
|
if (parts[0] === '.' &&
|
|
parts.length === 2 &&
|
|
(parts[1] === '.' || parts[1] === '')) {
|
|
didSomething = true;
|
|
parts.pop();
|
|
}
|
|
}
|
|
// <pre>/<p>/../<rest> -> <pre>/<rest>
|
|
let dd = 0;
|
|
while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
|
|
const p = parts[dd - 1];
|
|
if (p && p !== '.' && p !== '..' && p !== '**') {
|
|
didSomething = true;
|
|
const needDot = dd === 1 && parts[dd + 1] === '**';
|
|
const splin = needDot ? ['.'] : [];
|
|
parts.splice(dd - 1, 2, ...splin);
|
|
if (parts.length === 0)
|
|
parts.push('');
|
|
dd -= 2;
|
|
}
|
|
}
|
|
}
|
|
} while (didSomething);
|
|
return globParts;
|
|
}
|
|
// second phase: multi-pattern dedupes
|
|
// {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
|
|
// {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
|
|
// {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
|
|
//
|
|
// {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
|
|
// ^-- not valid because ** doesn't follow symlinks
|
|
secondPhasePreProcess(globParts) {
|
|
for (let i = 0; i < globParts.length - 1; i++) {
|
|
for (let j = i + 1; j < globParts.length; j++) {
|
|
const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
|
|
if (!matched)
|
|
continue;
|
|
globParts[i] = matched;
|
|
globParts[j] = [];
|
|
}
|
|
}
|
|
return globParts.filter(gs => gs.length);
|
|
}
|
|
partsMatch(a, b, emptyGSMatch = false) {
|
|
let ai = 0;
|
|
let bi = 0;
|
|
let result = [];
|
|
let which = '';
|
|
while (ai < a.length && bi < b.length) {
|
|
if (a[ai] === b[bi]) {
|
|
result.push(which === 'b' ? b[bi] : a[ai]);
|
|
ai++;
|
|
bi++;
|
|
}
|
|
else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
|
|
result.push(a[ai]);
|
|
ai++;
|
|
}
|
|
else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
|
|
result.push(b[bi]);
|
|
bi++;
|
|
}
|
|
else if (a[ai] === '*' &&
|
|
b[bi] &&
|
|
(this.options.dot || !b[bi].startsWith('.')) &&
|
|
b[bi] !== '**') {
|
|
if (which === 'b')
|
|
return false;
|
|
which = 'a';
|
|
result.push(a[ai]);
|
|
ai++;
|
|
bi++;
|
|
}
|
|
else if (b[bi] === '*' &&
|
|
a[ai] &&
|
|
(this.options.dot || !a[ai].startsWith('.')) &&
|
|
a[ai] !== '**') {
|
|
if (which === 'a')
|
|
return false;
|
|
which = 'b';
|
|
result.push(b[bi]);
|
|
ai++;
|
|
bi++;
|
|
}
|
|
else {
|
|
return false;
|
|
}
|
|
}
|
|
// if we fall out of the loop, it means the two are identical
|
|
// as long as their lengths match
|
|
return a.length === b.length && result;
|
|
}
|
|
parseNegate() {
|
|
if (this.nonegate)
|
|
return;
|
|
const pattern = this.pattern;
|
|
let negate = false;
|
|
let negateOffset = 0;
|
|
for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
|
|
negate = !negate;
|
|
negateOffset++;
|
|
}
|
|
if (negateOffset)
|
|
this.pattern = pattern.slice(negateOffset);
|
|
this.negate = negate;
|
|
}
|
|
// set partial to true to test if, for example,
|
|
// "/a/b" matches the start of "/*/b/*/d"
|
|
// Partial means, if you run out of file before you run
|
|
// out of pattern, then that's fine, as long as all
|
|
// the parts match.
|
|
matchOne(file, pattern, partial = false) {
|
|
const options = this.options;
|
|
// UNC paths like //?/X:/... can match X:/... and vice versa
|
|
// Drive letters in absolute drive or unc paths are always compared
|
|
// case-insensitively.
|
|
if (this.isWindows) {
|
|
const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
|
|
const fileUNC = !fileDrive &&
|
|
file[0] === '' &&
|
|
file[1] === '' &&
|
|
file[2] === '?' &&
|
|
/^[a-z]:$/i.test(file[3]);
|
|
const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
|
|
const patternUNC = !patternDrive &&
|
|
pattern[0] === '' &&
|
|
pattern[1] === '' &&
|
|
pattern[2] === '?' &&
|
|
typeof pattern[3] === 'string' &&
|
|
/^[a-z]:$/i.test(pattern[3]);
|
|
const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
|
|
const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
|
|
if (typeof fdi === 'number' && typeof pdi === 'number') {
|
|
const [fd, pd] = [file[fdi], pattern[pdi]];
|
|
if (fd.toLowerCase() === pd.toLowerCase()) {
|
|
pattern[pdi] = fd;
|
|
if (pdi > fdi) {
|
|
pattern = pattern.slice(pdi);
|
|
}
|
|
else if (fdi > pdi) {
|
|
file = file.slice(fdi);
|
|
}
|
|
}
|
|
}
|
|
}
|
|
// resolve and reduce . and .. portions in the file as well.
|
|
// don't need to do the second phase, because it's only one string[]
|
|
const { optimizationLevel = 1 } = this.options;
|
|
if (optimizationLevel >= 2) {
|
|
file = this.levelTwoFileOptimize(file);
|
|
}
|
|
this.debug('matchOne', this, { file, pattern });
|
|
this.debug('matchOne', file.length, pattern.length);
|
|
for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
|
|
this.debug('matchOne loop');
|
|
var p = pattern[pi];
|
|
var f = file[fi];
|
|
this.debug(pattern, p, f);
|
|
// should be impossible.
|
|
// some invalid regexp stuff in the set.
|
|
/* c8 ignore start */
|
|
if (p === false) {
|
|
return false;
|
|
}
|
|
/* c8 ignore stop */
|
|
if (p === exports.GLOBSTAR) {
|
|
this.debug('GLOBSTAR', [pattern, p, f]);
|
|
// "**"
|
|
// a/**/b/**/c would match the following:
|
|
// a/b/x/y/z/c
|
|
// a/x/y/z/b/c
|
|
// a/b/x/b/x/c
|
|
// a/b/c
|
|
// To do this, take the rest of the pattern after
|
|
// the **, and see if it would match the file remainder.
|
|
// If so, return success.
|
|
// If not, the ** "swallows" a segment, and try again.
|
|
// This is recursively awful.
|
|
//
|
|
// a/**/b/**/c matching a/b/x/y/z/c
|
|
// - a matches a
|
|
// - doublestar
|
|
// - matchOne(b/x/y/z/c, b/**/c)
|
|
// - b matches b
|
|
// - doublestar
|
|
// - matchOne(x/y/z/c, c) -> no
|
|
// - matchOne(y/z/c, c) -> no
|
|
// - matchOne(z/c, c) -> no
|
|
// - matchOne(c, c) yes, hit
|
|
var fr = fi;
|
|
var pr = pi + 1;
|
|
if (pr === pl) {
|
|
this.debug('** at the end');
|
|
// a ** at the end will just swallow the rest.
|
|
// We have found a match.
|
|
// however, it will not swallow /.x, unless
|
|
// options.dot is set.
|
|
// . and .. are *never* matched by **, for explosively
|
|
// exponential reasons.
|
|
for (; fi < fl; fi++) {
|
|
if (file[fi] === '.' ||
|
|
file[fi] === '..' ||
|
|
(!options.dot && file[fi].charAt(0) === '.'))
|
|
return false;
|
|
}
|
|
return true;
|
|
}
|
|
// ok, let's see if we can swallow whatever we can.
|
|
while (fr < fl) {
|
|
var swallowee = file[fr];
|
|
this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
|
|
// XXX remove this slice. Just pass the start index.
|
|
if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
|
|
this.debug('globstar found match!', fr, fl, swallowee);
|
|
// found a match.
|
|
return true;
|
|
}
|
|
else {
|
|
// can't swallow "." or ".." ever.
|
|
// can only swallow ".foo" when explicitly asked.
|
|
if (swallowee === '.' ||
|
|
swallowee === '..' ||
|
|
(!options.dot && swallowee.charAt(0) === '.')) {
|
|
this.debug('dot detected!', file, fr, pattern, pr);
|
|
break;
|
|
}
|
|
// ** swallows a segment, and continue.
|
|
this.debug('globstar swallow a segment, and continue');
|
|
fr++;
|
|
}
|
|
}
|
|
// no match was found.
|
|
// However, in partial mode, we can't say this is necessarily over.
|
|
/* c8 ignore start */
|
|
if (partial) {
|
|
// ran out of file
|
|
this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
|
|
if (fr === fl) {
|
|
return true;
|
|
}
|
|
}
|
|
/* c8 ignore stop */
|
|
return false;
|
|
}
|
|
// something other than **
|
|
// non-magic patterns just have to match exactly
|
|
// patterns with magic have been turned into regexps.
|
|
let hit;
|
|
if (typeof p === 'string') {
|
|
hit = f === p;
|
|
this.debug('string match', p, f, hit);
|
|
}
|
|
else {
|
|
hit = p.test(f);
|
|
this.debug('pattern match', p, f, hit);
|
|
}
|
|
if (!hit)
|
|
return false;
|
|
}
|
|
// Note: ending in / means that we'll get a final ""
|
|
// at the end of the pattern. This can only match a
|
|
// corresponding "" at the end of the file.
|
|
// If the file ends in /, then it can only match
// a pattern that ends in /, unless the pattern just
|
|
// doesn't have any more for it. But, a/b/ should *not*
|
|
// match "a/b/*", even though "" matches against the
|
|
// [^/]*? pattern, except in partial mode, where it might
|
|
// simply not be reached yet.
|
|
// However, a/b/ should still satisfy a/*
|
|
// now either we fell off the end of the pattern, or we're done.
|
|
if (fi === fl && pi === pl) {
|
|
// ran out of pattern and filename at the same time.
|
|
// an exact hit!
|
|
return true;
|
|
}
|
|
else if (fi === fl) {
|
|
// ran out of file, but still had pattern left.
|
|
// this is ok if we're doing the match as part of
|
|
// a glob fs traversal.
|
|
return partial;
|
|
}
|
|
else if (pi === pl) {
|
|
// ran out of pattern, still have file left.
|
|
// this is only acceptable if we're on the very last
|
|
// empty segment of a file with a trailing slash.
|
|
// a/* should match a/b/
|
|
return fi === fl - 1 && file[fi] === '';
|
|
/* c8 ignore start */
|
|
}
|
|
else {
|
|
// should be unreachable.
|
|
throw new Error('wtf?');
|
|
}
|
|
/* c8 ignore stop */
|
|
}
|
|
braceExpand() {
|
|
return (0, exports.braceExpand)(this.pattern, this.options);
|
|
}
|
|
parse(pattern) {
|
|
(0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
|
|
const options = this.options;
|
|
// shortcuts
|
|
if (pattern === '**')
|
|
return exports.GLOBSTAR;
|
|
if (pattern === '')
|
|
return '';
|
|
// far and away, the most common glob pattern parts are
|
|
// *, *.*, and *.<ext> Add a fast check method for those.
|
|
let m;
|
|
let fastTest = null;
|
|
if ((m = pattern.match(starRE))) {
|
|
fastTest = options.dot ? starTestDot : starTest;
|
|
}
|
|
else if ((m = pattern.match(starDotExtRE))) {
|
|
fastTest = (options.nocase
|
|
? options.dot
|
|
? starDotExtTestNocaseDot
|
|
: starDotExtTestNocase
|
|
: options.dot
|
|
? starDotExtTestDot
|
|
: starDotExtTest)(m[1]);
|
|
}
|
|
else if ((m = pattern.match(qmarksRE))) {
|
|
fastTest = (options.nocase
|
|
? options.dot
|
|
? qmarksTestNocaseDot
|
|
: qmarksTestNocase
|
|
: options.dot
|
|
? qmarksTestDot
|
|
: qmarksTest)(m);
|
|
}
|
|
else if ((m = pattern.match(starDotStarRE))) {
|
|
fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
|
|
}
|
|
else if ((m = pattern.match(dotStarRE))) {
|
|
fastTest = dotStarTest;
|
|
}
|
|
const re = ast_js_1.AST.fromGlob(pattern, this.options).toMMPattern();
|
|
return fastTest ? Object.assign(re, { test: fastTest }) : re;
|
|
}
|
|
makeRe() {
|
|
if (this.regexp || this.regexp === false)
|
|
return this.regexp;
|
|
// at this point, this.set is a 2d array of partial
|
|
// pattern strings, or "**".
|
|
//
|
|
// It's better to use .match(). This function shouldn't
|
|
// be used, really, but it's pretty convenient sometimes,
|
|
// when you just want to work with a regex.
|
|
const set = this.set;
|
|
if (!set.length) {
|
|
this.regexp = false;
|
|
return this.regexp;
|
|
}
|
|
const options = this.options;
|
|
const twoStar = options.noglobstar
|
|
? star
|
|
: options.dot
|
|
? twoStarDot
|
|
: twoStarNoDot;
|
|
const flags = new Set(options.nocase ? ['i'] : []);
|
|
// regexpify non-globstar patterns
|
|
// if ** is only item, then we just do one twoStar
|
|
// if ** is first, and there are more, prepend (\/|twoStar\/)? to next
|
|
// if ** is last, append (\/twoStar|) to previous
|
|
// if ** is in the middle, append (\/|\/twoStar\/) to previous
|
|
// then filter out GLOBSTAR symbols
|
|
let re = set
|
|
.map(pattern => {
|
|
const pp = pattern.map(p => {
|
|
if (p instanceof RegExp) {
|
|
for (const f of p.flags.split(''))
|
|
flags.add(f);
|
|
}
|
|
return typeof p === 'string'
|
|
? regExpEscape(p)
|
|
: p === exports.GLOBSTAR
|
|
? exports.GLOBSTAR
|
|
: p._src;
|
|
});
|
|
pp.forEach((p, i) => {
|
|
const next = pp[i + 1];
|
|
const prev = pp[i - 1];
|
|
if (p !== exports.GLOBSTAR || prev === exports.GLOBSTAR) {
|
|
return;
|
|
}
|
|
if (prev === undefined) {
|
|
if (next !== undefined && next !== exports.GLOBSTAR) {
|
|
pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
|
|
}
|
|
else {
|
|
pp[i] = twoStar;
|
|
}
|
|
}
|
|
else if (next === undefined) {
|
|
pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
|
|
}
|
|
else if (next !== exports.GLOBSTAR) {
|
|
pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
|
|
pp[i + 1] = exports.GLOBSTAR;
|
|
}
|
|
});
|
|
return pp.filter(p => p !== exports.GLOBSTAR).join('/');
|
|
})
|
|
.join('|');
|
|
// need to wrap in parens if we had more than one thing with |,
|
|
// otherwise only the first will be anchored to ^ and the last to $
|
|
const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
|
|
// must match entire pattern
|
|
// ending in a * or ** will make it less strict.
|
|
re = '^' + open + re + close + '$';
|
|
// can match anything, as long as it's not this.
|
|
if (this.negate)
|
|
re = '^(?!' + re + ').+$';
|
|
try {
|
|
this.regexp = new RegExp(re, [...flags].join(''));
|
|
/* c8 ignore start */
|
|
}
|
|
catch (ex) {
|
|
// should be impossible
|
|
this.regexp = false;
|
|
}
|
|
/* c8 ignore stop */
|
|
return this.regexp;
|
|
}
|
|
slashSplit(p) {
|
|
// if p starts with // on windows, we preserve that
|
|
// so that UNC paths aren't broken. Otherwise, any number of
|
|
// / characters are coalesced into one, unless
|
|
// preserveMultipleSlashes is set to true.
|
|
if (this.preserveMultipleSlashes) {
|
|
return p.split('/');
|
|
}
|
|
else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
|
|
// add an extra '' for the one we lose
|
|
return ['', ...p.split(/\/+/)];
|
|
}
|
|
else {
|
|
return p.split(/\/+/);
|
|
}
|
|
}
|
|
match(f, partial = this.partial) {
|
|
this.debug('match', f, this.pattern);
|
|
// short-circuit in the case of busted things.
|
|
// comments, etc.
|
|
if (this.comment) {
|
|
return false;
|
|
}
|
|
if (this.empty) {
|
|
return f === '';
|
|
}
|
|
if (f === '/' && partial) {
|
|
return true;
|
|
}
|
|
const options = this.options;
|
|
// windows: need to use /, not \
|
|
if (this.isWindows) {
|
|
f = f.split('\\').join('/');
|
|
}
|
|
// treat the test path as a set of pathparts.
|
|
const ff = this.slashSplit(f);
|
|
this.debug(this.pattern, 'split', ff);
|
|
// just ONE of the pattern sets in this.set needs to match
|
|
// in order for it to be valid. If negating, then just one
|
|
// match means that we have failed.
|
|
// Either way, return on the first hit.
|
|
const set = this.set;
|
|
this.debug(this.pattern, 'set', set);
|
|
// Find the basename of the path by looking for the last non-empty segment
|
|
let filename = ff[ff.length - 1];
|
|
if (!filename) {
|
|
for (let i = ff.length - 2; !filename && i >= 0; i--) {
|
|
filename = ff[i];
|
|
}
|
|
}
|
|
for (let i = 0; i < set.length; i++) {
|
|
const pattern = set[i];
|
|
let file = ff;
|
|
if (options.matchBase && pattern.length === 1) {
|
|
file = [filename];
|
|
}
|
|
const hit = this.matchOne(file, pattern, partial);
|
|
if (hit) {
|
|
if (options.flipNegate) {
|
|
return true;
|
|
}
|
|
return !this.negate;
|
|
}
|
|
}
|
|
// didn't get any hits. this is success if it's a negative
|
|
// pattern, failure otherwise.
|
|
if (options.flipNegate) {
|
|
return false;
|
|
}
|
|
return this.negate;
|
|
}
|
|
static defaults(def) {
|
|
return exports.minimatch.defaults(def).Minimatch;
|
|
}
|
|
}
|
|
exports.Minimatch = Minimatch;
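// For example (illustrative): a compiled matcher can be reused across many paths,
// which is cheaper than calling minimatch() repeatedly:
//   const mm = new Minimatch('src/**/*.ts');
//   mm.match('src/lib/util.ts') // => true
//   mm.match('test/util.ts')    // => false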
/* c8 ignore start */
|
|
var ast_js_2 = __nccwpck_require__(7642);
|
|
Object.defineProperty(exports, "AST", ({ enumerable: true, get: function () { return ast_js_2.AST; } }));
|
|
var escape_js_2 = __nccwpck_require__(1477);
|
|
Object.defineProperty(exports, "escape", ({ enumerable: true, get: function () { return escape_js_2.escape; } }));
|
|
var unescape_js_2 = __nccwpck_require__(9820);
|
|
Object.defineProperty(exports, "unescape", ({ enumerable: true, get: function () { return unescape_js_2.unescape; } }));
|
|
/* c8 ignore stop */
|
|
exports.minimatch.AST = ast_js_1.AST;
|
|
exports.minimatch.Minimatch = Minimatch;
|
|
exports.minimatch.escape = escape_js_1.escape;
|
|
exports.minimatch.unescape = unescape_js_1.unescape;
|
|
//# sourceMappingURL=index.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 9820:
|
|
/***/ ((__unused_webpack_module, exports) => {
|
|
|
|
"use strict";
|
|
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.unescape = void 0;
|
|
/**
|
|
* Un-escape a string that has been escaped with {@link escape}.
|
|
*
|
|
* If the {@link windowsPathsNoEscape} option is used, then square-brace
|
|
* escapes are removed, but not backslash escapes. For example, it will turn
|
|
* the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
|
|
* because `\` is a path separator in `windowsPathsNoEscape` mode.
|
|
*
|
|
* When `windowsPathsNoEscape` is not set, then both brace escapes and
|
|
* backslash escapes are removed.
|
|
*
|
|
* Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
|
|
* or unescaped.
|
|
*/
|
|
const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
|
|
return windowsPathsNoEscape
|
|
? s.replace(/\[([^\/\\])\]/g, '$1')
|
|
: s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
|
|
};
|
|
exports.unescape = unescape;
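// For example (illustrative): unescape('\\*.txt') === '*.txt', and
// unescape('[*].txt', { windowsPathsNoEscape: true }) === '*.txt'.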
//# sourceMappingURL=unescape.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 8865:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0;
|
|
const proc = typeof process === 'object' && process
|
|
? process
|
|
: {
|
|
stdout: null,
|
|
stderr: null,
|
|
};
|
|
const node_events_1 = __nccwpck_require__(5673);
|
|
const node_stream_1 = __importDefault(__nccwpck_require__(4492));
|
|
const node_string_decoder_1 = __nccwpck_require__(6915);
|
|
/**
|
|
* Return true if the argument is a Minipass stream, Node stream, or something
|
|
* else that Minipass can interact with.
|
|
*/
|
|
const isStream = (s) => !!s &&
|
|
typeof s === 'object' &&
|
|
(s instanceof Minipass ||
|
|
s instanceof node_stream_1.default ||
|
|
(0, exports.isReadable)(s) ||
|
|
(0, exports.isWritable)(s));
|
|
exports.isStream = isStream;
|
|
/**
|
|
* Return true if the argument is a valid {@link Minipass.Readable}
|
|
*/
|
|
const isReadable = (s) => !!s &&
|
|
typeof s === 'object' &&
|
|
s instanceof node_events_1.EventEmitter &&
|
|
typeof s.pipe === 'function' &&
|
|
// node core Writable streams have a pipe() method, but it throws
|
|
s.pipe !== node_stream_1.default.Writable.prototype.pipe;
|
|
exports.isReadable = isReadable;
|
|
/**
|
|
* Return true if the argument is a valid {@link Minipass.Writable}
|
|
*/
|
|
const isWritable = (s) => !!s &&
|
|
typeof s === 'object' &&
|
|
s instanceof node_events_1.EventEmitter &&
|
|
typeof s.write === 'function' &&
|
|
typeof s.end === 'function';
|
|
exports.isWritable = isWritable;
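// For example (illustrative): these guards accept anything Minipass can pipe to
// or from, e.g. isStream(process.stdout) and isStream(new Minipass()) are both
// true, while isStream({}) is false.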
const EOF = Symbol('EOF');
|
|
const MAYBE_EMIT_END = Symbol('maybeEmitEnd');
|
|
const EMITTED_END = Symbol('emittedEnd');
|
|
const EMITTING_END = Symbol('emittingEnd');
|
|
const EMITTED_ERROR = Symbol('emittedError');
|
|
const CLOSED = Symbol('closed');
|
|
const READ = Symbol('read');
|
|
const FLUSH = Symbol('flush');
|
|
const FLUSHCHUNK = Symbol('flushChunk');
|
|
const ENCODING = Symbol('encoding');
|
|
const DECODER = Symbol('decoder');
|
|
const FLOWING = Symbol('flowing');
|
|
const PAUSED = Symbol('paused');
|
|
const RESUME = Symbol('resume');
|
|
const BUFFER = Symbol('buffer');
|
|
const PIPES = Symbol('pipes');
|
|
const BUFFERLENGTH = Symbol('bufferLength');
|
|
const BUFFERPUSH = Symbol('bufferPush');
|
|
const BUFFERSHIFT = Symbol('bufferShift');
|
|
const OBJECTMODE = Symbol('objectMode');
|
|
// internal event when stream is destroyed
|
|
const DESTROYED = Symbol('destroyed');
|
|
// internal event when stream has an error
|
|
const ERROR = Symbol('error');
|
|
const EMITDATA = Symbol('emitData');
|
|
const EMITEND = Symbol('emitEnd');
|
|
const EMITEND2 = Symbol('emitEnd2');
|
|
const ASYNC = Symbol('async');
|
|
const ABORT = Symbol('abort');
|
|
const ABORTED = Symbol('aborted');
|
|
const SIGNAL = Symbol('signal');
|
|
const DATALISTENERS = Symbol('dataListeners');
|
|
const DISCARDED = Symbol('discarded');
|
|
const defer = (fn) => Promise.resolve().then(fn);
|
|
const nodefer = (fn) => fn();
|
|
const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish';
|
|
const isArrayBufferLike = (b) => b instanceof ArrayBuffer ||
|
|
(!!b &&
|
|
typeof b === 'object' &&
|
|
b.constructor &&
|
|
b.constructor.name === 'ArrayBuffer' &&
|
|
b.byteLength >= 0);
|
|
const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
|
|
/**
|
|
* Internal class representing a pipe to a destination stream.
|
|
*
|
|
* @internal
|
|
*/
|
|
class Pipe {
|
|
src;
|
|
dest;
|
|
opts;
|
|
ondrain;
|
|
constructor(src, dest, opts) {
|
|
this.src = src;
|
|
this.dest = dest;
|
|
this.opts = opts;
|
|
this.ondrain = () => src[RESUME]();
|
|
this.dest.on('drain', this.ondrain);
|
|
}
|
|
unpipe() {
|
|
this.dest.removeListener('drain', this.ondrain);
|
|
}
|
|
// only here for the prototype
|
|
/* c8 ignore start */
|
|
proxyErrors(_er) { }
|
|
/* c8 ignore stop */
|
|
end() {
|
|
this.unpipe();
|
|
if (this.opts.end)
|
|
this.dest.end();
|
|
}
|
|
}
|
|
/**
|
|
* Internal class representing a pipe to a destination stream where
|
|
* errors are proxied.
|
|
*
|
|
* @internal
|
|
*/
|
|
class PipeProxyErrors extends Pipe {
|
|
unpipe() {
|
|
this.src.removeListener('error', this.proxyErrors);
|
|
super.unpipe();
|
|
}
|
|
constructor(src, dest, opts) {
|
|
super(src, dest, opts);
|
|
this.proxyErrors = er => dest.emit('error', er);
|
|
src.on('error', this.proxyErrors);
|
|
}
|
|
}
|
|
const isObjectModeOptions = (o) => !!o.objectMode;
|
|
const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer';
|
|
/**
|
|
* Main export, the Minipass class
|
|
*
|
|
* `RType` is the type of data emitted, defaults to Buffer
|
|
*
|
|
* `WType` is the type of data to be written, if RType is buffer or string,
|
|
* then any {@link Minipass.ContiguousData} is allowed.
|
|
*
|
|
* `Events` is the set of event handler signatures that this object
|
|
* will emit, see {@link Minipass.Events}
|
|
*/
|
|
class Minipass extends node_events_1.EventEmitter {
|
|
[FLOWING] = false;
|
|
[PAUSED] = false;
|
|
[PIPES] = [];
|
|
[BUFFER] = [];
|
|
[OBJECTMODE];
|
|
[ENCODING];
|
|
[ASYNC];
|
|
[DECODER];
|
|
[EOF] = false;
|
|
[EMITTED_END] = false;
|
|
[EMITTING_END] = false;
|
|
[CLOSED] = false;
|
|
[EMITTED_ERROR] = null;
|
|
[BUFFERLENGTH] = 0;
|
|
[DESTROYED] = false;
|
|
[SIGNAL];
|
|
[ABORTED] = false;
|
|
[DATALISTENERS] = 0;
|
|
[DISCARDED] = false;
|
|
/**
|
|
* true if the stream can be written
|
|
*/
|
|
writable = true;
|
|
/**
|
|
* true if the stream can be read
|
|
*/
|
|
readable = true;
|
|
/**
|
|
* If `RType` is Buffer, then options do not need to be provided.
|
|
* Otherwise, an options object must be provided to specify either
|
|
* {@link Minipass.SharedOptions.objectMode} or
|
|
* {@link Minipass.SharedOptions.encoding}, as appropriate.
|
|
*/
|
|
constructor(...args) {
|
|
const options = (args[0] ||
|
|
{});
|
|
super();
|
|
if (options.objectMode && typeof options.encoding === 'string') {
|
|
throw new TypeError('Encoding and objectMode may not be used together');
|
|
}
|
|
if (isObjectModeOptions(options)) {
|
|
this[OBJECTMODE] = true;
|
|
this[ENCODING] = null;
|
|
}
|
|
else if (isEncodingOptions(options)) {
|
|
this[ENCODING] = options.encoding;
|
|
this[OBJECTMODE] = false;
|
|
}
|
|
else {
|
|
this[OBJECTMODE] = false;
|
|
this[ENCODING] = null;
|
|
}
|
|
this[ASYNC] = !!options.async;
|
|
this[DECODER] = this[ENCODING]
|
|
? new node_string_decoder_1.StringDecoder(this[ENCODING])
|
|
: null;
|
|
//@ts-ignore - private option for debugging and testing
|
|
if (options && options.debugExposeBuffer === true) {
|
|
Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
|
|
}
|
|
//@ts-ignore - private option for debugging and testing
|
|
if (options && options.debugExposePipes === true) {
|
|
Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
|
|
}
|
|
const { signal } = options;
|
|
if (signal) {
|
|
this[SIGNAL] = signal;
|
|
if (signal.aborted) {
|
|
this[ABORT]();
|
|
}
|
|
else {
|
|
signal.addEventListener('abort', () => this[ABORT]());
|
|
}
|
|
}
|
|
}
|
|
/**
|
|
* The amount of data stored in the buffer waiting to be read.
|
|
*
|
|
* For Buffer strings, this will be the total byte length.
|
|
* For string encoding streams, this will be the string character length,
|
|
* according to JavaScript's `string.length` logic.
|
|
* For objectMode streams, this is a count of the items waiting to be
|
|
* emitted.
|
|
*/
|
|
get bufferLength() {
|
|
return this[BUFFERLENGTH];
|
|
}
|
|
/**
|
|
* The `BufferEncoding` currently in use, or `null`
|
|
*/
|
|
get encoding() {
|
|
return this[ENCODING];
|
|
}
|
|
/**
|
|
* @deprecated - This is a read only property
|
|
*/
|
|
set encoding(_enc) {
|
|
throw new Error('Encoding must be set at instantiation time');
|
|
}
|
|
/**
|
|
* @deprecated - Encoding may only be set at instantiation time
|
|
*/
|
|
setEncoding(_enc) {
|
|
throw new Error('Encoding must be set at instantiation time');
|
|
}
|
|
/**
|
|
* True if this is an objectMode stream
|
|
*/
|
|
get objectMode() {
|
|
return this[OBJECTMODE];
|
|
}
|
|
/**
|
|
* @deprecated - This is a read-only property
|
|
*/
|
|
set objectMode(_om) {
|
|
throw new Error('objectMode must be set at instantiation time');
|
|
}
|
|
/**
|
|
* true if this is an async stream
|
|
*/
|
|
get ['async']() {
|
|
return this[ASYNC];
|
|
}
|
|
/**
|
|
* Set to true to make this stream async.
|
|
*
|
|
* Once set, it cannot be unset, as this would potentially cause incorrect
|
|
* behavior. Ie, a sync stream can be made async, but an async stream
|
|
* cannot be safely made sync.
|
|
*/
|
|
set ['async'](a) {
|
|
this[ASYNC] = this[ASYNC] || !!a;
|
|
}
|
|
// drop everything and get out of the flow completely
|
|
[ABORT]() {
|
|
this[ABORTED] = true;
|
|
this.emit('abort', this[SIGNAL]?.reason);
|
|
this.destroy(this[SIGNAL]?.reason);
|
|
}
|
|
/**
|
|
* True if the stream has been aborted.
|
|
*/
|
|
get aborted() {
|
|
return this[ABORTED];
|
|
}
|
|
/**
|
|
* No-op setter. Stream aborted status is set via the AbortSignal provided
|
|
* in the constructor options.
|
|
*/
|
|
set aborted(_) { }
|
|
write(chunk, encoding, cb) {
|
|
if (this[ABORTED])
|
|
return false;
|
|
if (this[EOF])
|
|
throw new Error('write after end');
|
|
if (this[DESTROYED]) {
|
|
this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
|
|
return true;
|
|
}
|
|
if (typeof encoding === 'function') {
|
|
cb = encoding;
|
|
encoding = 'utf8';
|
|
}
|
|
if (!encoding)
|
|
encoding = 'utf8';
|
|
const fn = this[ASYNC] ? defer : nodefer;
|
|
// convert array buffers and typed array views into buffers
|
|
// at some point in the future, we may want to do the opposite!
|
|
// leave strings and buffers as-is
|
|
// anything else is only allowed in object mode, so throw
|
|
if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
|
|
if (isArrayBufferView(chunk)) {
|
|
//@ts-ignore - sinful unsafe type changing
|
|
chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
|
|
}
|
|
else if (isArrayBufferLike(chunk)) {
|
|
//@ts-ignore - sinful unsafe type changing
|
|
chunk = Buffer.from(chunk);
|
|
}
|
|
else if (typeof chunk !== 'string') {
|
|
throw new Error('Non-contiguous data written to non-objectMode stream');
|
|
}
|
|
}
|
|
// handle object mode up front, since it's simpler
|
|
// this yields better performance, fewer checks later.
|
|
if (this[OBJECTMODE]) {
|
|
// maybe impossible?
|
|
/* c8 ignore start */
|
|
if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
|
|
this[FLUSH](true);
|
|
/* c8 ignore stop */
|
|
if (this[FLOWING])
|
|
this.emit('data', chunk);
|
|
else
|
|
this[BUFFERPUSH](chunk);
|
|
if (this[BUFFERLENGTH] !== 0)
|
|
this.emit('readable');
|
|
if (cb)
|
|
fn(cb);
|
|
return this[FLOWING];
|
|
}
|
|
// at this point the chunk is a buffer or string
|
|
// don't buffer it up or send it to the decoder
|
|
if (!chunk.length) {
|
|
if (this[BUFFERLENGTH] !== 0)
|
|
this.emit('readable');
|
|
if (cb)
|
|
fn(cb);
|
|
return this[FLOWING];
|
|
}
|
|
// fast-path writing strings of same encoding to a stream with
|
|
// an empty buffer, skipping the buffer/decoder dance
|
|
if (typeof chunk === 'string' &&
|
|
// unless it is a string already ready for us to use
|
|
!(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
|
|
//@ts-ignore - sinful unsafe type change
|
|
chunk = Buffer.from(chunk, encoding);
|
|
}
|
|
if (Buffer.isBuffer(chunk) && this[ENCODING]) {
|
|
//@ts-ignore - sinful unsafe type change
|
|
chunk = this[DECODER].write(chunk);
|
|
}
|
|
// Note: flushing CAN potentially switch us into not-flowing mode
|
|
if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
|
|
this[FLUSH](true);
|
|
if (this[FLOWING])
|
|
this.emit('data', chunk);
|
|
else
|
|
this[BUFFERPUSH](chunk);
|
|
if (this[BUFFERLENGTH] !== 0)
|
|
this.emit('readable');
|
|
if (cb)
|
|
fn(cb);
|
|
return this[FLOWING];
|
|
}
|
|
/**
|
|
* Low-level explicit read method.
|
|
*
|
|
* In objectMode, the argument is ignored, and one item is returned if
|
|
* available.
|
|
*
|
|
* `n` is the number of bytes (or in the case of encoding streams,
|
|
* characters) to consume. If `n` is not provided, then the entire buffer
|
|
* is returned, or `null` is returned if no data is available.
|
|
*
|
|
* If `n` is greater than the amount of data in the internal buffer,
|
|
* then `null` is returned.
|
|
*/
|
|
read(n) {
|
|
if (this[DESTROYED])
|
|
return null;
|
|
this[DISCARDED] = false;
|
|
if (this[BUFFERLENGTH] === 0 ||
|
|
n === 0 ||
|
|
(n && n > this[BUFFERLENGTH])) {
|
|
this[MAYBE_EMIT_END]();
|
|
return null;
|
|
}
|
|
if (this[OBJECTMODE])
|
|
n = null;
|
|
if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
|
|
// not object mode, so if we have an encoding, then RType is string
|
|
// otherwise, must be Buffer
|
|
this[BUFFER] = [
|
|
(this[ENCODING]
|
|
? this[BUFFER].join('')
|
|
: Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
|
|
];
|
|
}
|
|
const ret = this[READ](n || null, this[BUFFER][0]);
|
|
this[MAYBE_EMIT_END]();
|
|
return ret;
|
|
}
|
|
[READ](n, chunk) {
|
|
if (this[OBJECTMODE])
|
|
this[BUFFERSHIFT]();
|
|
else {
|
|
const c = chunk;
|
|
if (n === c.length || n === null)
|
|
this[BUFFERSHIFT]();
|
|
else if (typeof c === 'string') {
|
|
this[BUFFER][0] = c.slice(n);
|
|
chunk = c.slice(0, n);
|
|
this[BUFFERLENGTH] -= n;
|
|
}
|
|
else {
|
|
this[BUFFER][0] = c.subarray(n);
|
|
chunk = c.subarray(0, n);
|
|
this[BUFFERLENGTH] -= n;
|
|
}
|
|
}
|
|
this.emit('data', chunk);
|
|
if (!this[BUFFER].length && !this[EOF])
|
|
this.emit('drain');
|
|
return chunk;
|
|
}
|
|
end(chunk, encoding, cb) {
|
|
if (typeof chunk === 'function') {
|
|
cb = chunk;
|
|
chunk = undefined;
|
|
}
|
|
if (typeof encoding === 'function') {
|
|
cb = encoding;
|
|
encoding = 'utf8';
|
|
}
|
|
if (chunk !== undefined)
|
|
this.write(chunk, encoding);
|
|
if (cb)
|
|
this.once('end', cb);
|
|
this[EOF] = true;
|
|
this.writable = false;
|
|
// if we haven't written anything, then go ahead and emit,
|
|
// even if we're not reading.
|
|
// we'll re-emit if a new 'end' listener is added anyway.
|
|
// This makes MP more suitable to write-only use cases.
|
|
if (this[FLOWING] || !this[PAUSED])
|
|
this[MAYBE_EMIT_END]();
|
|
return this;
|
|
}
|
|
// don't let the internal resume be overwritten
|
|
[RESUME]() {
|
|
if (this[DESTROYED])
|
|
return;
|
|
if (!this[DATALISTENERS] && !this[PIPES].length) {
|
|
this[DISCARDED] = true;
|
|
}
|
|
this[PAUSED] = false;
|
|
this[FLOWING] = true;
|
|
this.emit('resume');
|
|
if (this[BUFFER].length)
|
|
this[FLUSH]();
|
|
else if (this[EOF])
|
|
this[MAYBE_EMIT_END]();
|
|
else
|
|
this.emit('drain');
|
|
}
|
|
/**
|
|
* Resume the stream if it is currently in a paused state
|
|
*
|
|
* If called when there are no pipe destinations or `data` event listeners,
|
|
* this will place the stream in a "discarded" state, where all data will
|
|
* be thrown away. The discarded state is removed if a pipe destination or
|
|
* data handler is added, if pause() is called, or if any synchronous or
|
|
* asynchronous iteration is started.
|
|
*/
|
|
resume() {
|
|
return this[RESUME]();
|
|
}
|
|
/**
|
|
* Pause the stream
|
|
*/
|
|
pause() {
|
|
this[FLOWING] = false;
|
|
this[PAUSED] = true;
|
|
this[DISCARDED] = false;
|
|
}
|
|
/**
|
|
* true if the stream has been forcibly destroyed
|
|
*/
|
|
get destroyed() {
|
|
return this[DESTROYED];
|
|
}
|
|
/**
|
|
* true if the stream is currently in a flowing state, meaning that
|
|
* any writes will be immediately emitted.
|
|
*/
|
|
get flowing() {
|
|
return this[FLOWING];
|
|
}
|
|
/**
|
|
* true if the stream is currently in a paused state
|
|
*/
|
|
get paused() {
|
|
return this[PAUSED];
|
|
}
|
|
[BUFFERPUSH](chunk) {
|
|
if (this[OBJECTMODE])
|
|
this[BUFFERLENGTH] += 1;
|
|
else
|
|
this[BUFFERLENGTH] += chunk.length;
|
|
this[BUFFER].push(chunk);
|
|
}
|
|
[BUFFERSHIFT]() {
|
|
if (this[OBJECTMODE])
|
|
this[BUFFERLENGTH] -= 1;
|
|
else
|
|
this[BUFFERLENGTH] -= this[BUFFER][0].length;
|
|
return this[BUFFER].shift();
|
|
}
|
|
[FLUSH](noDrain = false) {
|
|
do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&
|
|
this[BUFFER].length);
|
|
if (!noDrain && !this[BUFFER].length && !this[EOF])
|
|
this.emit('drain');
|
|
}
|
|
[FLUSHCHUNK](chunk) {
|
|
this.emit('data', chunk);
|
|
return this[FLOWING];
|
|
}
|
|
/**
|
|
* Pipe all data emitted by this stream into the destination provided.
|
|
*
|
|
* Triggers the flow of data.
|
|
*/
|
|
pipe(dest, opts) {
|
|
if (this[DESTROYED])
|
|
return dest;
|
|
this[DISCARDED] = false;
|
|
const ended = this[EMITTED_END];
|
|
opts = opts || {};
|
|
if (dest === proc.stdout || dest === proc.stderr)
|
|
opts.end = false;
|
|
else
|
|
opts.end = opts.end !== false;
|
|
opts.proxyErrors = !!opts.proxyErrors;
|
|
// piping an ended stream ends immediately
|
|
if (ended) {
|
|
if (opts.end)
|
|
dest.end();
|
|
}
|
|
else {
|
|
// "as" here just ignores the WType, which pipes don't care about,
|
|
// since they're only consuming from us, and writing to the dest
|
|
this[PIPES].push(!opts.proxyErrors
|
|
? new Pipe(this, dest, opts)
|
|
: new PipeProxyErrors(this, dest, opts));
|
|
if (this[ASYNC])
|
|
defer(() => this[RESUME]());
|
|
else
|
|
this[RESUME]();
|
|
}
|
|
return dest;
|
|
}
|
|
/**
|
|
* Fully unhook a piped destination stream.
|
|
*
|
|
* If the destination stream was the only consumer of this stream (ie,
|
|
* there are no other piped destinations or `'data'` event listeners)
|
|
* then the flow of data will stop until there is another consumer or
|
|
* {@link Minipass#resume} is explicitly called.
|
|
*/
|
|
unpipe(dest) {
|
|
const p = this[PIPES].find(p => p.dest === dest);
|
|
if (p) {
|
|
if (this[PIPES].length === 1) {
|
|
if (this[FLOWING] && this[DATALISTENERS] === 0) {
|
|
this[FLOWING] = false;
|
|
}
|
|
this[PIPES] = [];
|
|
}
|
|
else
|
|
this[PIPES].splice(this[PIPES].indexOf(p), 1);
|
|
p.unpipe();
|
|
}
|
|
}
|
|
/**
|
|
* Alias for {@link Minipass#on}
|
|
*/
|
|
addListener(ev, handler) {
|
|
return this.on(ev, handler);
|
|
}
|
|
/**
|
|
* Mostly identical to `EventEmitter.on`, with the following
|
|
* behavior differences to prevent data loss and unnecessary hangs:
|
|
*
|
|
* - Adding a 'data' event handler will trigger the flow of data
|
|
*
|
|
* - Adding a 'readable' event handler when there is data waiting to be read
|
|
* will cause 'readable' to be emitted immediately.
|
|
*
|
|
* - Adding an 'endish' event handler ('end', 'finish', etc.) which has
|
|
* already passed will cause the event to be emitted immediately and all
|
|
* handlers removed.
|
|
*
|
|
* - Adding an 'error' event handler after an error has been emitted will
|
|
* cause the event to be re-emitted immediately with the error previously
|
|
* raised.
|
|
*/
|
|
on(ev, handler) {
|
|
const ret = super.on(ev, handler);
|
|
if (ev === 'data') {
|
|
this[DISCARDED] = false;
|
|
this[DATALISTENERS]++;
|
|
if (!this[PIPES].length && !this[FLOWING]) {
|
|
this[RESUME]();
|
|
}
|
|
}
|
|
else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {
|
|
super.emit('readable');
|
|
}
|
|
else if (isEndish(ev) && this[EMITTED_END]) {
|
|
super.emit(ev);
|
|
this.removeAllListeners(ev);
|
|
}
|
|
else if (ev === 'error' && this[EMITTED_ERROR]) {
|
|
const h = handler;
|
|
if (this[ASYNC])
|
|
defer(() => h.call(this, this[EMITTED_ERROR]));
|
|
else
|
|
h.call(this, this[EMITTED_ERROR]);
|
|
}
|
|
return ret;
|
|
}
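// Illustrative usage sketch (not part of the generated bundle): the listener
// behaviors documented above, in practice. Attaching a 'data' handler starts
// the flow, and an 'end' handler added after the stream has ended still fires.
//
//   const { Minipass } = require('minipass');
//   const src = new Minipass({ encoding: 'utf8' });
//   src.end('late chunk');
//   src.on('data', c => console.log(c));      // starts flow, logs 'late chunk'
//   src.on('end', () => console.log('done')); // 'end' is re-emitted immediately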
|
|
/**
|
|
* Alias for {@link Minipass#off}
|
|
*/
|
|
removeListener(ev, handler) {
|
|
return this.off(ev, handler);
|
|
}
|
|
/**
|
|
* Mostly identical to `EventEmitter.off`
|
|
*
|
|
* If a 'data' event handler is removed, and it was the last consumer
|
|
* (ie, there are no pipe destinations or other 'data' event listeners),
|
|
* then the flow of data will stop until there is another consumer or
|
|
* {@link Minipass#resume} is explicitly called.
|
|
*/
|
|
off(ev, handler) {
|
|
const ret = super.off(ev, handler);
|
|
// if we previously had listeners, and now we don't, and we don't
|
|
// have any pipes, then stop the flow, unless it's been explicitly
|
|
// put in a discarded flowing state via stream.resume().
|
|
if (ev === 'data') {
|
|
this[DATALISTENERS] = this.listeners('data').length;
|
|
if (this[DATALISTENERS] === 0 &&
|
|
!this[DISCARDED] &&
|
|
!this[PIPES].length) {
|
|
this[FLOWING] = false;
|
|
}
|
|
}
|
|
return ret;
|
|
}
|
|
/**
|
|
* Mostly identical to `EventEmitter.removeAllListeners`
|
|
*
|
|
* If all 'data' event handlers are removed, and they were the last consumer
|
|
* (ie, there are no pipe destinations), then the flow of data will stop
|
|
* until there is another consumer or {@link Minipass#resume} is explicitly
|
|
* called.
|
|
*/
|
|
removeAllListeners(ev) {
|
|
const ret = super.removeAllListeners(ev);
|
|
if (ev === 'data' || ev === undefined) {
|
|
this[DATALISTENERS] = 0;
|
|
if (!this[DISCARDED] && !this[PIPES].length) {
|
|
this[FLOWING] = false;
|
|
}
|
|
}
|
|
return ret;
|
|
}
|
|
/**
|
|
* true if the 'end' event has been emitted
|
|
*/
|
|
get emittedEnd() {
|
|
return this[EMITTED_END];
|
|
}
|
|
[MAYBE_EMIT_END]() {
|
|
if (!this[EMITTING_END] &&
|
|
!this[EMITTED_END] &&
|
|
!this[DESTROYED] &&
|
|
this[BUFFER].length === 0 &&
|
|
this[EOF]) {
|
|
this[EMITTING_END] = true;
|
|
this.emit('end');
|
|
this.emit('prefinish');
|
|
this.emit('finish');
|
|
if (this[CLOSED])
|
|
this.emit('close');
|
|
this[EMITTING_END] = false;
|
|
}
|
|
}
|
|
/**
|
|
* Mostly identical to `EventEmitter.emit`, with the following
|
|
* behavior differences to prevent data loss and unnecessary hangs:
|
|
*
|
|
* If the stream has been destroyed, and the event is something other
|
|
* than 'close' or 'error', then `false` is returned and no handlers
|
|
* are called.
|
|
*
|
|
* If the event is 'end', and has already been emitted, then the event
|
|
* is ignored. If the stream is in a paused or non-flowing state, then
|
|
* the event will be deferred until data flow resumes. If the stream is
|
|
* async, then handlers will be called on the next tick rather than
|
|
* immediately.
|
|
*
|
|
* If the event is 'close', and 'end' has not yet been emitted, then
|
|
* the event will be deferred until after 'end' is emitted.
|
|
*
|
|
* If the event is 'error', and an AbortSignal was provided for the stream,
|
|
* and there are no listeners, then the event is ignored, matching the
|
|
* behavior of node core streams in the presence of an AbortSignal.
|
|
*
|
|
* If the event is 'finish' or 'prefinish', then all listeners will be
|
|
* removed after emitting the event, to prevent double-firing.
|
|
*/
|
|
emit(ev, ...args) {
|
|
const data = args[0];
|
|
// error and close are only events allowed after calling destroy()
|
|
if (ev !== 'error' &&
|
|
ev !== 'close' &&
|
|
ev !== DESTROYED &&
|
|
this[DESTROYED]) {
|
|
return false;
|
|
}
|
|
else if (ev === 'data') {
|
|
return !this[OBJECTMODE] && !data
|
|
? false
|
|
: this[ASYNC]
|
|
? (defer(() => this[EMITDATA](data)), true)
|
|
: this[EMITDATA](data);
|
|
}
|
|
else if (ev === 'end') {
|
|
return this[EMITEND]();
|
|
}
|
|
else if (ev === 'close') {
|
|
this[CLOSED] = true;
|
|
// don't emit close before 'end' and 'finish'
|
|
if (!this[EMITTED_END] && !this[DESTROYED])
|
|
return false;
|
|
const ret = super.emit('close');
|
|
this.removeAllListeners('close');
|
|
return ret;
|
|
}
|
|
else if (ev === 'error') {
|
|
this[EMITTED_ERROR] = data;
|
|
super.emit(ERROR, data);
|
|
const ret = !this[SIGNAL] || this.listeners('error').length
|
|
? super.emit('error', data)
|
|
: false;
|
|
this[MAYBE_EMIT_END]();
|
|
return ret;
|
|
}
|
|
else if (ev === 'resume') {
|
|
const ret = super.emit('resume');
|
|
this[MAYBE_EMIT_END]();
|
|
return ret;
|
|
}
|
|
else if (ev === 'finish' || ev === 'prefinish') {
|
|
const ret = super.emit(ev);
|
|
this.removeAllListeners(ev);
|
|
return ret;
|
|
}
|
|
// Some other unknown event
|
|
const ret = super.emit(ev, ...args);
|
|
this[MAYBE_EMIT_END]();
|
|
return ret;
|
|
}
|
|
[EMITDATA](data) {
|
|
for (const p of this[PIPES]) {
|
|
if (p.dest.write(data) === false)
|
|
this.pause();
|
|
}
|
|
const ret = this[DISCARDED] ? false : super.emit('data', data);
|
|
this[MAYBE_EMIT_END]();
|
|
return ret;
|
|
}
|
|
[EMITEND]() {
|
|
if (this[EMITTED_END])
|
|
return false;
|
|
this[EMITTED_END] = true;
|
|
this.readable = false;
|
|
return this[ASYNC]
|
|
? (defer(() => this[EMITEND2]()), true)
|
|
: this[EMITEND2]();
|
|
}
|
|
[EMITEND2]() {
|
|
if (this[DECODER]) {
|
|
const data = this[DECODER].end();
|
|
if (data) {
|
|
for (const p of this[PIPES]) {
|
|
p.dest.write(data);
|
|
}
|
|
if (!this[DISCARDED])
|
|
super.emit('data', data);
|
|
}
|
|
}
|
|
for (const p of this[PIPES]) {
|
|
p.end();
|
|
}
|
|
const ret = super.emit('end');
|
|
this.removeAllListeners('end');
|
|
return ret;
|
|
}
|
|
/**
|
|
* Return a Promise that resolves to an array of all emitted data once
|
|
* the stream ends.
|
|
*/
|
|
async collect() {
|
|
const buf = Object.assign([], {
|
|
dataLength: 0,
|
|
});
|
|
if (!this[OBJECTMODE])
|
|
buf.dataLength = 0;
|
|
// set the promise first, in case an error is raised
|
|
// by triggering the flow here.
|
|
const p = this.promise();
|
|
this.on('data', c => {
|
|
buf.push(c);
|
|
if (!this[OBJECTMODE])
|
|
buf.dataLength += c.length;
|
|
});
|
|
await p;
|
|
return buf;
|
|
}
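// Illustrative usage sketch (not part of the generated bundle): collect()
// resolves with every emitted chunk once the stream ends; in objectMode the
// values are returned as-is.
//
//   const { Minipass } = require('minipass');
//   const src = new Minipass({ objectMode: true });
//   src.write({ a: 1 });
//   src.end({ b: 2 });
//   src.collect().then(items => console.log(items)); // [ { a: 1 }, { b: 2 } ]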
|
|
/**
|
|
* Return a Promise that resolves to the concatenation of all emitted data
|
|
* once the stream ends.
|
|
*
|
|
* Not allowed on objectMode streams.
|
|
*/
|
|
async concat() {
|
|
if (this[OBJECTMODE]) {
|
|
throw new Error('cannot concat in objectMode');
|
|
}
|
|
const buf = await this.collect();
|
|
return this[ENCODING]
|
|
? buf.join('')
|
|
: Buffer.concat(buf, buf.dataLength);
|
|
}
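// Illustrative usage sketch (not part of the generated bundle): concat() joins
// everything emitted into one string when an encoding is set (or one Buffer
// otherwise), and rejects in objectMode per the guard above.
//
//   const { Minipass } = require('minipass');
//   const src = new Minipass({ encoding: 'utf8' });
//   src.write('hello ');
//   src.end('world');
//   src.concat().then(s => console.log(s)); // 'hello world'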
|
|
/**
|
|
* Return a void Promise that resolves once the stream ends.
|
|
*/
|
|
async promise() {
|
|
return new Promise((resolve, reject) => {
|
|
this.on(DESTROYED, () => reject(new Error('stream destroyed')));
|
|
this.on('error', er => reject(er));
|
|
this.on('end', () => resolve());
|
|
});
|
|
}
|
|
/**
|
|
* Asynchronous `for await of` iteration.
|
|
*
|
|
* This will continue emitting all chunks until the stream terminates.
|
|
*/
|
|
[Symbol.asyncIterator]() {
|
|
// set this up front, in case the consumer doesn't call next()
|
|
// right away.
|
|
this[DISCARDED] = false;
|
|
let stopped = false;
|
|
const stop = async () => {
|
|
this.pause();
|
|
stopped = true;
|
|
return { value: undefined, done: true };
|
|
};
|
|
const next = () => {
|
|
if (stopped)
|
|
return stop();
|
|
const res = this.read();
|
|
if (res !== null)
|
|
return Promise.resolve({ done: false, value: res });
|
|
if (this[EOF])
|
|
return stop();
|
|
let resolve;
|
|
let reject;
|
|
const onerr = (er) => {
|
|
this.off('data', ondata);
|
|
this.off('end', onend);
|
|
this.off(DESTROYED, ondestroy);
|
|
stop();
|
|
reject(er);
|
|
};
|
|
const ondata = (value) => {
|
|
this.off('error', onerr);
|
|
this.off('end', onend);
|
|
this.off(DESTROYED, ondestroy);
|
|
this.pause();
|
|
resolve({ value, done: !!this[EOF] });
|
|
};
|
|
const onend = () => {
|
|
this.off('error', onerr);
|
|
this.off('data', ondata);
|
|
this.off(DESTROYED, ondestroy);
|
|
stop();
|
|
resolve({ done: true, value: undefined });
|
|
};
|
|
const ondestroy = () => onerr(new Error('stream destroyed'));
|
|
return new Promise((res, rej) => {
|
|
reject = rej;
|
|
resolve = res;
|
|
this.once(DESTROYED, ondestroy);
|
|
this.once('error', onerr);
|
|
this.once('end', onend);
|
|
this.once('data', ondata);
|
|
});
|
|
};
|
|
return {
|
|
next,
|
|
throw: stop,
|
|
return: stop,
|
|
[Symbol.asyncIterator]() {
|
|
return this;
|
|
},
|
|
};
|
|
}
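// Illustrative usage sketch (not part of the generated bundle): `for await`
// consumes chunks until the stream ends, pausing between reads as the
// iterator above shows.
//
//   const { Minipass } = require('minipass');
//   const src = new Minipass({ encoding: 'utf8' });
//   src.end('one chunk');
//   (async () => {
//     for await (const chunk of src) console.log(chunk); // 'one chunk'
//   })();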
|
|
/**
|
|
* Synchronous `for of` iteration.
|
|
*
|
|
* The iteration will terminate when the internal buffer runs out, even
|
|
* if the stream has not yet terminated.
|
|
*/
|
|
[Symbol.iterator]() {
|
|
// set this up front, in case the consumer doesn't call next()
|
|
// right away.
|
|
this[DISCARDED] = false;
|
|
let stopped = false;
|
|
const stop = () => {
|
|
this.pause();
|
|
this.off(ERROR, stop);
|
|
this.off(DESTROYED, stop);
|
|
this.off('end', stop);
|
|
stopped = true;
|
|
return { done: true, value: undefined };
|
|
};
|
|
const next = () => {
|
|
if (stopped)
|
|
return stop();
|
|
const value = this.read();
|
|
return value === null ? stop() : { done: false, value };
|
|
};
|
|
this.once('end', stop);
|
|
this.once(ERROR, stop);
|
|
this.once(DESTROYED, stop);
|
|
return {
|
|
next,
|
|
throw: stop,
|
|
return: stop,
|
|
[Symbol.iterator]() {
|
|
return this;
|
|
},
|
|
};
|
|
}
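// Illustrative usage sketch (not part of the generated bundle): synchronous
// iteration only drains what is already buffered and stops as soon as read()
// returns null, even if the stream has not ended.
//
//   const { Minipass } = require('minipass');
//   const src = new Minipass({ objectMode: true });
//   src.write(1);
//   src.write(2);
//   for (const item of src) console.log(item); // 1, then 2, then stops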
|
|
/**
|
|
* Destroy a stream, preventing it from being used for any further purpose.
|
|
*
|
|
* If the stream has a `close()` method, then it will be called on
|
|
* destruction.
|
|
*
|
|
* After destruction, any attempt to write data, read data, or emit most
|
|
* events will be ignored.
|
|
*
|
|
* If an error argument is provided, then it will be emitted in an
|
|
* 'error' event.
|
|
*/
|
|
destroy(er) {
|
|
if (this[DESTROYED]) {
|
|
if (er)
|
|
this.emit('error', er);
|
|
else
|
|
this.emit(DESTROYED);
|
|
return this;
|
|
}
|
|
this[DESTROYED] = true;
|
|
this[DISCARDED] = true;
|
|
// throw away all buffered data, it's never coming out
|
|
this[BUFFER].length = 0;
|
|
this[BUFFERLENGTH] = 0;
|
|
const wc = this;
|
|
if (typeof wc.close === 'function' && !this[CLOSED])
|
|
wc.close();
|
|
if (er)
|
|
this.emit('error', er);
|
|
// if no error to emit, still reject pending promises
|
|
else
|
|
this.emit(DESTROYED);
|
|
return this;
|
|
}
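// Illustrative usage sketch (not part of the generated bundle): destroy()
// drops the buffer and suppresses most further events; passing an error
// emits 'error' on the way out.
//
//   const { Minipass } = require('minipass');
//   const src = new Minipass();
//   src.on('error', er => console.error('failed:', er.message));
//   src.destroy(new Error('abort'));
//   console.log(src.destroyed); // true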
|
|
/**
|
|
* Alias for {@link isStream}
|
|
*
|
|
* Former export location, maintained for backwards compatibility.
|
|
*
|
|
* @deprecated
|
|
*/
|
|
static get isStream() {
|
|
return exports.isStream;
|
|
}
|
|
}
|
|
exports.Minipass = Minipass;
|
|
//# sourceMappingURL=index.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 9569:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
}
|
|
Object.defineProperty(o, k2, desc);
|
|
}) : (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
o[k2] = m[k];
|
|
}));
|
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
}) : function(o, v) {
|
|
o["default"] = v;
|
|
});
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
__setModuleDefault(result, mod);
|
|
return result;
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.PathScurry = exports.Path = exports.PathScurryDarwin = exports.PathScurryPosix = exports.PathScurryWin32 = exports.PathScurryBase = exports.PathPosix = exports.PathWin32 = exports.PathBase = exports.ChildrenCache = exports.ResolveCache = void 0;
|
|
const lru_cache_1 = __nccwpck_require__(7433);
|
|
const path_1 = __nccwpck_require__(1017);
|
|
const url_1 = __nccwpck_require__(7310);
|
|
const actualFS = __importStar(__nccwpck_require__(7147));
|
|
const fs_1 = __nccwpck_require__(7147);
|
|
const realpathSync = fs_1.realpathSync.native;
|
|
// TODO: test perf of fs/promises realpath vs realpathCB,
|
|
// since the promises one uses realpath.native
|
|
const promises_1 = __nccwpck_require__(3292);
|
|
const minipass_1 = __nccwpck_require__(8865);
|
|
const defaultFS = {
|
|
lstatSync: fs_1.lstatSync,
|
|
readdir: fs_1.readdir,
|
|
readdirSync: fs_1.readdirSync,
|
|
readlinkSync: fs_1.readlinkSync,
|
|
realpathSync,
|
|
promises: {
|
|
lstat: promises_1.lstat,
|
|
readdir: promises_1.readdir,
|
|
readlink: promises_1.readlink,
|
|
realpath: promises_1.realpath,
|
|
},
|
|
};
|
|
// if they just gave us require('fs') then use our default
|
|
const fsFromOption = (fsOption) => !fsOption || fsOption === defaultFS || fsOption === actualFS
|
|
? defaultFS
|
|
: {
|
|
...defaultFS,
|
|
...fsOption,
|
|
promises: {
|
|
...defaultFS.promises,
|
|
...(fsOption.promises || {}),
|
|
},
|
|
};
|
|
// turn something like //?/c:/ into c:\
|
|
const uncDriveRegexp = /^\\\\\?\\([a-z]:)\\?$/i;
|
|
const uncToDrive = (rootPath) => rootPath.replace(/\//g, '\\').replace(uncDriveRegexp, '$1\\');
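// Illustrative note (not part of the generated bundle): uncToDrive strips the
// Windows long-path prefix from a root after normalizing slashes, e.g.:
//
//   uncToDrive('//?/c:/');     // -> 'c:\\'
//   uncToDrive('\\\\?\\C:\\'); // -> 'C:\\'
//   uncToDrive('C:\\');        // unchanged: 'C:\\'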
|
|
// windows paths are separated by either / or \
|
|
const eitherSep = /[\\\/]/;
|
|
const UNKNOWN = 0; // may not even exist, for all we know
|
|
const IFIFO = 0b0001;
|
|
const IFCHR = 0b0010;
|
|
const IFDIR = 0b0100;
|
|
const IFBLK = 0b0110;
|
|
const IFREG = 0b1000;
|
|
const IFLNK = 0b1010;
|
|
const IFSOCK = 0b1100;
|
|
const IFMT = 0b1111;
|
|
// mask to unset low 4 bits
|
|
const IFMT_UNKNOWN = ~IFMT;
|
|
// set after successfully calling readdir() and getting entries.
|
|
const READDIR_CALLED = 16;
|
|
// set after a successful lstat()
|
|
const LSTAT_CALLED = 32;
|
|
// set if an entry (or one of its parents) is definitely not a dir
|
|
const ENOTDIR = 64;
|
|
// set if an entry (or one of its parents) does not exist
|
|
// (can also be set on lstat errors like EACCES or ENAMETOOLONG)
|
|
const ENOENT = 128;
|
|
// cannot have child entries -- also verify &IFMT is either IFDIR or IFLNK
|
|
// set if we fail to readlink
|
|
const ENOREADLINK = 256;
|
|
// set if we know realpath() will fail
|
|
const ENOREALPATH = 512;
|
|
const ENOCHILD = ENOTDIR | ENOENT | ENOREALPATH;
|
|
const TYPEMASK = 1023;
|
|
const entToType = (s) => s.isFile()
|
|
? IFREG
|
|
: s.isDirectory()
|
|
? IFDIR
|
|
: s.isSymbolicLink()
|
|
? IFLNK
|
|
: s.isCharacterDevice()
|
|
? IFCHR
|
|
: s.isBlockDevice()
|
|
? IFBLK
|
|
: s.isSocket()
|
|
? IFSOCK
|
|
: s.isFIFO()
|
|
? IFIFO
|
|
: UNKNOWN;
|
|
// normalize unicode path names
|
|
const normalizeCache = new Map();
|
|
const normalize = (s) => {
|
|
const c = normalizeCache.get(s);
|
|
if (c)
|
|
return c;
|
|
const n = s.normalize('NFKD');
|
|
normalizeCache.set(s, n);
|
|
return n;
|
|
};
|
|
const normalizeNocaseCache = new Map();
|
|
const normalizeNocase = (s) => {
|
|
const c = normalizeNocaseCache.get(s);
|
|
if (c)
|
|
return c;
|
|
const n = normalize(s.toLowerCase());
|
|
normalizeNocaseCache.set(s, n);
|
|
return n;
|
|
};
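// Illustrative note (not part of the generated bundle): the NFKD normalization
// above is what lets visually identical unicode names compare equal, e.g.:
//
//   const precomposed = 'caf\u00e9'; // 'café' as one code point
//   const decomposed = 'cafe\u0301'; // 'café' as 'e' + combining accent
//   precomposed === decomposed;                                     // false
//   precomposed.normalize('NFKD') === decomposed.normalize('NFKD'); // true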
|
|
/**
|
|
* An LRUCache for storing resolved path strings or Path objects.
|
|
* @internal
|
|
*/
|
|
class ResolveCache extends lru_cache_1.LRUCache {
|
|
constructor() {
|
|
super({ max: 256 });
|
|
}
|
|
}
|
|
exports.ResolveCache = ResolveCache;
|
|
// In order to prevent blowing out the js heap by allocating hundreds of
|
|
// thousands of Path entries when walking extremely large trees, the "children"
|
|
// in this tree are represented by storing an array of Path entries in an
|
|
// LRUCache, indexed by the parent. At any time, Path.children() may return an
|
|
// empty array, indicating that it doesn't know about any of its children, and
|
|
// thus has to rebuild that cache. This is fine, it just means that we don't
|
|
// benefit as much from having the cached entries, but huge directory walks
|
|
// don't blow out the stack, and smaller ones are still as fast as possible.
|
|
//
|
|
// It does impose some complexity when building up the readdir data, because we
|
|
// need to pass a reference to the children array that we started with.
|
|
/**
|
|
* an LRUCache for storing child entries.
|
|
* @internal
|
|
*/
|
|
class ChildrenCache extends lru_cache_1.LRUCache {
|
|
constructor(maxSize = 16 * 1024) {
|
|
super({
|
|
maxSize,
|
|
// parent + children
|
|
sizeCalculation: a => a.length + 1,
|
|
});
|
|
}
|
|
}
|
|
exports.ChildrenCache = ChildrenCache;
|
|
const setAsCwd = Symbol('PathScurry setAsCwd');
|
|
/**
|
|
* Path objects are sort of like a super-powered
|
|
* {@link https://nodejs.org/docs/latest/api/fs.html#class-fsdirent fs.Dirent}
|
|
*
|
|
* Each one represents a single filesystem entry on disk, which may or may not
|
|
* exist. It includes methods for reading various types of information via
|
|
* lstat, readlink, and readdir, and caches all information to the greatest
|
|
* degree possible.
|
|
*
|
|
* Note that fs operations that would normally throw will instead return an
|
|
* "empty" value. This is in order to prevent excessive overhead from error
|
|
* stack traces.
|
|
*/
|
|
class PathBase {
|
|
/**
|
|
* the basename of this path
|
|
*
|
|
* **Important**: *always* test the path name against any test string
|
|
* using the {@link isNamed} method, and not by directly comparing this
|
|
* string. Otherwise, unicode path strings that the system sees as identical
|
|
* will not be properly treated as the same path, leading to incorrect
|
|
* behavior and possible security issues.
|
|
*/
|
|
name;
|
|
/**
|
|
* the Path entry corresponding to the path root.
|
|
*
|
|
* @internal
|
|
*/
|
|
root;
|
|
/**
|
|
* All roots found within the current PathScurry family
|
|
*
|
|
* @internal
|
|
*/
|
|
roots;
|
|
/**
|
|
* a reference to the parent path, or undefined in the case of root entries
|
|
*
|
|
* @internal
|
|
*/
|
|
parent;
|
|
/**
|
|
* boolean indicating whether paths are compared case-insensitively
|
|
* @internal
|
|
*/
|
|
nocase;
|
|
// potential default fs override
|
|
#fs;
|
|
// Stats fields
|
|
#dev;
|
|
get dev() {
|
|
return this.#dev;
|
|
}
|
|
#mode;
|
|
get mode() {
|
|
return this.#mode;
|
|
}
|
|
#nlink;
|
|
get nlink() {
|
|
return this.#nlink;
|
|
}
|
|
#uid;
|
|
get uid() {
|
|
return this.#uid;
|
|
}
|
|
#gid;
|
|
get gid() {
|
|
return this.#gid;
|
|
}
|
|
#rdev;
|
|
get rdev() {
|
|
return this.#rdev;
|
|
}
|
|
#blksize;
|
|
get blksize() {
|
|
return this.#blksize;
|
|
}
|
|
#ino;
|
|
get ino() {
|
|
return this.#ino;
|
|
}
|
|
#size;
|
|
get size() {
|
|
return this.#size;
|
|
}
|
|
#blocks;
|
|
get blocks() {
|
|
return this.#blocks;
|
|
}
|
|
#atimeMs;
|
|
get atimeMs() {
|
|
return this.#atimeMs;
|
|
}
|
|
#mtimeMs;
|
|
get mtimeMs() {
|
|
return this.#mtimeMs;
|
|
}
|
|
#ctimeMs;
|
|
get ctimeMs() {
|
|
return this.#ctimeMs;
|
|
}
|
|
#birthtimeMs;
|
|
get birthtimeMs() {
|
|
return this.#birthtimeMs;
|
|
}
|
|
#atime;
|
|
get atime() {
|
|
return this.#atime;
|
|
}
|
|
#mtime;
|
|
get mtime() {
|
|
return this.#mtime;
|
|
}
|
|
#ctime;
|
|
get ctime() {
|
|
return this.#ctime;
|
|
}
|
|
#birthtime;
|
|
get birthtime() {
|
|
return this.#birthtime;
|
|
}
|
|
#matchName;
|
|
#depth;
|
|
#fullpath;
|
|
#fullpathPosix;
|
|
#relative;
|
|
#relativePosix;
|
|
#type;
|
|
#children;
|
|
#linkTarget;
|
|
#realpath;
|
|
/**
|
|
* This property is for compatibility with the Dirent class as of
|
|
* Node v20, where Dirent['path'] refers to the path of the directory
|
|
* that was passed to readdir. So, somewhat counterintuitively, this
|
|
* property refers to the *parent* path, not the path object itself.
|
|
* For root entries, it's the path to the entry itself.
|
|
*/
|
|
get path() {
|
|
return (this.parent || this).fullpath();
|
|
}
|
|
/**
|
|
* Do not create new Path objects directly. They should always be accessed
|
|
* via the PathScurry class or other methods on the Path class.
|
|
*
|
|
* @internal
|
|
*/
|
|
constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
|
|
this.name = name;
|
|
this.#matchName = nocase ? normalizeNocase(name) : normalize(name);
|
|
this.#type = type & TYPEMASK;
|
|
this.nocase = nocase;
|
|
this.roots = roots;
|
|
this.root = root || this;
|
|
this.#children = children;
|
|
this.#fullpath = opts.fullpath;
|
|
this.#relative = opts.relative;
|
|
this.#relativePosix = opts.relativePosix;
|
|
this.parent = opts.parent;
|
|
if (this.parent) {
|
|
this.#fs = this.parent.#fs;
|
|
}
|
|
else {
|
|
this.#fs = fsFromOption(opts.fs);
|
|
}
|
|
}
|
|
/**
|
|
* Returns the depth of the Path object from its root.
|
|
*
|
|
* For example, a path at `/foo/bar` would have a depth of 2.
|
|
*/
|
|
depth() {
|
|
if (this.#depth !== undefined)
|
|
return this.#depth;
|
|
if (!this.parent)
|
|
return (this.#depth = 0);
|
|
return (this.#depth = this.parent.depth() + 1);
|
|
}
|
|
/**
|
|
* @internal
|
|
*/
|
|
childrenCache() {
|
|
return this.#children;
|
|
}
|
|
/**
|
|
* Get the Path object referenced by the string path, resolved from this Path
|
|
*/
|
|
resolve(path) {
|
|
if (!path) {
|
|
return this;
|
|
}
|
|
const rootPath = this.getRootString(path);
|
|
const dir = path.substring(rootPath.length);
|
|
const dirParts = dir.split(this.splitSep);
|
|
const result = rootPath
|
|
? this.getRoot(rootPath).#resolveParts(dirParts)
|
|
: this.#resolveParts(dirParts);
|
|
return result;
|
|
}
|
|
#resolveParts(dirParts) {
|
|
let p = this;
|
|
for (const part of dirParts) {
|
|
p = p.child(part);
|
|
}
|
|
return p;
|
|
}
|
|
/**
|
|
* Returns the cached children Path objects, if still available. If they
|
|
* have fallen out of the cache, then returns an empty array, and resets the
|
|
* READDIR_CALLED bit, so that future calls to readdir() will require an fs
|
|
* lookup.
|
|
*
|
|
* @internal
|
|
*/
|
|
children() {
|
|
const cached = this.#children.get(this);
|
|
if (cached) {
|
|
return cached;
|
|
}
|
|
const children = Object.assign([], { provisional: 0 });
|
|
this.#children.set(this, children);
|
|
this.#type &= ~READDIR_CALLED;
|
|
return children;
|
|
}
|
|
/**
|
|
* Resolves a path portion and returns or creates the child Path.
|
|
*
|
|
* Returns `this` if pathPart is `''` or `'.'`, or `parent` if pathPart is
|
|
* `'..'`.
|
|
*
|
|
* This should not be called directly. If `pathPart` contains any path
|
|
* separators, it will lead to unsafe undefined behavior.
|
|
*
|
|
* Use `Path.resolve()` instead.
|
|
*
|
|
* @internal
|
|
*/
|
|
child(pathPart, opts) {
|
|
if (pathPart === '' || pathPart === '.') {
|
|
return this;
|
|
}
|
|
if (pathPart === '..') {
|
|
return this.parent || this;
|
|
}
|
|
// find the child
|
|
const children = this.children();
|
|
const name = this.nocase
|
|
? normalizeNocase(pathPart)
|
|
: normalize(pathPart);
|
|
for (const p of children) {
|
|
if (p.#matchName === name) {
|
|
return p;
|
|
}
|
|
}
|
|
// didn't find it, create provisional child, since it might not
|
|
// actually exist. If we know the parent isn't a dir, then
|
|
// in fact it CAN'T exist.
|
|
const s = this.parent ? this.sep : '';
|
|
const fullpath = this.#fullpath
|
|
? this.#fullpath + s + pathPart
|
|
: undefined;
|
|
const pchild = this.newChild(pathPart, UNKNOWN, {
|
|
...opts,
|
|
parent: this,
|
|
fullpath,
|
|
});
|
|
if (!this.canReaddir()) {
|
|
pchild.#type |= ENOENT;
|
|
}
|
|
// don't have to update provisional, because if we have real children,
|
|
// then provisional is set to children.length, otherwise a lower number
|
|
children.push(pchild);
|
|
return pchild;
|
|
}
|
|
/**
|
|
* The relative path from the cwd. If it does not share an ancestor with
|
|
* the cwd, then this ends up being equivalent to the fullpath()
|
|
*/
|
|
relative() {
|
|
if (this.#relative !== undefined) {
|
|
return this.#relative;
|
|
}
|
|
const name = this.name;
|
|
const p = this.parent;
|
|
if (!p) {
|
|
return (this.#relative = this.name);
|
|
}
|
|
const pv = p.relative();
|
|
return pv + (!pv || !p.parent ? '' : this.sep) + name;
|
|
}
|
|
/**
|
|
* The relative path from the cwd, using / as the path separator.
|
|
* If it does not share an ancestor with
|
|
* the cwd, then this ends up being equivalent to the fullpathPosix().
|
|
* On posix systems, this is identical to relative().
|
|
*/
|
|
relativePosix() {
|
|
if (this.sep === '/')
|
|
return this.relative();
|
|
if (this.#relativePosix !== undefined)
|
|
return this.#relativePosix;
|
|
const name = this.name;
|
|
const p = this.parent;
|
|
if (!p) {
|
|
return (this.#relativePosix = this.fullpathPosix());
|
|
}
|
|
const pv = p.relativePosix();
|
|
return pv + (!pv || !p.parent ? '' : '/') + name;
|
|
}
|
|
/**
|
|
* The fully resolved path string for this Path entry
|
|
*/
|
|
fullpath() {
|
|
if (this.#fullpath !== undefined) {
|
|
return this.#fullpath;
|
|
}
|
|
const name = this.name;
|
|
const p = this.parent;
|
|
if (!p) {
|
|
return (this.#fullpath = this.name);
|
|
}
|
|
const pv = p.fullpath();
|
|
const fp = pv + (!p.parent ? '' : this.sep) + name;
|
|
return (this.#fullpath = fp);
|
|
}
|
|
/**
|
|
* On platforms other than windows, this is identical to fullpath.
|
|
*
|
|
* On windows, this is overridden to return the forward-slash form of the
|
|
* full UNC path.
|
|
*/
|
|
fullpathPosix() {
|
|
if (this.#fullpathPosix !== undefined)
|
|
return this.#fullpathPosix;
|
|
if (this.sep === '/')
|
|
return (this.#fullpathPosix = this.fullpath());
|
|
if (!this.parent) {
|
|
const p = this.fullpath().replace(/\\/g, '/');
|
|
if (/^[a-z]:\//i.test(p)) {
|
|
return (this.#fullpathPosix = `//?/${p}`);
|
|
}
|
|
else {
|
|
return (this.#fullpathPosix = p);
|
|
}
|
|
}
|
|
const p = this.parent;
|
|
const pfpp = p.fullpathPosix();
|
|
const fpp = pfpp + (!pfpp || !p.parent ? '' : '/') + this.name;
|
|
return (this.#fullpathPosix = fpp);
|
|
}
|
|
/**
|
|
* Is the Path of an unknown type?
|
|
*
|
|
* Note that we might know *something* about it if there has been a previous
|
|
* filesystem operation, for example that it does not exist, or is not a
|
|
* link, or whether it has child entries.
|
|
*/
|
|
isUnknown() {
|
|
return (this.#type & IFMT) === UNKNOWN;
|
|
}
|
|
isType(type) {
|
|
return this[`is${type}`]();
|
|
}
|
|
getType() {
|
|
return this.isUnknown()
|
|
? 'Unknown'
|
|
: this.isDirectory()
|
|
? 'Directory'
|
|
: this.isFile()
|
|
? 'File'
|
|
: this.isSymbolicLink()
|
|
? 'SymbolicLink'
|
|
: this.isFIFO()
|
|
? 'FIFO'
|
|
: this.isCharacterDevice()
|
|
? 'CharacterDevice'
|
|
: this.isBlockDevice()
|
|
? 'BlockDevice'
|
|
: /* c8 ignore start */ this.isSocket()
|
|
? 'Socket'
|
|
: 'Unknown';
|
|
/* c8 ignore stop */
|
|
}
|
|
/**
|
|
* Is the Path a regular file?
|
|
*/
|
|
isFile() {
|
|
return (this.#type & IFMT) === IFREG;
|
|
}
|
|
/**
|
|
* Is the Path a directory?
|
|
*/
|
|
isDirectory() {
|
|
return (this.#type & IFMT) === IFDIR;
|
|
}
|
|
/**
|
|
* Is the path a character device?
|
|
*/
|
|
isCharacterDevice() {
|
|
return (this.#type & IFMT) === IFCHR;
|
|
}
|
|
/**
|
|
* Is the path a block device?
|
|
*/
|
|
isBlockDevice() {
|
|
return (this.#type & IFMT) === IFBLK;
|
|
}
|
|
/**
|
|
* Is the path a FIFO pipe?
|
|
*/
|
|
isFIFO() {
|
|
return (this.#type & IFMT) === IFIFO;
|
|
}
|
|
/**
|
|
* Is the path a socket?
|
|
*/
|
|
isSocket() {
|
|
return (this.#type & IFMT) === IFSOCK;
|
|
}
|
|
/**
|
|
* Is the path a symbolic link?
|
|
*/
|
|
isSymbolicLink() {
|
|
return (this.#type & IFLNK) === IFLNK;
|
|
}
|
|
/**
|
|
* Return the entry if it has been subject of a successful lstat, or
|
|
* undefined otherwise.
|
|
*
|
|
* Does not read the filesystem, so an undefined result *could* simply
|
|
* mean that we haven't called lstat on it.
|
|
*/
|
|
lstatCached() {
|
|
return this.#type & LSTAT_CALLED ? this : undefined;
|
|
}
|
|
/**
|
|
* Return the cached link target if the entry has been the subject of a
|
|
* successful readlink, or undefined otherwise.
|
|
*
|
|
* Does not read the filesystem, so an undefined result *could* just mean we
|
|
* don't have any cached data. Only use it if you are very sure that a
|
|
* readlink() has been called at some point.
|
|
*/
|
|
readlinkCached() {
|
|
return this.#linkTarget;
|
|
}
|
|
/**
|
|
* Returns the cached realpath target if the entry has been the subject
|
|
* of a successful realpath, or undefined otherwise.
|
|
*
|
|
* Does not read the filesystem, so an undefined result *could* just mean we
|
|
* don't have any cached data. Only use it if you are very sure that a
|
|
* realpath() has been called at some point.
|
|
*/
|
|
realpathCached() {
|
|
return this.#realpath;
|
|
}
|
|
/**
|
|
* Returns the cached child Path entries array if the entry has been the
|
|
* subject of a successful readdir(), or [] otherwise.
|
|
*
|
|
* Does not read the filesystem, so an empty array *could* just mean we
|
|
* don't have any cached data. Only use it if you are very sure that a
|
|
* readdir() has been called recently enough to still be valid.
|
|
*/
|
|
readdirCached() {
|
|
const children = this.children();
|
|
return children.slice(0, children.provisional);
|
|
}
|
|
/**
|
|
* Return true if it's worth trying to readlink. Ie, we don't (yet) have
|
|
* any indication that readlink will definitely fail.
|
|
*
|
|
* Returns false if the path is known to not be a symlink, if a previous
|
|
* readlink failed, or if the entry does not exist.
|
|
*/
|
|
canReadlink() {
|
|
if (this.#linkTarget)
|
|
return true;
|
|
if (!this.parent)
|
|
return false;
|
|
// cases where it cannot possibly succeed
|
|
const ifmt = this.#type & IFMT;
|
|
return !((ifmt !== UNKNOWN && ifmt !== IFLNK) ||
|
|
this.#type & ENOREADLINK ||
|
|
this.#type & ENOENT);
|
|
}
|
|
/**
|
|
* Return true if readdir has previously been successfully called on this
|
|
* path, indicating that cachedReaddir() is likely valid.
|
|
*/
|
|
calledReaddir() {
|
|
return !!(this.#type & READDIR_CALLED);
|
|
}
|
|
/**
|
|
* Returns true if the path is known to not exist. That is, a previous lstat
|
|
* or readdir failed to verify its existence when that would have been
|
|
* expected, or a parent entry was marked either enoent or enotdir.
|
|
*/
|
|
isENOENT() {
|
|
return !!(this.#type & ENOENT);
|
|
}
|
|
/**
|
|
* Return true if the path is a match for the given path name. This handles
|
|
* case sensitivity and unicode normalization.
|
|
*
|
|
* Note: even on case-sensitive systems, it is **not** safe to test the
|
|
* equality of the `.name` property to determine whether a given pathname
|
|
* matches, due to unicode normalization mismatches.
|
|
*
|
|
* Always use this method instead of testing the `path.name` property
|
|
* directly.
|
|
*/
|
|
isNamed(n) {
|
|
return !this.nocase
|
|
? this.#matchName === normalize(n)
|
|
: this.#matchName === normalizeNocase(n);
|
|
}
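// Illustrative usage sketch (not part of the generated bundle): comparing
// entry.name directly misses unicode and case variants, while isNamed() uses
// the normalized forms described above.
//
//   const { PathScurry } = require('path-scurry');
//   const pw = new PathScurry(process.cwd());
//   for (const entry of pw.readdirSync()) {
//     if (entry.isNamed('caf\u00e9')) console.log('matched', entry.fullpath());
//   }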
|
|
/**
|
|
* Return the Path object corresponding to the target of a symbolic link.
|
|
*
|
|
* If the Path is not a symbolic link, or if the readlink call fails for any
|
|
* reason, `undefined` is returned.
|
|
*
|
|
* Result is cached, and thus may be outdated if the filesystem is mutated.
|
|
*/
|
|
async readlink() {
|
|
const target = this.#linkTarget;
|
|
if (target) {
|
|
return target;
|
|
}
|
|
if (!this.canReadlink()) {
|
|
return undefined;
|
|
}
|
|
/* c8 ignore start */
|
|
// already covered by the canReadlink test, here for ts grumples
|
|
if (!this.parent) {
|
|
return undefined;
|
|
}
|
|
/* c8 ignore stop */
|
|
try {
|
|
const read = await this.#fs.promises.readlink(this.fullpath());
|
|
const linkTarget = this.parent.resolve(read);
|
|
if (linkTarget) {
|
|
return (this.#linkTarget = linkTarget);
|
|
}
|
|
}
|
|
catch (er) {
|
|
this.#readlinkFail(er.code);
|
|
return undefined;
|
|
}
|
|
}
|
|
/**
|
|
* Synchronous {@link PathBase.readlink}
|
|
*/
|
|
readlinkSync() {
|
|
const target = this.#linkTarget;
|
|
if (target) {
|
|
return target;
|
|
}
|
|
if (!this.canReadlink()) {
|
|
return undefined;
|
|
}
|
|
/* c8 ignore start */
|
|
// already covered by the canReadlink test, here for ts grumples
|
|
if (!this.parent) {
|
|
return undefined;
|
|
}
|
|
/* c8 ignore stop */
|
|
try {
|
|
const read = this.#fs.readlinkSync(this.fullpath());
|
|
const linkTarget = this.parent.resolve(read);
|
|
if (linkTarget) {
|
|
return (this.#linkTarget = linkTarget);
|
|
}
|
|
}
|
|
catch (er) {
|
|
this.#readlinkFail(er.code);
|
|
return undefined;
|
|
}
|
|
}
|
|
#readdirSuccess(children) {
|
|
// succeeded, mark readdir called bit
|
|
this.#type |= READDIR_CALLED;
|
|
// mark all remaining provisional children as ENOENT
|
|
for (let p = children.provisional; p < children.length; p++) {
|
|
children[p].#markENOENT();
|
|
}
|
|
}
|
|
#markENOENT() {
|
|
// mark as UNKNOWN and ENOENT
|
|
if (this.#type & ENOENT)
|
|
return;
|
|
this.#type = (this.#type | ENOENT) & IFMT_UNKNOWN;
|
|
this.#markChildrenENOENT();
|
|
}
|
|
#markChildrenENOENT() {
|
|
// all children are provisional and do not exist
|
|
const children = this.children();
|
|
children.provisional = 0;
|
|
for (const p of children) {
|
|
p.#markENOENT();
|
|
}
|
|
}
|
|
#markENOREALPATH() {
|
|
this.#type |= ENOREALPATH;
|
|
this.#markENOTDIR();
|
|
}
|
|
// save the information when we know the entry is not a dir
|
|
#markENOTDIR() {
|
|
// entry is not a directory, so any children can't exist.
|
|
// this *should* be impossible, since any children created
|
|
// after it's been marked ENOTDIR should be marked ENOENT,
|
|
// so it won't even get to this point.
|
|
/* c8 ignore start */
|
|
if (this.#type & ENOTDIR)
|
|
return;
|
|
/* c8 ignore stop */
|
|
let t = this.#type;
|
|
// this could happen if we stat a dir, then delete it,
|
|
// then try to read it or one of its children.
|
|
if ((t & IFMT) === IFDIR)
|
|
t &= IFMT_UNKNOWN;
|
|
this.#type = t | ENOTDIR;
|
|
this.#markChildrenENOENT();
|
|
}
|
|
#readdirFail(code = '') {
|
|
// markENOTDIR and markENOENT also set provisional=0
|
|
if (code === 'ENOTDIR' || code === 'EPERM') {
|
|
this.#markENOTDIR();
|
|
}
|
|
else if (code === 'ENOENT') {
|
|
this.#markENOENT();
|
|
}
|
|
else {
|
|
this.children().provisional = 0;
|
|
}
|
|
}
|
|
#lstatFail(code = '') {
|
|
// Windows just raises ENOENT in this case, disable for win CI
|
|
/* c8 ignore start */
|
|
if (code === 'ENOTDIR') {
|
|
// already know it has a parent by this point
|
|
const p = this.parent;
|
|
p.#markENOTDIR();
|
|
}
|
|
else if (code === 'ENOENT') {
|
|
/* c8 ignore stop */
|
|
this.#markENOENT();
|
|
}
|
|
}
|
|
#readlinkFail(code = '') {
|
|
let ter = this.#type;
|
|
ter |= ENOREADLINK;
|
|
if (code === 'ENOENT')
|
|
ter |= ENOENT;
|
|
// windows gets a weird error when you try to readlink a file
|
|
if (code === 'EINVAL' || code === 'UNKNOWN') {
|
|
// exists, but not a symlink, we don't know WHAT it is, so remove
|
|
// all IFMT bits.
|
|
ter &= IFMT_UNKNOWN;
|
|
}
|
|
this.#type = ter;
|
|
// windows just gets ENOENT in this case. We do cover the case,
|
|
// just disabled because it's impossible on Windows CI
|
|
/* c8 ignore start */
|
|
if (code === 'ENOTDIR' && this.parent) {
|
|
this.parent.#markENOTDIR();
|
|
}
|
|
/* c8 ignore stop */
|
|
}
|
|
#readdirAddChild(e, c) {
|
|
return (this.#readdirMaybePromoteChild(e, c) ||
|
|
this.#readdirAddNewChild(e, c));
|
|
}
|
|
#readdirAddNewChild(e, c) {
|
|
// alloc new entry at head, so it's never provisional
|
|
const type = entToType(e);
|
|
const child = this.newChild(e.name, type, { parent: this });
|
|
const ifmt = child.#type & IFMT;
|
|
if (ifmt !== IFDIR && ifmt !== IFLNK && ifmt !== UNKNOWN) {
|
|
child.#type |= ENOTDIR;
|
|
}
|
|
c.unshift(child);
|
|
c.provisional++;
|
|
return child;
|
|
}
|
|
#readdirMaybePromoteChild(e, c) {
|
|
for (let p = c.provisional; p < c.length; p++) {
|
|
const pchild = c[p];
|
|
const name = this.nocase
|
|
? normalizeNocase(e.name)
|
|
: normalize(e.name);
|
|
if (name !== pchild.#matchName) {
|
|
continue;
|
|
}
|
|
return this.#readdirPromoteChild(e, pchild, p, c);
|
|
}
|
|
}
|
|
#readdirPromoteChild(e, p, index, c) {
|
|
const v = p.name;
|
|
// retain any other flags, but set ifmt from dirent
|
|
p.#type = (p.#type & IFMT_UNKNOWN) | entToType(e);
|
|
// case sensitivity fixing when we learn the true name.
|
|
if (v !== e.name)
|
|
p.name = e.name;
|
|
// just advance provisional index (potentially off the list),
|
|
// otherwise we have to splice/pop it out and re-insert at head
|
|
if (index !== c.provisional) {
|
|
if (index === c.length - 1)
|
|
c.pop();
|
|
else
|
|
c.splice(index, 1);
|
|
c.unshift(p);
|
|
}
|
|
c.provisional++;
|
|
return p;
|
|
}
|
|
/**
|
|
* Call lstat() on this Path, and update all known information that can be
|
|
* determined.
|
|
*
|
|
* Note that unlike `fs.lstat()`, the returned value does not contain some
|
|
* information, such as `mode`, `dev`, `nlink`, and `ino`. If that
|
|
* information is required, you will need to call `fs.lstat` yourself.
|
|
*
|
|
* If the Path refers to a nonexistent file, or if the lstat call fails for
|
|
* any reason, `undefined` is returned. Otherwise the updated Path object is
|
|
* returned.
|
|
*
|
|
* Results are cached, and thus may be out of date if the filesystem is
|
|
* mutated.
|
|
*/
|
|
async lstat() {
|
|
if ((this.#type & ENOENT) === 0) {
|
|
try {
|
|
this.#applyStat(await this.#fs.promises.lstat(this.fullpath()));
|
|
return this;
|
|
}
|
|
catch (er) {
|
|
this.#lstatFail(er.code);
|
|
}
|
|
}
|
|
}
|
|
/**
|
|
* synchronous {@link PathBase.lstat}
|
|
*/
|
|
lstatSync() {
|
|
if ((this.#type & ENOENT) === 0) {
|
|
try {
|
|
this.#applyStat(this.#fs.lstatSync(this.fullpath()));
|
|
return this;
|
|
}
|
|
catch (er) {
|
|
this.#lstatFail(er.code);
|
|
}
|
|
}
|
|
}
|
|
#applyStat(st) {
|
|
const { atime, atimeMs, birthtime, birthtimeMs, blksize, blocks, ctime, ctimeMs, dev, gid, ino, mode, mtime, mtimeMs, nlink, rdev, size, uid, } = st;
|
|
this.#atime = atime;
|
|
this.#atimeMs = atimeMs;
|
|
this.#birthtime = birthtime;
|
|
this.#birthtimeMs = birthtimeMs;
|
|
this.#blksize = blksize;
|
|
this.#blocks = blocks;
|
|
this.#ctime = ctime;
|
|
this.#ctimeMs = ctimeMs;
|
|
this.#dev = dev;
|
|
this.#gid = gid;
|
|
this.#ino = ino;
|
|
this.#mode = mode;
|
|
this.#mtime = mtime;
|
|
this.#mtimeMs = mtimeMs;
|
|
this.#nlink = nlink;
|
|
this.#rdev = rdev;
|
|
this.#size = size;
|
|
this.#uid = uid;
|
|
const ifmt = entToType(st);
|
|
// retain any other flags, but set the ifmt
|
|
this.#type = (this.#type & IFMT_UNKNOWN) | ifmt | LSTAT_CALLED;
|
|
if (ifmt !== UNKNOWN && ifmt !== IFDIR && ifmt !== IFLNK) {
|
|
this.#type |= ENOTDIR;
|
|
}
|
|
}
|
|
#onReaddirCB = [];
|
|
#readdirCBInFlight = false;
|
|
#callOnReaddirCB(children) {
|
|
this.#readdirCBInFlight = false;
|
|
const cbs = this.#onReaddirCB.slice();
|
|
this.#onReaddirCB.length = 0;
|
|
cbs.forEach(cb => cb(null, children));
|
|
}
|
|
/**
|
|
* Standard node-style callback interface to get list of directory entries.
|
|
*
|
|
* If the Path cannot or does not contain any children, then an empty array
|
|
* is returned.
|
|
*
|
|
* Results are cached, and thus may be out of date if the filesystem is
|
|
* mutated.
|
|
*
|
|
* @param cb The callback called with (er, entries). Note that the `er`
|
|
* param is somewhat extraneous, as all readdir() errors are handled and
|
|
* simply result in an empty set of entries being returned.
|
|
* @param allowZalgo Boolean indicating that immediately known results should
|
|
* *not* be deferred with `queueMicrotask`. Defaults to `false`. Release
|
|
* zalgo at your peril, the dark pony lord is devious and unforgiving.
|
|
*/
|
|
readdirCB(cb, allowZalgo = false) {
|
|
if (!this.canReaddir()) {
|
|
if (allowZalgo)
|
|
cb(null, []);
|
|
else
|
|
queueMicrotask(() => cb(null, []));
|
|
return;
|
|
}
|
|
const children = this.children();
|
|
if (this.calledReaddir()) {
|
|
const c = children.slice(0, children.provisional);
|
|
if (allowZalgo)
|
|
cb(null, c);
|
|
else
|
|
queueMicrotask(() => cb(null, c));
|
|
return;
|
|
}
|
|
// don't have to worry about zalgo at this point.
|
|
this.#onReaddirCB.push(cb);
|
|
if (this.#readdirCBInFlight) {
|
|
return;
|
|
}
|
|
this.#readdirCBInFlight = true;
|
|
// else read the directory, fill up children
|
|
// de-provisionalize any provisional children.
|
|
const fullpath = this.fullpath();
|
|
this.#fs.readdir(fullpath, { withFileTypes: true }, (er, entries) => {
|
|
if (er) {
|
|
this.#readdirFail(er.code);
|
|
children.provisional = 0;
|
|
}
|
|
else {
|
|
// if we didn't get an error, we always get entries.
|
|
//@ts-ignore
|
|
for (const e of entries) {
|
|
this.#readdirAddChild(e, children);
|
|
}
|
|
this.#readdirSuccess(children);
|
|
}
|
|
this.#callOnReaddirCB(children.slice(0, children.provisional));
|
|
return;
|
|
});
|
|
}
|
|
#asyncReaddirInFlight;
|
|
/**
|
|
* Return an array of known child entries.
|
|
*
|
|
* If the Path cannot or does not contain any children, then an empty array
|
|
* is returned.
|
|
*
|
|
* Results are cached, and thus may be out of date if the filesystem is
|
|
* mutated.
|
|
*/
|
|
async readdir() {
|
|
if (!this.canReaddir()) {
|
|
return [];
|
|
}
|
|
const children = this.children();
|
|
if (this.calledReaddir()) {
|
|
return children.slice(0, children.provisional);
|
|
}
|
|
// else read the directory, fill up children
|
|
// de-provisionalize any provisional children.
|
|
const fullpath = this.fullpath();
|
|
if (this.#asyncReaddirInFlight) {
|
|
await this.#asyncReaddirInFlight;
|
|
}
|
|
else {
|
|
/* c8 ignore start */
|
|
let resolve = () => { };
|
|
/* c8 ignore stop */
|
|
this.#asyncReaddirInFlight = new Promise(res => (resolve = res));
|
|
try {
|
|
for (const e of await this.#fs.promises.readdir(fullpath, {
|
|
withFileTypes: true,
|
|
})) {
|
|
this.#readdirAddChild(e, children);
|
|
}
|
|
this.#readdirSuccess(children);
|
|
}
|
|
catch (er) {
|
|
this.#readdirFail(er.code);
|
|
children.provisional = 0;
|
|
}
|
|
this.#asyncReaddirInFlight = undefined;
|
|
resolve();
|
|
}
|
|
return children.slice(0, children.provisional);
|
|
}
|
|
/**
|
|
* synchronous {@link PathBase.readdir}
|
|
*/
|
|
readdirSync() {
|
|
if (!this.canReaddir()) {
|
|
return [];
|
|
}
|
|
const children = this.children();
|
|
if (this.calledReaddir()) {
|
|
return children.slice(0, children.provisional);
|
|
}
|
|
// else read the directory, fill up children
|
|
// de-provisionalize any provisional children.
|
|
const fullpath = this.fullpath();
|
|
try {
|
|
for (const e of this.#fs.readdirSync(fullpath, {
|
|
withFileTypes: true,
|
|
})) {
|
|
this.#readdirAddChild(e, children);
|
|
}
|
|
this.#readdirSuccess(children);
|
|
}
|
|
catch (er) {
|
|
this.#readdirFail(er.code);
|
|
children.provisional = 0;
|
|
}
|
|
return children.slice(0, children.provisional);
|
|
}
|
|
canReaddir() {
|
|
if (this.#type & ENOCHILD)
|
|
return false;
|
|
const ifmt = IFMT & this.#type;
|
|
// we always set ENOTDIR when setting IFMT, so should be impossible
|
|
/* c8 ignore start */
|
|
if (!(ifmt === UNKNOWN || ifmt === IFDIR || ifmt === IFLNK)) {
|
|
return false;
|
|
}
|
|
/* c8 ignore stop */
|
|
return true;
|
|
}
|
|
shouldWalk(dirs, walkFilter) {
|
|
return ((this.#type & IFDIR) === IFDIR &&
|
|
!(this.#type & ENOCHILD) &&
|
|
!dirs.has(this) &&
|
|
(!walkFilter || walkFilter(this)));
|
|
}
|
|
/**
|
|
* Return the Path object corresponding to path as resolved
|
|
* by realpath(3).
|
|
*
|
|
* If the realpath call fails for any reason, `undefined` is returned.
|
|
*
|
|
* Result is cached, and thus may be outdated if the filesystem is mutated.
|
|
* On success, returns a Path object.
|
|
*/
|
|
async realpath() {
|
|
if (this.#realpath)
|
|
return this.#realpath;
|
|
if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
|
|
return undefined;
|
|
try {
|
|
const rp = await this.#fs.promises.realpath(this.fullpath());
|
|
return (this.#realpath = this.resolve(rp));
|
|
}
|
|
catch (_) {
|
|
this.#markENOREALPATH();
|
|
}
|
|
}
|
|
/**
|
|
* Synchronous {@link realpath}
|
|
*/
|
|
realpathSync() {
|
|
if (this.#realpath)
|
|
return this.#realpath;
|
|
if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
|
|
return undefined;
|
|
try {
|
|
const rp = this.#fs.realpathSync(this.fullpath());
|
|
return (this.#realpath = this.resolve(rp));
|
|
}
|
|
catch (_) {
|
|
this.#markENOREALPATH();
|
|
}
|
|
}
|
|
/**
|
|
* Internal method to mark this Path object as the scurry cwd,
|
|
* called by {@link PathScurry#chdir}
|
|
*
|
|
* @internal
|
|
*/
|
|
[setAsCwd](oldCwd) {
|
|
if (oldCwd === this)
|
|
return;
|
|
const changed = new Set([]);
|
|
let rp = [];
|
|
let p = this;
|
|
while (p && p.parent) {
|
|
changed.add(p);
|
|
p.#relative = rp.join(this.sep);
|
|
p.#relativePosix = rp.join('/');
|
|
p = p.parent;
|
|
rp.push('..');
|
|
}
|
|
// now un-memoize parents of old cwd
|
|
p = oldCwd;
|
|
while (p && p.parent && !changed.has(p)) {
|
|
p.#relative = undefined;
|
|
p.#relativePosix = undefined;
|
|
p = p.parent;
|
|
}
|
|
}
|
|
}
|
|
exports.PathBase = PathBase;
|
|
/**
|
|
* Path class used on win32 systems
|
|
*
|
|
* Uses `'\\'` as the path separator for returned paths, either `'\\'` or `'/'`
|
|
* as the path separator for parsing paths.
|
|
*/
|
|
class PathWin32 extends PathBase {
|
|
/**
|
|
* Separator for generating path strings.
|
|
*/
|
|
sep = '\\';
|
|
/**
|
|
* Separator for parsing path strings.
|
|
*/
|
|
splitSep = eitherSep;
|
|
/**
|
|
* Do not create new Path objects directly. They should always be accessed
|
|
* via the PathScurry class or other methods on the Path class.
|
|
*
|
|
* @internal
|
|
*/
|
|
constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
|
|
super(name, type, root, roots, nocase, children, opts);
|
|
}
|
|
/**
|
|
* @internal
|
|
*/
|
|
newChild(name, type = UNKNOWN, opts = {}) {
|
|
return new PathWin32(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
|
|
}
|
|
/**
|
|
* @internal
|
|
*/
|
|
getRootString(path) {
|
|
return path_1.win32.parse(path).root;
|
|
}
|
|
/**
|
|
* @internal
|
|
*/
|
|
getRoot(rootPath) {
|
|
rootPath = uncToDrive(rootPath.toUpperCase());
|
|
if (rootPath === this.root.name) {
|
|
return this.root;
|
|
}
|
|
// ok, not that one, check if it matches another we know about
|
|
for (const [compare, root] of Object.entries(this.roots)) {
|
|
if (this.sameRoot(rootPath, compare)) {
|
|
return (this.roots[rootPath] = root);
|
|
}
|
|
}
|
|
// otherwise, have to create a new one.
|
|
return (this.roots[rootPath] = new PathScurryWin32(rootPath, this).root);
|
|
}
|
|
/**
|
|
* @internal
|
|
*/
|
|
sameRoot(rootPath, compare = this.root.name) {
|
|
// windows can (rarely) have case-sensitive filesystem, but
|
|
// UNC and drive letters are always case-insensitive, and canonically
|
|
// represented uppercase.
|
|
rootPath = rootPath
|
|
.toUpperCase()
|
|
.replace(/\//g, '\\')
|
|
.replace(uncDriveRegexp, '$1\\');
|
|
return rootPath === compare;
|
|
}
|
|
}
|
|
exports.PathWin32 = PathWin32;
|
|
/**
|
|
* Path class used on all posix systems.
|
|
*
|
|
* Uses `'/'` as the path separator.
|
|
*/
|
|
class PathPosix extends PathBase {
|
|
/**
|
|
* separator for parsing path strings
|
|
*/
|
|
splitSep = '/';
|
|
/**
|
|
* separator for generating path strings
|
|
*/
|
|
sep = '/';
|
|
/**
|
|
* Do not create new Path objects directly. They should always be accessed
|
|
* via the PathScurry class or other methods on the Path class.
|
|
*
|
|
* @internal
|
|
*/
|
|
constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
|
|
super(name, type, root, roots, nocase, children, opts);
|
|
}
|
|
/**
|
|
* @internal
|
|
*/
|
|
getRootString(path) {
|
|
return path.startsWith('/') ? '/' : '';
|
|
}
|
|
/**
|
|
* @internal
|
|
*/
|
|
getRoot(_rootPath) {
|
|
return this.root;
|
|
}
|
|
/**
|
|
* @internal
|
|
*/
|
|
newChild(name, type = UNKNOWN, opts = {}) {
|
|
return new PathPosix(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
|
|
}
|
|
}
|
|
exports.PathPosix = PathPosix;
|
|
/**
|
|
* The base class for all PathScurry classes, providing the interface for path
|
|
* resolution and filesystem operations.
|
|
*
|
|
* Typically, you should *not* instantiate this class directly, but rather one
|
|
* of the platform-specific classes, or the exported {@link PathScurry} which
|
|
* defaults to the current platform.
|
|
*/
|
|
class PathScurryBase {
|
|
/**
|
|
* The root Path entry for the current working directory of this Scurry
|
|
*/
|
|
root;
|
|
/**
|
|
* The string path for the root of this Scurry's current working directory
|
|
*/
|
|
rootPath;
|
|
/**
|
|
* A collection of all roots encountered, referenced by rootPath
|
|
*/
|
|
roots;
|
|
/**
|
|
* The Path entry corresponding to this PathScurry's current working directory.
|
|
*/
|
|
cwd;
|
|
#resolveCache;
|
|
#resolvePosixCache;
|
|
#children;
|
|
/**
|
|
* Perform path comparisons case-insensitively.
|
|
*
|
|
* Defaults true on Darwin and Windows systems, false elsewhere.
|
|
*/
|
|
nocase;
|
|
#fs;
|
|
/**
|
|
* This class should not be instantiated directly.
|
|
*
|
|
* Use PathScurryWin32, PathScurryDarwin, PathScurryPosix, or PathScurry
|
|
*
|
|
* @internal
|
|
*/
|
|
constructor(cwd = process.cwd(), pathImpl, sep, { nocase, childrenCacheSize = 16 * 1024, fs = defaultFS, } = {}) {
|
|
this.#fs = fsFromOption(fs);
|
|
if (cwd instanceof URL || cwd.startsWith('file://')) {
|
|
cwd = (0, url_1.fileURLToPath)(cwd);
|
|
}
|
|
// resolve and split root, and then add to the store.
|
|
// this is the only time we call path.resolve()
|
|
const cwdPath = pathImpl.resolve(cwd);
|
|
this.roots = Object.create(null);
|
|
this.rootPath = this.parseRootPath(cwdPath);
|
|
this.#resolveCache = new ResolveCache();
|
|
this.#resolvePosixCache = new ResolveCache();
|
|
this.#children = new ChildrenCache(childrenCacheSize);
|
|
const split = cwdPath.substring(this.rootPath.length).split(sep);
|
|
// resolve('/') leaves '', splits to [''], we don't want that.
|
|
if (split.length === 1 && !split[0]) {
|
|
split.pop();
|
|
}
|
|
/* c8 ignore start */
|
|
if (nocase === undefined) {
|
|
throw new TypeError('must provide nocase setting to PathScurryBase ctor');
|
|
}
|
|
/* c8 ignore stop */
|
|
this.nocase = nocase;
|
|
this.root = this.newRoot(this.#fs);
|
|
this.roots[this.rootPath] = this.root;
|
|
let prev = this.root;
|
|
let len = split.length - 1;
|
|
const joinSep = pathImpl.sep;
|
|
let abs = this.rootPath;
|
|
let sawFirst = false;
|
|
for (const part of split) {
|
|
const l = len--;
|
|
prev = prev.child(part, {
|
|
relative: new Array(l).fill('..').join(joinSep),
|
|
relativePosix: new Array(l).fill('..').join('/'),
|
|
fullpath: (abs += (sawFirst ? '' : joinSep) + part),
|
|
});
|
|
sawFirst = true;
|
|
}
|
|
this.cwd = prev;
|
|
}
|
|
/**
|
|
* Get the depth of a provided path, string, or the cwd
|
|
*/
|
|
depth(path = this.cwd) {
|
|
if (typeof path === 'string') {
|
|
path = this.cwd.resolve(path);
|
|
}
|
|
return path.depth();
|
|
}
|
|
/**
|
|
* Return the cache of child entries. Exposed so subclasses can create
|
|
* child Path objects in a platform-specific way.
|
|
*
|
|
* @internal
|
|
*/
|
|
childrenCache() {
|
|
return this.#children;
|
|
}
|
|
/**
|
|
* Resolve one or more path strings to a resolved string
|
|
*
|
|
* Same interface as require('path').resolve.
|
|
*
|
|
* Much faster than path.resolve() when called multiple times for the same
|
|
* path, because the resolved Path objects are cached. Much slower
|
|
* otherwise.
|
|
*/
|
|
resolve(...paths) {
|
|
// first figure out the minimum number of paths we have to test
|
|
// we always start at cwd, but any absolutes will bump the start
|
|
let r = '';
|
|
for (let i = paths.length - 1; i >= 0; i--) {
|
|
const p = paths[i];
|
|
if (!p || p === '.')
|
|
continue;
|
|
r = r ? `${p}/${r}` : p;
|
|
if (this.isAbsolute(p)) {
|
|
break;
|
|
}
|
|
}
|
|
const cached = this.#resolveCache.get(r);
|
|
if (cached !== undefined) {
|
|
return cached;
|
|
}
|
|
const result = this.cwd.resolve(r).fullpath();
|
|
this.#resolveCache.set(r, result);
|
|
return result;
|
|
}
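// Illustrative usage sketch (not part of the generated bundle): resolve() has
// the same shape as path.resolve(), but repeated calls with the same input are
// answered from the ResolveCache above. Shown here on a posix system.
//
//   const { PathScurry } = require('path-scurry');
//   const pw = new PathScurry('/tmp');
//   pw.resolve('a', 'b', '..', 'c'); // '/tmp/a/c'
//   pw.resolve('a', 'b', '..', 'c'); // same string, served from the cache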
|
|
/**
|
|
* Resolve one or more path strings to a resolved string, returning
|
|
* the posix path. Identical to .resolve() on posix systems, but on
|
|
* windows will return a forward-slash separated UNC path.
|
|
*
|
|
* Same interface as require('path').resolve.
|
|
*
|
|
* Much faster than path.resolve() when called multiple times for the same
|
|
* path, because the resolved Path objects are cached. Much slower
|
|
* otherwise.
|
|
*/
|
|
resolvePosix(...paths) {
|
|
// first figure out the minimum number of paths we have to test
|
|
// we always start at cwd, but any absolutes will bump the start
|
|
let r = '';
|
|
for (let i = paths.length - 1; i >= 0; i--) {
|
|
const p = paths[i];
|
|
if (!p || p === '.')
|
|
continue;
|
|
r = r ? `${p}/${r}` : p;
|
|
if (this.isAbsolute(p)) {
|
|
break;
|
|
}
|
|
}
|
|
const cached = this.#resolvePosixCache.get(r);
|
|
if (cached !== undefined) {
|
|
return cached;
|
|
}
|
|
const result = this.cwd.resolve(r).fullpathPosix();
|
|
this.#resolvePosixCache.set(r, result);
|
|
return result;
|
|
}
|
|
/**
|
|
* find the relative path from the cwd to the supplied path string or entry
|
|
*/
|
|
relative(entry = this.cwd) {
|
|
if (typeof entry === 'string') {
|
|
entry = this.cwd.resolve(entry);
|
|
}
|
|
return entry.relative();
|
|
}
|
|
/**
|
|
* find the relative path from the cwd to the supplied path string or
|
|
* entry, using / as the path delimiter, even on Windows.
|
|
*/
|
|
relativePosix(entry = this.cwd) {
|
|
if (typeof entry === 'string') {
|
|
entry = this.cwd.resolve(entry);
|
|
}
|
|
return entry.relativePosix();
|
|
}
|
|
/**
|
|
* Return the basename for the provided string or Path object
|
|
*/
|
|
basename(entry = this.cwd) {
|
|
if (typeof entry === 'string') {
|
|
entry = this.cwd.resolve(entry);
|
|
}
|
|
return entry.name;
|
|
}
|
|
/**
|
|
* Return the dirname for the provided string or Path object
|
|
*/
|
|
dirname(entry = this.cwd) {
|
|
if (typeof entry === 'string') {
|
|
entry = this.cwd.resolve(entry);
|
|
}
|
|
return (entry.parent || entry).fullpath();
|
|
}
|
|
async readdir(entry = this.cwd, opts = {
|
|
withFileTypes: true,
|
|
}) {
|
|
if (typeof entry === 'string') {
|
|
entry = this.cwd.resolve(entry);
|
|
}
|
|
else if (!(entry instanceof PathBase)) {
|
|
opts = entry;
|
|
entry = this.cwd;
|
|
}
|
|
const { withFileTypes } = opts;
|
|
if (!entry.canReaddir()) {
|
|
return [];
|
|
}
|
|
else {
|
|
const p = await entry.readdir();
|
|
return withFileTypes ? p : p.map(e => e.name);
|
|
}
|
|
}
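// Illustrative usage sketch (not part of the generated bundle): readdir()
// accepts a string, a Path entry, or just an options object, and resolves to
// Path objects by default or to plain names with withFileTypes: false.
//
//   const { PathScurry } = require('path-scurry');
//   const pw = new PathScurry(process.cwd());
//   // (inside an async function)
//   const entries = await pw.readdir('.');                    // Path[]
//   const names = await pw.readdir({ withFileTypes: false }); // string[]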
|
|
readdirSync(entry = this.cwd, opts = {
|
|
withFileTypes: true,
|
|
}) {
|
|
if (typeof entry === 'string') {
|
|
entry = this.cwd.resolve(entry);
|
|
}
|
|
else if (!(entry instanceof PathBase)) {
|
|
opts = entry;
|
|
entry = this.cwd;
|
|
}
|
|
const { withFileTypes = true } = opts;
|
|
if (!entry.canReaddir()) {
|
|
return [];
|
|
}
|
|
else if (withFileTypes) {
|
|
return entry.readdirSync();
|
|
}
|
|
else {
|
|
return entry.readdirSync().map(e => e.name);
|
|
}
|
|
}
|
|
/**
|
|
* Call lstat() on the string or Path object, and update all known
|
|
* information that can be determined.
|
|
*
|
|
* Note that unlike `fs.lstat()`, the returned value does not contain some
|
|
* information, such as `mode`, `dev`, `nlink`, and `ino`. If that
|
|
* information is required, you will need to call `fs.lstat` yourself.
|
|
*
|
|
* If the Path refers to a nonexistent file, or if the lstat call fails for
|
|
* any reason, `undefined` is returned. Otherwise the updated Path object is
|
|
* returned.
|
|
*
|
|
* Results are cached, and thus may be out of date if the filesystem is
|
|
* mutated.
|
|
*/
|
|
async lstat(entry = this.cwd) {
|
|
if (typeof entry === 'string') {
|
|
entry = this.cwd.resolve(entry);
|
|
}
|
|
return entry.lstat();
|
|
}
|
|
/**
|
|
* synchronous {@link PathScurryBase.lstat}
|
|
*/
|
|
lstatSync(entry = this.cwd) {
|
|
if (typeof entry === 'string') {
|
|
entry = this.cwd.resolve(entry);
|
|
}
|
|
return entry.lstatSync();
|
|
}
|
|
async readlink(entry = this.cwd, { withFileTypes } = {
|
|
withFileTypes: false,
|
|
}) {
|
|
if (typeof entry === 'string') {
|
|
entry = this.cwd.resolve(entry);
|
|
}
|
|
else if (!(entry instanceof PathBase)) {
|
|
withFileTypes = entry.withFileTypes;
|
|
entry = this.cwd;
|
|
}
|
|
const e = await entry.readlink();
|
|
return withFileTypes ? e : e?.fullpath();
|
|
}
|
|
readlinkSync(entry = this.cwd, { withFileTypes } = {
|
|
withFileTypes: false,
|
|
}) {
|
|
if (typeof entry === 'string') {
|
|
entry = this.cwd.resolve(entry);
|
|
}
|
|
else if (!(entry instanceof PathBase)) {
|
|
withFileTypes = entry.withFileTypes;
|
|
entry = this.cwd;
|
|
}
|
|
const e = entry.readlinkSync();
|
|
return withFileTypes ? e : e?.fullpath();
|
|
}
|
|
async realpath(entry = this.cwd, { withFileTypes } = {
|
|
withFileTypes: false,
|
|
}) {
|
|
if (typeof entry === 'string') {
|
|
entry = this.cwd.resolve(entry);
|
|
}
|
|
else if (!(entry instanceof PathBase)) {
|
|
withFileTypes = entry.withFileTypes;
|
|
entry = this.cwd;
|
|
}
|
|
const e = await entry.realpath();
|
|
return withFileTypes ? e : e?.fullpath();
|
|
}
|
|
realpathSync(entry = this.cwd, { withFileTypes } = {
|
|
withFileTypes: false,
|
|
}) {
|
|
if (typeof entry === 'string') {
|
|
entry = this.cwd.resolve(entry);
|
|
}
|
|
else if (!(entry instanceof PathBase)) {
|
|
withFileTypes = entry.withFileTypes;
|
|
entry = this.cwd;
|
|
}
|
|
const e = entry.realpathSync();
|
|
return withFileTypes ? e : e?.fullpath();
|
|
}
|
|
async walk(entry = this.cwd, opts = {}) {
|
|
if (typeof entry === 'string') {
|
|
entry = this.cwd.resolve(entry);
|
|
}
|
|
else if (!(entry instanceof PathBase)) {
|
|
opts = entry;
|
|
entry = this.cwd;
|
|
}
|
|
const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
|
|
const results = [];
|
|
if (!filter || filter(entry)) {
|
|
results.push(withFileTypes ? entry : entry.fullpath());
|
|
}
|
|
const dirs = new Set();
|
|
const walk = (dir, cb) => {
|
|
dirs.add(dir);
|
|
dir.readdirCB((er, entries) => {
|
|
/* c8 ignore start */
|
|
if (er) {
|
|
return cb(er);
|
|
}
|
|
/* c8 ignore stop */
|
|
let len = entries.length;
|
|
if (!len)
|
|
return cb();
|
|
const next = () => {
|
|
if (--len === 0) {
|
|
cb();
|
|
}
|
|
};
|
|
for (const e of entries) {
|
|
if (!filter || filter(e)) {
|
|
results.push(withFileTypes ? e : e.fullpath());
|
|
}
|
|
if (follow && e.isSymbolicLink()) {
|
|
e.realpath()
|
|
.then(r => (r?.isUnknown() ? r.lstat() : r))
|
|
.then(r => r?.shouldWalk(dirs, walkFilter) ? walk(r, next) : next());
|
|
}
|
|
else {
|
|
if (e.shouldWalk(dirs, walkFilter)) {
|
|
walk(e, next);
|
|
}
|
|
else {
|
|
next();
|
|
}
|
|
}
|
|
}
|
|
}, true); // zalgooooooo
|
|
};
|
|
const start = entry;
|
|
return new Promise((res, rej) => {
|
|
walk(start, er => {
|
|
/* c8 ignore start */
|
|
if (er)
|
|
return rej(er);
|
|
/* c8 ignore stop */
|
|
res(results);
|
|
});
|
|
});
|
|
}
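/*
 * Usage sketch (illustrative; `scurry` is an assumed PathScurry instance):
 * collect the full paths of all .js files under the cwd, without following
 * symbolic links. `filter` controls what is reported, not what is traversed.
 *
 *   const jsPaths = await scurry.walk({
 *     withFileTypes: false,
 *     follow: false,
 *     filter: e => e.isFile() && e.name.endsWith('.js'),
 *   });
 */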
|
|
walkSync(entry = this.cwd, opts = {}) {
|
|
if (typeof entry === 'string') {
|
|
entry = this.cwd.resolve(entry);
|
|
}
|
|
else if (!(entry instanceof PathBase)) {
|
|
opts = entry;
|
|
entry = this.cwd;
|
|
}
|
|
const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
|
|
const results = [];
|
|
if (!filter || filter(entry)) {
|
|
results.push(withFileTypes ? entry : entry.fullpath());
|
|
}
|
|
const dirs = new Set([entry]);
|
|
for (const dir of dirs) {
|
|
const entries = dir.readdirSync();
|
|
for (const e of entries) {
|
|
if (!filter || filter(e)) {
|
|
results.push(withFileTypes ? e : e.fullpath());
|
|
}
|
|
let r = e;
|
|
if (e.isSymbolicLink()) {
|
|
if (!(follow && (r = e.realpathSync())))
|
|
continue;
|
|
if (r.isUnknown())
|
|
r.lstatSync();
|
|
}
|
|
if (r.shouldWalk(dirs, walkFilter)) {
|
|
dirs.add(r);
|
|
}
|
|
}
|
|
}
|
|
return results;
|
|
}
|
|
/**
|
|
* Support for `for await`
|
|
*
|
|
* Alias for {@link PathScurryBase.iterate}
|
|
*
|
|
* Note: As of Node 19, this is very slow, compared to other methods of
|
|
* walking. Consider using {@link PathScurryBase.stream} if memory overhead
|
|
* and backpressure are concerns, or {@link PathScurryBase.walk} if not.
|
|
*/
|
|
[Symbol.asyncIterator]() {
|
|
return this.iterate();
|
|
}
|
|
iterate(entry = this.cwd, options = {}) {
|
|
// iterating async over the stream is significantly more performant,
|
|
// especially in the warm-cache scenario, because it buffers up directory
|
|
// entries in the background instead of waiting for a yield for each one.
|
|
if (typeof entry === 'string') {
|
|
entry = this.cwd.resolve(entry);
|
|
}
|
|
else if (!(entry instanceof PathBase)) {
|
|
options = entry;
|
|
entry = this.cwd;
|
|
}
|
|
return this.stream(entry, options)[Symbol.asyncIterator]();
|
|
}
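/*
 * Usage sketch (illustrative; `scurry` is an assumed PathScurry instance):
 * `for await` walks the tree asynchronously via the stream-backed iterator.
 *
 *   for await (const entry of scurry) {
 *     if (entry.isFile()) console.log(entry.fullpath());
 *   }
 */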
|
|
/**
|
|
* Iterating over a PathScurry performs a synchronous walk.
|
|
*
|
|
* Alias for {@link PathScurryBase.iterateSync}
|
|
*/
|
|
[Symbol.iterator]() {
|
|
return this.iterateSync();
|
|
}
|
|
*iterateSync(entry = this.cwd, opts = {}) {
|
|
if (typeof entry === 'string') {
|
|
entry = this.cwd.resolve(entry);
|
|
}
|
|
else if (!(entry instanceof PathBase)) {
|
|
opts = entry;
|
|
entry = this.cwd;
|
|
}
|
|
const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
|
|
if (!filter || filter(entry)) {
|
|
yield withFileTypes ? entry : entry.fullpath();
|
|
}
|
|
const dirs = new Set([entry]);
|
|
for (const dir of dirs) {
|
|
const entries = dir.readdirSync();
|
|
for (const e of entries) {
|
|
if (!filter || filter(e)) {
|
|
yield withFileTypes ? e : e.fullpath();
|
|
}
|
|
let r = e;
|
|
if (e.isSymbolicLink()) {
|
|
if (!(follow && (r = e.realpathSync())))
|
|
continue;
|
|
if (r.isUnknown())
|
|
r.lstatSync();
|
|
}
|
|
if (r.shouldWalk(dirs, walkFilter)) {
|
|
dirs.add(r);
|
|
}
|
|
}
|
|
}
|
|
}
|
|
stream(entry = this.cwd, opts = {}) {
|
|
if (typeof entry === 'string') {
|
|
entry = this.cwd.resolve(entry);
|
|
}
|
|
else if (!(entry instanceof PathBase)) {
|
|
opts = entry;
|
|
entry = this.cwd;
|
|
}
|
|
const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
|
|
const results = new minipass_1.Minipass({ objectMode: true });
|
|
if (!filter || filter(entry)) {
|
|
results.write(withFileTypes ? entry : entry.fullpath());
|
|
}
|
|
const dirs = new Set();
|
|
const queue = [entry];
|
|
let processing = 0;
|
|
const process = () => {
|
|
let paused = false;
|
|
while (!paused) {
|
|
const dir = queue.shift();
|
|
if (!dir) {
|
|
if (processing === 0)
|
|
results.end();
|
|
return;
|
|
}
|
|
processing++;
|
|
dirs.add(dir);
|
|
const onReaddir = (er, entries, didRealpaths = false) => {
|
|
/* c8 ignore start */
|
|
if (er)
|
|
return results.emit('error', er);
|
|
/* c8 ignore stop */
|
|
if (follow && !didRealpaths) {
|
|
const promises = [];
|
|
for (const e of entries) {
|
|
if (e.isSymbolicLink()) {
|
|
promises.push(e
|
|
.realpath()
|
|
.then((r) => r?.isUnknown() ? r.lstat() : r));
|
|
}
|
|
}
|
|
if (promises.length) {
|
|
Promise.all(promises).then(() => onReaddir(null, entries, true));
|
|
return;
|
|
}
|
|
}
|
|
for (const e of entries) {
|
|
if (e && (!filter || filter(e))) {
|
|
if (!results.write(withFileTypes ? e : e.fullpath())) {
|
|
paused = true;
|
|
}
|
|
}
|
|
}
|
|
processing--;
|
|
for (const e of entries) {
|
|
const r = e.realpathCached() || e;
|
|
if (r.shouldWalk(dirs, walkFilter)) {
|
|
queue.push(r);
|
|
}
|
|
}
|
|
if (paused && !results.flowing) {
|
|
results.once('drain', process);
|
|
}
|
|
else if (!sync) {
|
|
process();
|
|
}
|
|
};
|
|
// zalgo containment
|
|
let sync = true;
|
|
dir.readdirCB(onReaddir, true);
|
|
sync = false;
|
|
}
|
|
};
|
|
process();
|
|
return results;
|
|
}
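/*
 * Usage sketch (illustrative; `scurry` is an assumed PathScurry instance):
 * the returned Minipass stream honors backpressure, which makes it the
 * lighter-weight choice for very large trees.
 *
 *   scurry.stream({ withFileTypes: false })
 *     .on('data', p => console.log(p))
 *     .on('end', () => console.log('walk complete'));
 */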
|
|
streamSync(entry = this.cwd, opts = {}) {
|
|
if (typeof entry === 'string') {
|
|
entry = this.cwd.resolve(entry);
|
|
}
|
|
else if (!(entry instanceof PathBase)) {
|
|
opts = entry;
|
|
entry = this.cwd;
|
|
}
|
|
const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
|
|
const results = new minipass_1.Minipass({ objectMode: true });
|
|
const dirs = new Set();
|
|
if (!filter || filter(entry)) {
|
|
results.write(withFileTypes ? entry : entry.fullpath());
|
|
}
|
|
const queue = [entry];
|
|
let processing = 0;
|
|
const process = () => {
|
|
let paused = false;
|
|
while (!paused) {
|
|
const dir = queue.shift();
|
|
if (!dir) {
|
|
if (processing === 0)
|
|
results.end();
|
|
return;
|
|
}
|
|
processing++;
|
|
dirs.add(dir);
|
|
const entries = dir.readdirSync();
|
|
for (const e of entries) {
|
|
if (!filter || filter(e)) {
|
|
if (!results.write(withFileTypes ? e : e.fullpath())) {
|
|
paused = true;
|
|
}
|
|
}
|
|
}
|
|
processing--;
|
|
for (const e of entries) {
|
|
let r = e;
|
|
if (e.isSymbolicLink()) {
|
|
if (!(follow && (r = e.realpathSync())))
|
|
continue;
|
|
if (r.isUnknown())
|
|
r.lstatSync();
|
|
}
|
|
if (r.shouldWalk(dirs, walkFilter)) {
|
|
queue.push(r);
|
|
}
|
|
}
|
|
}
|
|
if (paused && !results.flowing)
|
|
results.once('drain', process);
|
|
};
|
|
process();
|
|
return results;
|
|
}
|
|
chdir(path = this.cwd) {
|
|
const oldCwd = this.cwd;
|
|
this.cwd = typeof path === 'string' ? this.cwd.resolve(path) : path;
|
|
this.cwd[setAsCwd](oldCwd);
|
|
}
|
|
}
|
|
exports.PathScurryBase = PathScurryBase;
/**
 * Windows implementation of {@link PathScurryBase}
 *
 * Defaults to case insensitive, uses `'\\'` to generate path strings. Uses
 * {@link PathWin32} for Path objects.
 */
class PathScurryWin32 extends PathScurryBase {
|
|
/**
|
|
* separator for generating path strings
|
|
*/
|
|
sep = '\\';
|
|
constructor(cwd = process.cwd(), opts = {}) {
|
|
const { nocase = true } = opts;
|
|
super(cwd, path_1.win32, '\\', { ...opts, nocase });
|
|
this.nocase = nocase;
|
|
for (let p = this.cwd; p; p = p.parent) {
|
|
p.nocase = this.nocase;
|
|
}
|
|
}
|
|
/**
|
|
* @internal
|
|
*/
|
|
parseRootPath(dir) {
|
|
// if the path starts with a single separator, it's not a UNC, and we'll
|
|
// just get separator as the root, and driveFromUNC will return \
|
|
// In that case, mount \ on the root from the cwd.
|
|
return path_1.win32.parse(dir).root.toUpperCase();
|
|
}
|
|
/**
|
|
* @internal
|
|
*/
|
|
newRoot(fs) {
|
|
return new PathWin32(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
|
|
}
|
|
/**
|
|
* Return true if the provided path string is an absolute path
|
|
*/
|
|
isAbsolute(p) {
|
|
return (p.startsWith('/') || p.startsWith('\\') || /^[a-z]:(\/|\\)/i.test(p));
|
|
}
|
|
}
|
|
exports.PathScurryWin32 = PathScurryWin32;
|
|
/**
|
|
* {@link PathScurryBase} implementation for all posix systems other than Darwin.
|
|
*
|
|
* Defaults to case-sensitive matching, uses `'/'` to generate path strings.
|
|
*
|
|
* Uses {@link PathPosix} for Path objects.
|
|
*/
|
|
class PathScurryPosix extends PathScurryBase {
|
|
/**
|
|
* separator for generating path strings
|
|
*/
|
|
sep = '/';
|
|
constructor(cwd = process.cwd(), opts = {}) {
|
|
const { nocase = false } = opts;
|
|
super(cwd, path_1.posix, '/', { ...opts, nocase });
|
|
this.nocase = nocase;
|
|
}
|
|
/**
|
|
* @internal
|
|
*/
|
|
parseRootPath(_dir) {
|
|
return '/';
|
|
}
|
|
/**
|
|
* @internal
|
|
*/
|
|
newRoot(fs) {
|
|
return new PathPosix(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
|
|
}
|
|
/**
|
|
* Return true if the provided path string is an absolute path
|
|
*/
|
|
isAbsolute(p) {
|
|
return p.startsWith('/');
|
|
}
|
|
}
|
|
exports.PathScurryPosix = PathScurryPosix;
|
|
/**
 * {@link PathScurryBase} implementation for Darwin (macOS) systems.
 *
 * Defaults to case-insensitive matching, uses `'/'` for generating path
 * strings.
 *
 * Uses {@link PathPosix} for Path objects.
 */
class PathScurryDarwin extends PathScurryPosix {
    constructor(cwd = process.cwd(), opts = {}) {
        const { nocase = true } = opts;
        super(cwd, { ...opts, nocase });
    }
}
exports.PathScurryDarwin = PathScurryDarwin;
/**
 * Default {@link PathBase} implementation for the current platform.
 *
 * {@link PathWin32} on Windows systems, {@link PathPosix} on all others.
 */
exports.Path = process.platform === 'win32' ? PathWin32 : PathPosix;
/**
 * Default {@link PathScurryBase} implementation for the current platform.
 *
 * {@link PathScurryWin32} on Windows systems, {@link PathScurryDarwin} on
 * Darwin (macOS) systems, {@link PathScurryPosix} on all others.
 */
exports.PathScurry = process.platform === 'win32'
    ? PathScurryWin32
    : process.platform === 'darwin'
        ? PathScurryDarwin
        : PathScurryPosix;
//# sourceMappingURL=index.js.map

/***/ }),

/***/ 7433:
/***/ ((__unused_webpack_module, exports) => {

"use strict";

/**
 * @module LRUCache
 */
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.LRUCache = void 0;
|
|
const perf = typeof performance === 'object' &&
|
|
performance &&
|
|
typeof performance.now === 'function'
|
|
? performance
|
|
: Date;
|
|
const warned = new Set();
|
|
/* c8 ignore start */
|
|
const PROCESS = (typeof process === 'object' && !!process ? process : {});
|
|
/* c8 ignore start */
|
|
const emitWarning = (msg, type, code, fn) => {
|
|
typeof PROCESS.emitWarning === 'function'
|
|
? PROCESS.emitWarning(msg, type, code, fn)
|
|
: console.error(`[${code}] ${type}: ${msg}`);
|
|
};
|
|
let AC = globalThis.AbortController;
|
|
let AS = globalThis.AbortSignal;
|
|
/* c8 ignore start */
|
|
if (typeof AC === 'undefined') {
|
|
//@ts-ignore
|
|
AS = class AbortSignal {
|
|
onabort;
|
|
_onabort = [];
|
|
reason;
|
|
aborted = false;
|
|
addEventListener(_, fn) {
|
|
this._onabort.push(fn);
|
|
}
|
|
};
|
|
//@ts-ignore
|
|
AC = class AbortController {
|
|
constructor() {
|
|
warnACPolyfill();
|
|
}
|
|
signal = new AS();
|
|
abort(reason) {
|
|
if (this.signal.aborted)
|
|
return;
|
|
//@ts-ignore
|
|
this.signal.reason = reason;
|
|
//@ts-ignore
|
|
this.signal.aborted = true;
|
|
//@ts-ignore
|
|
for (const fn of this.signal._onabort) {
|
|
fn(reason);
|
|
}
|
|
this.signal.onabort?.(reason);
|
|
}
|
|
};
|
|
let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
|
|
const warnACPolyfill = () => {
|
|
if (!printACPolyfillWarning)
|
|
return;
|
|
printACPolyfillWarning = false;
|
|
emitWarning('AbortController is not defined. If using lru-cache in ' +
|
|
'node 14, load an AbortController polyfill from the ' +
|
|
'`node-abort-controller` package. A minimal polyfill is ' +
|
|
'provided for use by LRUCache.fetch(), but it should not be ' +
|
|
'relied upon in other contexts (eg, passing it to other APIs that ' +
|
|
'use AbortController/AbortSignal might have undesirable effects). ' +
|
|
'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
|
|
};
|
|
}
|
|
/* c8 ignore stop */
|
|
const shouldWarn = (code) => !warned.has(code);
|
|
const TYPE = Symbol('type');
|
|
const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
|
|
/* c8 ignore start */
|
|
// This is a little bit ridiculous, tbh.
|
|
// The maximum array length is 2^32-1 or thereabouts on most JS impls.
|
|
// And well before that point, you're caching the entire world, I mean,
|
|
// that's ~32GB of just integers for the next/prev links, plus whatever
|
|
// else to hold that many keys and values. Just filling the memory with
|
|
// zeroes at init time is brutal when you get that big.
|
|
// But why not be complete?
|
|
// Maybe in the future, these limits will have expanded.
|
|
const getUintArray = (max) => !isPosInt(max)
|
|
? null
|
|
: max <= Math.pow(2, 8)
|
|
? Uint8Array
|
|
: max <= Math.pow(2, 16)
|
|
? Uint16Array
|
|
: max <= Math.pow(2, 32)
|
|
? Uint32Array
|
|
: max <= Number.MAX_SAFE_INTEGER
|
|
? ZeroArray
|
|
: null;
|
|
/* c8 ignore stop */
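// Illustrative results of the sizing logic above (example values are
// assumptions, chosen only to show the thresholds):
//   getUintArray(200)      -> Uint8Array   (fits in 2^8)
//   getUintArray(10000)    -> Uint16Array  (fits in 2^16)
//   getUintArray(10000000) -> Uint32Array  (fits in 2^32)
//   getUintArray(0)        -> null         (not a positive integer)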
|
|
class ZeroArray extends Array {
|
|
constructor(size) {
|
|
super(size);
|
|
this.fill(0);
|
|
}
|
|
}
|
|
class Stack {
|
|
heap;
|
|
length;
|
|
// private constructor
|
|
static #constructing = false;
|
|
static create(max) {
|
|
const HeapCls = getUintArray(max);
|
|
if (!HeapCls)
|
|
return [];
|
|
Stack.#constructing = true;
|
|
const s = new Stack(max, HeapCls);
|
|
Stack.#constructing = false;
|
|
return s;
|
|
}
|
|
constructor(max, HeapCls) {
|
|
/* c8 ignore start */
|
|
if (!Stack.#constructing) {
|
|
throw new TypeError('instantiate Stack using Stack.create(n)');
|
|
}
|
|
/* c8 ignore stop */
|
|
this.heap = new HeapCls(max);
|
|
this.length = 0;
|
|
}
|
|
push(n) {
|
|
this.heap[this.length++] = n;
|
|
}
|
|
pop() {
|
|
return this.heap[--this.length];
|
|
}
|
|
}
|
|
/**
 * Default export, the thing you're using this module to get.
 *
 * All properties from the options object (with the exception of
 * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as
 * normal public members. (`max` and `maxSize` are read-only getters.)
 * Changing any of these will alter the defaults for subsequent method calls,
 * but is otherwise safe.
 */
class LRUCache {
|
|
// properties coming in from the options of these, only max and maxSize
|
|
// really *need* to be protected. The rest can be modified, as they just
|
|
// set defaults for various methods.
|
|
#max;
|
|
#maxSize;
|
|
#dispose;
|
|
#disposeAfter;
|
|
#fetchMethod;
|
|
/**
|
|
* {@link LRUCache.OptionsBase.ttl}
|
|
*/
|
|
ttl;
|
|
/**
|
|
* {@link LRUCache.OptionsBase.ttlResolution}
|
|
*/
|
|
ttlResolution;
|
|
/**
|
|
* {@link LRUCache.OptionsBase.ttlAutopurge}
|
|
*/
|
|
ttlAutopurge;
|
|
/**
|
|
* {@link LRUCache.OptionsBase.updateAgeOnGet}
|
|
*/
|
|
updateAgeOnGet;
|
|
/**
|
|
* {@link LRUCache.OptionsBase.updateAgeOnHas}
|
|
*/
|
|
updateAgeOnHas;
|
|
/**
|
|
* {@link LRUCache.OptionsBase.allowStale}
|
|
*/
|
|
allowStale;
|
|
/**
|
|
* {@link LRUCache.OptionsBase.noDisposeOnSet}
|
|
*/
|
|
noDisposeOnSet;
|
|
/**
|
|
* {@link LRUCache.OptionsBase.noUpdateTTL}
|
|
*/
|
|
noUpdateTTL;
|
|
/**
|
|
* {@link LRUCache.OptionsBase.maxEntrySize}
|
|
*/
|
|
maxEntrySize;
|
|
/**
|
|
* {@link LRUCache.OptionsBase.sizeCalculation}
|
|
*/
|
|
sizeCalculation;
|
|
/**
|
|
* {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
|
|
*/
|
|
noDeleteOnFetchRejection;
|
|
/**
|
|
* {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
|
|
*/
|
|
noDeleteOnStaleGet;
|
|
/**
|
|
* {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
|
|
*/
|
|
allowStaleOnFetchAbort;
|
|
/**
|
|
* {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
|
|
*/
|
|
allowStaleOnFetchRejection;
|
|
/**
|
|
* {@link LRUCache.OptionsBase.ignoreFetchAbort}
|
|
*/
|
|
ignoreFetchAbort;
|
|
// computed properties
|
|
#size;
|
|
#calculatedSize;
|
|
#keyMap;
|
|
#keyList;
|
|
#valList;
|
|
#next;
|
|
#prev;
|
|
#head;
|
|
#tail;
|
|
#free;
|
|
#disposed;
|
|
#sizes;
|
|
#starts;
|
|
#ttls;
|
|
#hasDispose;
|
|
#hasFetchMethod;
|
|
#hasDisposeAfter;
|
|
/**
|
|
* Do not call this method unless you need to inspect the
|
|
* inner workings of the cache. If anything returned by this
|
|
* object is modified in any way, strange breakage may occur.
|
|
*
|
|
* These fields are private for a reason!
|
|
*
|
|
* @internal
|
|
*/
|
|
static unsafeExposeInternals(c) {
|
|
return {
|
|
// properties
|
|
starts: c.#starts,
|
|
ttls: c.#ttls,
|
|
sizes: c.#sizes,
|
|
keyMap: c.#keyMap,
|
|
keyList: c.#keyList,
|
|
valList: c.#valList,
|
|
next: c.#next,
|
|
prev: c.#prev,
|
|
get head() {
|
|
return c.#head;
|
|
},
|
|
get tail() {
|
|
return c.#tail;
|
|
},
|
|
free: c.#free,
|
|
// methods
|
|
isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
|
|
backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
|
|
moveToTail: (index) => c.#moveToTail(index),
|
|
indexes: (options) => c.#indexes(options),
|
|
rindexes: (options) => c.#rindexes(options),
|
|
isStale: (index) => c.#isStale(index),
|
|
};
|
|
}
|
|
// Protected read-only members
|
|
/**
|
|
* {@link LRUCache.OptionsBase.max} (read-only)
|
|
*/
|
|
get max() {
|
|
return this.#max;
|
|
}
|
|
/**
|
|
* {@link LRUCache.OptionsBase.maxSize} (read-only)
|
|
*/
|
|
get maxSize() {
|
|
return this.#maxSize;
|
|
}
|
|
/**
|
|
* The total computed size of items in the cache (read-only)
|
|
*/
|
|
get calculatedSize() {
|
|
return this.#calculatedSize;
|
|
}
|
|
/**
|
|
* The number of items stored in the cache (read-only)
|
|
*/
|
|
get size() {
|
|
return this.#size;
|
|
}
|
|
/**
|
|
* {@link LRUCache.OptionsBase.fetchMethod} (read-only)
|
|
*/
|
|
get fetchMethod() {
|
|
return this.#fetchMethod;
|
|
}
|
|
/**
|
|
* {@link LRUCache.OptionsBase.dispose} (read-only)
|
|
*/
|
|
get dispose() {
|
|
return this.#dispose;
|
|
}
|
|
/**
|
|
* {@link LRUCache.OptionsBase.disposeAfter} (read-only)
|
|
*/
|
|
get disposeAfter() {
|
|
return this.#disposeAfter;
|
|
}
|
|
constructor(options) {
|
|
const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
|
|
if (max !== 0 && !isPosInt(max)) {
|
|
throw new TypeError('max option must be a nonnegative integer');
|
|
}
|
|
const UintArray = max ? getUintArray(max) : Array;
|
|
if (!UintArray) {
|
|
throw new Error('invalid max value: ' + max);
|
|
}
|
|
this.#max = max;
|
|
this.#maxSize = maxSize;
|
|
this.maxEntrySize = maxEntrySize || this.#maxSize;
|
|
this.sizeCalculation = sizeCalculation;
|
|
if (this.sizeCalculation) {
|
|
if (!this.#maxSize && !this.maxEntrySize) {
|
|
throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
|
|
}
|
|
if (typeof this.sizeCalculation !== 'function') {
|
|
throw new TypeError('sizeCalculation set to non-function');
|
|
}
|
|
}
|
|
if (fetchMethod !== undefined &&
|
|
typeof fetchMethod !== 'function') {
|
|
throw new TypeError('fetchMethod must be a function if specified');
|
|
}
|
|
this.#fetchMethod = fetchMethod;
|
|
this.#hasFetchMethod = !!fetchMethod;
|
|
this.#keyMap = new Map();
|
|
this.#keyList = new Array(max).fill(undefined);
|
|
this.#valList = new Array(max).fill(undefined);
|
|
this.#next = new UintArray(max);
|
|
this.#prev = new UintArray(max);
|
|
this.#head = 0;
|
|
this.#tail = 0;
|
|
this.#free = Stack.create(max);
|
|
this.#size = 0;
|
|
this.#calculatedSize = 0;
|
|
if (typeof dispose === 'function') {
|
|
this.#dispose = dispose;
|
|
}
|
|
if (typeof disposeAfter === 'function') {
|
|
this.#disposeAfter = disposeAfter;
|
|
this.#disposed = [];
|
|
}
|
|
else {
|
|
this.#disposeAfter = undefined;
|
|
this.#disposed = undefined;
|
|
}
|
|
this.#hasDispose = !!this.#dispose;
|
|
this.#hasDisposeAfter = !!this.#disposeAfter;
|
|
this.noDisposeOnSet = !!noDisposeOnSet;
|
|
this.noUpdateTTL = !!noUpdateTTL;
|
|
this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
|
|
this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
|
|
this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
|
|
this.ignoreFetchAbort = !!ignoreFetchAbort;
|
|
// NB: maxEntrySize is set to maxSize if it's set
|
|
if (this.maxEntrySize !== 0) {
|
|
if (this.#maxSize !== 0) {
|
|
if (!isPosInt(this.#maxSize)) {
|
|
throw new TypeError('maxSize must be a positive integer if specified');
|
|
}
|
|
}
|
|
if (!isPosInt(this.maxEntrySize)) {
|
|
throw new TypeError('maxEntrySize must be a positive integer if specified');
|
|
}
|
|
this.#initializeSizeTracking();
|
|
}
|
|
this.allowStale = !!allowStale;
|
|
this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
|
|
this.updateAgeOnGet = !!updateAgeOnGet;
|
|
this.updateAgeOnHas = !!updateAgeOnHas;
|
|
this.ttlResolution =
|
|
isPosInt(ttlResolution) || ttlResolution === 0
|
|
? ttlResolution
|
|
: 1;
|
|
this.ttlAutopurge = !!ttlAutopurge;
|
|
this.ttl = ttl || 0;
|
|
if (this.ttl) {
|
|
if (!isPosInt(this.ttl)) {
|
|
throw new TypeError('ttl must be a positive integer if specified');
|
|
}
|
|
this.#initializeTTLTracking();
|
|
}
|
|
// do not allow completely unbounded caches
|
|
if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
|
|
throw new TypeError('At least one of max, maxSize, or ttl is required');
|
|
}
|
|
if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
|
|
const code = 'LRU_CACHE_UNBOUNDED';
|
|
if (shouldWarn(code)) {
|
|
warned.add(code);
|
|
const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
|
|
'result in unbounded memory consumption.';
|
|
emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
|
|
}
|
|
}
|
|
}
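/*
 * Usage sketch (illustrative): at least one of `max`, `maxSize`, or `ttl`
 * must be provided, as enforced above.
 *
 *   const cache = new LRUCache({ max: 500, ttl: 1000 * 60 });
 *   cache.set('a', { hello: 'world' });
 *   cache.get('a');       // => { hello: 'world' } (updates recency)
 *   cache.has('missing'); // => false
 */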
|
|
/**
|
|
* Return the remaining TTL time for a given entry key
|
|
*/
|
|
getRemainingTTL(key) {
|
|
return this.#keyMap.has(key) ? Infinity : 0;
|
|
}
|
|
#initializeTTLTracking() {
|
|
const ttls = new ZeroArray(this.#max);
|
|
const starts = new ZeroArray(this.#max);
|
|
this.#ttls = ttls;
|
|
this.#starts = starts;
|
|
this.#setItemTTL = (index, ttl, start = perf.now()) => {
|
|
starts[index] = ttl !== 0 ? start : 0;
|
|
ttls[index] = ttl;
|
|
if (ttl !== 0 && this.ttlAutopurge) {
|
|
const t = setTimeout(() => {
|
|
if (this.#isStale(index)) {
|
|
this.delete(this.#keyList[index]);
|
|
}
|
|
}, ttl + 1);
|
|
// unref() not supported on all platforms
|
|
/* c8 ignore start */
|
|
if (t.unref) {
|
|
t.unref();
|
|
}
|
|
/* c8 ignore stop */
|
|
}
|
|
};
|
|
this.#updateItemAge = index => {
|
|
starts[index] = ttls[index] !== 0 ? perf.now() : 0;
|
|
};
|
|
this.#statusTTL = (status, index) => {
|
|
if (ttls[index]) {
|
|
const ttl = ttls[index];
|
|
const start = starts[index];
|
|
status.ttl = ttl;
|
|
status.start = start;
|
|
status.now = cachedNow || getNow();
|
|
const age = status.now - start;
|
|
status.remainingTTL = ttl - age;
|
|
}
|
|
};
|
|
// debounce calls to perf.now() to 1s so we're not hitting
|
|
// that costly call repeatedly.
|
|
let cachedNow = 0;
|
|
const getNow = () => {
|
|
const n = perf.now();
|
|
if (this.ttlResolution > 0) {
|
|
cachedNow = n;
|
|
const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
|
|
// not available on all platforms
|
|
/* c8 ignore start */
|
|
if (t.unref) {
|
|
t.unref();
|
|
}
|
|
/* c8 ignore stop */
|
|
}
|
|
return n;
|
|
};
|
|
this.getRemainingTTL = key => {
|
|
const index = this.#keyMap.get(key);
|
|
if (index === undefined) {
|
|
return 0;
|
|
}
|
|
const ttl = ttls[index];
|
|
const start = starts[index];
|
|
if (ttl === 0 || start === 0) {
|
|
return Infinity;
|
|
}
|
|
const age = (cachedNow || getNow()) - start;
|
|
return ttl - age;
|
|
};
|
|
this.#isStale = index => {
|
|
return (ttls[index] !== 0 &&
|
|
starts[index] !== 0 &&
|
|
(cachedNow || getNow()) - starts[index] > ttls[index]);
|
|
};
|
|
}
|
|
// conditionally set private methods related to TTL
|
|
#updateItemAge = () => { };
|
|
#statusTTL = () => { };
|
|
#setItemTTL = () => { };
|
|
/* c8 ignore stop */
|
|
#isStale = () => false;
|
|
#initializeSizeTracking() {
|
|
const sizes = new ZeroArray(this.#max);
|
|
this.#calculatedSize = 0;
|
|
this.#sizes = sizes;
|
|
this.#removeItemSize = index => {
|
|
this.#calculatedSize -= sizes[index];
|
|
sizes[index] = 0;
|
|
};
|
|
this.#requireSize = (k, v, size, sizeCalculation) => {
|
|
// provisionally accept background fetches.
|
|
// actual value size will be checked when they return.
|
|
if (this.#isBackgroundFetch(v)) {
|
|
return 0;
|
|
}
|
|
if (!isPosInt(size)) {
|
|
if (sizeCalculation) {
|
|
if (typeof sizeCalculation !== 'function') {
|
|
throw new TypeError('sizeCalculation must be a function');
|
|
}
|
|
size = sizeCalculation(v, k);
|
|
if (!isPosInt(size)) {
|
|
throw new TypeError('sizeCalculation return invalid (expect positive integer)');
|
|
}
|
|
}
|
|
else {
|
|
throw new TypeError('invalid size value (must be positive integer). ' +
|
|
'When maxSize or maxEntrySize is used, sizeCalculation ' +
|
|
'or size must be set.');
|
|
}
|
|
}
|
|
return size;
|
|
};
|
|
this.#addItemSize = (index, size, status) => {
|
|
sizes[index] = size;
|
|
if (this.#maxSize) {
|
|
const maxSize = this.#maxSize - sizes[index];
|
|
while (this.#calculatedSize > maxSize) {
|
|
this.#evict(true);
|
|
}
|
|
}
|
|
this.#calculatedSize += sizes[index];
|
|
if (status) {
|
|
status.entrySize = size;
|
|
status.totalCalculatedSize = this.#calculatedSize;
|
|
}
|
|
};
|
|
}
|
|
#removeItemSize = _i => { };
|
|
#addItemSize = (_i, _s, _st) => { };
|
|
#requireSize = (_k, _v, size, sizeCalculation) => {
|
|
if (size || sizeCalculation) {
|
|
throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
|
|
}
|
|
return 0;
|
|
};
|
|
*#indexes({ allowStale = this.allowStale } = {}) {
|
|
if (this.#size) {
|
|
for (let i = this.#tail; true;) {
|
|
if (!this.#isValidIndex(i)) {
|
|
break;
|
|
}
|
|
if (allowStale || !this.#isStale(i)) {
|
|
yield i;
|
|
}
|
|
if (i === this.#head) {
|
|
break;
|
|
}
|
|
else {
|
|
i = this.#prev[i];
|
|
}
|
|
}
|
|
}
|
|
}
|
|
*#rindexes({ allowStale = this.allowStale } = {}) {
|
|
if (this.#size) {
|
|
for (let i = this.#head; true;) {
|
|
if (!this.#isValidIndex(i)) {
|
|
break;
|
|
}
|
|
if (allowStale || !this.#isStale(i)) {
|
|
yield i;
|
|
}
|
|
if (i === this.#tail) {
|
|
break;
|
|
}
|
|
else {
|
|
i = this.#next[i];
|
|
}
|
|
}
|
|
}
|
|
}
|
|
#isValidIndex(index) {
|
|
return (index !== undefined &&
|
|
this.#keyMap.get(this.#keyList[index]) === index);
|
|
}
|
|
/**
|
|
* Return a generator yielding `[key, value]` pairs,
|
|
* in order from most recently used to least recently used.
|
|
*/
|
|
*entries() {
|
|
for (const i of this.#indexes()) {
|
|
if (this.#valList[i] !== undefined &&
|
|
this.#keyList[i] !== undefined &&
|
|
!this.#isBackgroundFetch(this.#valList[i])) {
|
|
yield [this.#keyList[i], this.#valList[i]];
|
|
}
|
|
}
|
|
}
|
|
/**
|
|
* Inverse order version of {@link LRUCache.entries}
|
|
*
|
|
* Return a generator yielding `[key, value]` pairs,
|
|
* in order from least recently used to most recently used.
|
|
*/
|
|
*rentries() {
|
|
for (const i of this.#rindexes()) {
|
|
if (this.#valList[i] !== undefined &&
|
|
this.#keyList[i] !== undefined &&
|
|
!this.#isBackgroundFetch(this.#valList[i])) {
|
|
yield [this.#keyList[i], this.#valList[i]];
|
|
}
|
|
}
|
|
}
|
|
/**
|
|
* Return a generator yielding the keys in the cache,
|
|
* in order from most recently used to least recently used.
|
|
*/
|
|
*keys() {
|
|
for (const i of this.#indexes()) {
|
|
const k = this.#keyList[i];
|
|
if (k !== undefined &&
|
|
!this.#isBackgroundFetch(this.#valList[i])) {
|
|
yield k;
|
|
}
|
|
}
|
|
}
|
|
/**
|
|
* Inverse order version of {@link LRUCache.keys}
|
|
*
|
|
* Return a generator yielding the keys in the cache,
|
|
* in order from least recently used to most recently used.
|
|
*/
|
|
*rkeys() {
|
|
for (const i of this.#rindexes()) {
|
|
const k = this.#keyList[i];
|
|
if (k !== undefined &&
|
|
!this.#isBackgroundFetch(this.#valList[i])) {
|
|
yield k;
|
|
}
|
|
}
|
|
}
|
|
/**
|
|
* Return a generator yielding the values in the cache,
|
|
* in order from most recently used to least recently used.
|
|
*/
|
|
*values() {
|
|
for (const i of this.#indexes()) {
|
|
const v = this.#valList[i];
|
|
if (v !== undefined &&
|
|
!this.#isBackgroundFetch(this.#valList[i])) {
|
|
yield this.#valList[i];
|
|
}
|
|
}
|
|
}
|
|
/**
|
|
* Inverse order version of {@link LRUCache.values}
|
|
*
|
|
* Return a generator yielding the values in the cache,
|
|
* in order from least recently used to most recently used.
|
|
*/
|
|
*rvalues() {
|
|
for (const i of this.#rindexes()) {
|
|
const v = this.#valList[i];
|
|
if (v !== undefined &&
|
|
!this.#isBackgroundFetch(this.#valList[i])) {
|
|
yield this.#valList[i];
|
|
}
|
|
}
|
|
}
|
|
/**
|
|
* Iterating over the cache itself yields the same results as
|
|
* {@link LRUCache.entries}
|
|
*/
|
|
[Symbol.iterator]() {
|
|
return this.entries();
|
|
}
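/*
 * Usage sketch (illustrative; `cache` is an assumed LRUCache instance):
 * iteration yields [key, value] pairs from most to least recently used.
 *
 *   for (const [key, value] of cache) console.log(key, value);
 *   [...cache.keys()];    // keys, most recently used first
 *   [...cache.rvalues()]; // values, least recently used first
 */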
|
|
/**
|
|
* Find a value for which the supplied fn method returns a truthy value,
|
|
* similar to Array.find(). fn is called as fn(value, key, cache).
|
|
*/
|
|
find(fn, getOptions = {}) {
|
|
for (const i of this.#indexes()) {
|
|
const v = this.#valList[i];
|
|
const value = this.#isBackgroundFetch(v)
|
|
? v.__staleWhileFetching
|
|
: v;
|
|
if (value === undefined)
|
|
continue;
|
|
if (fn(value, this.#keyList[i], this)) {
|
|
return this.get(this.#keyList[i], getOptions);
|
|
}
|
|
}
|
|
}
|
|
/**
 * Call the supplied function on each item in the cache, in order from
 * most recently used to least recently used. fn is called as
 * fn(value, key, cache). Does not update age or recency of use.
 * Does not iterate over stale values.
 */
forEach(fn, thisp = this) {
|
|
for (const i of this.#indexes()) {
|
|
const v = this.#valList[i];
|
|
const value = this.#isBackgroundFetch(v)
|
|
? v.__staleWhileFetching
|
|
: v;
|
|
if (value === undefined)
|
|
continue;
|
|
fn.call(thisp, value, this.#keyList[i], this);
|
|
}
|
|
}
|
|
/**
 * The same as {@link LRUCache.forEach} but items are iterated over in
 * reverse order. (i.e., less recently used items are iterated over first.)
 */
rforEach(fn, thisp = this) {
|
|
for (const i of this.#rindexes()) {
|
|
const v = this.#valList[i];
|
|
const value = this.#isBackgroundFetch(v)
|
|
? v.__staleWhileFetching
|
|
: v;
|
|
if (value === undefined)
|
|
continue;
|
|
fn.call(thisp, value, this.#keyList[i], this);
|
|
}
|
|
}
|
|
/**
|
|
* Delete any stale entries. Returns true if anything was removed,
|
|
* false otherwise.
|
|
*/
|
|
purgeStale() {
|
|
let deleted = false;
|
|
for (const i of this.#rindexes({ allowStale: true })) {
|
|
if (this.#isStale(i)) {
|
|
this.delete(this.#keyList[i]);
|
|
deleted = true;
|
|
}
|
|
}
|
|
return deleted;
|
|
}
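/*
 * Usage sketch (illustrative; `cache` is an assumed LRUCache instance holding
 * only one short-lived entry):
 *
 *   cache.set('k', 'v', { ttl: 50 });
 *   setTimeout(() => {
 *     cache.purgeStale(); // => true once 'k' has expired
 *     cache.size;         // => 0
 *   }, 100);
 */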
|
|
/**
|
|
* Return an array of [key, {@link LRUCache.Entry}] tuples which can be
|
|
* passed to cache.load()
|
|
*/
|
|
dump() {
|
|
const arr = [];
|
|
for (const i of this.#indexes({ allowStale: true })) {
|
|
const key = this.#keyList[i];
|
|
const v = this.#valList[i];
|
|
const value = this.#isBackgroundFetch(v)
|
|
? v.__staleWhileFetching
|
|
: v;
|
|
if (value === undefined || key === undefined)
|
|
continue;
|
|
const entry = { value };
|
|
if (this.#ttls && this.#starts) {
|
|
entry.ttl = this.#ttls[i];
|
|
// always dump the start relative to a portable timestamp
|
|
// it's ok for this to be a bit slow, it's a rare operation.
|
|
const age = perf.now() - this.#starts[i];
|
|
entry.start = Math.floor(Date.now() - age);
|
|
}
|
|
if (this.#sizes) {
|
|
entry.size = this.#sizes[i];
|
|
}
|
|
arr.unshift([key, entry]);
|
|
}
|
|
return arr;
|
|
}
|
|
/**
|
|
* Reset the cache and load in the items in entries in the order listed.
|
|
* Note that the shape of the resulting cache may be different if the
|
|
* same options are not used in both caches.
|
|
*/
|
|
load(arr) {
|
|
this.clear();
|
|
for (const [key, entry] of arr) {
|
|
if (entry.start) {
|
|
// entry.start is a portable timestamp, but we may be using
|
|
// node's performance.now(), so calculate the offset, so that
|
|
// we get the intended remaining TTL, no matter how long it's
|
|
// been on ice.
|
|
//
|
|
// it's ok for this to be a bit slow, it's a rare operation.
|
|
const age = Date.now() - entry.start;
|
|
entry.start = perf.now() - age;
|
|
}
|
|
this.set(key, entry.value, entry);
|
|
}
|
|
}
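/*
 * Usage sketch (illustrative; assumes `cache` holds JSON-serializable values):
 * dump() and load() can round-trip a cache, e.g. to persist it between runs.
 * TTL start times are rebased on load, as described above.
 *
 *   const snapshot = JSON.stringify(cache.dump());
 *   const restored = new LRUCache({ max: 500 });
 *   restored.load(JSON.parse(snapshot));
 */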
|
|
/**
|
|
* Add a value to the cache.
|
|
*
|
|
* Note: if `undefined` is specified as a value, this is an alias for
|
|
* {@link LRUCache#delete}
|
|
*/
|
|
set(k, v, setOptions = {}) {
|
|
if (v === undefined) {
|
|
this.delete(k);
|
|
return this;
|
|
}
|
|
const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
|
|
let { noUpdateTTL = this.noUpdateTTL } = setOptions;
|
|
const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
|
|
// if the item doesn't fit, don't do anything
|
|
// NB: maxEntrySize set to maxSize by default
|
|
if (this.maxEntrySize && size > this.maxEntrySize) {
|
|
if (status) {
|
|
status.set = 'miss';
|
|
status.maxEntrySizeExceeded = true;
|
|
}
|
|
// have to delete, in case something is there already.
|
|
this.delete(k);
|
|
return this;
|
|
}
|
|
let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
|
|
if (index === undefined) {
|
|
// addition
|
|
index = (this.#size === 0
|
|
? this.#tail
|
|
: this.#free.length !== 0
|
|
? this.#free.pop()
|
|
: this.#size === this.#max
|
|
? this.#evict(false)
|
|
: this.#size);
|
|
this.#keyList[index] = k;
|
|
this.#valList[index] = v;
|
|
this.#keyMap.set(k, index);
|
|
this.#next[this.#tail] = index;
|
|
this.#prev[index] = this.#tail;
|
|
this.#tail = index;
|
|
this.#size++;
|
|
this.#addItemSize(index, size, status);
|
|
if (status)
|
|
status.set = 'add';
|
|
noUpdateTTL = false;
|
|
}
|
|
else {
|
|
// update
|
|
this.#moveToTail(index);
|
|
const oldVal = this.#valList[index];
|
|
if (v !== oldVal) {
|
|
if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
|
|
oldVal.__abortController.abort(new Error('replaced'));
|
|
}
|
|
else if (!noDisposeOnSet) {
|
|
if (this.#hasDispose) {
|
|
this.#dispose?.(oldVal, k, 'set');
|
|
}
|
|
if (this.#hasDisposeAfter) {
|
|
this.#disposed?.push([oldVal, k, 'set']);
|
|
}
|
|
}
|
|
this.#removeItemSize(index);
|
|
this.#addItemSize(index, size, status);
|
|
this.#valList[index] = v;
|
|
if (status) {
|
|
status.set = 'replace';
|
|
const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
|
|
? oldVal.__staleWhileFetching
|
|
: oldVal;
|
|
if (oldValue !== undefined)
|
|
status.oldValue = oldValue;
|
|
}
|
|
}
|
|
else if (status) {
|
|
status.set = 'update';
|
|
}
|
|
}
|
|
if (ttl !== 0 && !this.#ttls) {
|
|
this.#initializeTTLTracking();
|
|
}
|
|
if (this.#ttls) {
|
|
if (!noUpdateTTL) {
|
|
this.#setItemTTL(index, ttl, start);
|
|
}
|
|
if (status)
|
|
this.#statusTTL(status, index);
|
|
}
|
|
if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
|
|
const dt = this.#disposed;
|
|
let task;
|
|
while ((task = dt?.shift())) {
|
|
this.#disposeAfter?.(...task);
|
|
}
|
|
}
|
|
return this;
|
|
}
|
|
/**
|
|
* Evict the least recently used item, returning its value or
|
|
* `undefined` if cache is empty.
|
|
*/
|
|
pop() {
|
|
try {
|
|
while (this.#size) {
|
|
const val = this.#valList[this.#head];
|
|
this.#evict(true);
|
|
if (this.#isBackgroundFetch(val)) {
|
|
if (val.__staleWhileFetching) {
|
|
return val.__staleWhileFetching;
|
|
}
|
|
}
|
|
else if (val !== undefined) {
|
|
return val;
|
|
}
|
|
}
|
|
}
|
|
finally {
|
|
if (this.#hasDisposeAfter && this.#disposed) {
|
|
const dt = this.#disposed;
|
|
let task;
|
|
while ((task = dt?.shift())) {
|
|
this.#disposeAfter?.(...task);
|
|
}
|
|
}
|
|
}
|
|
}
|
|
#evict(free) {
|
|
const head = this.#head;
|
|
const k = this.#keyList[head];
|
|
const v = this.#valList[head];
|
|
if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
|
|
v.__abortController.abort(new Error('evicted'));
|
|
}
|
|
else if (this.#hasDispose || this.#hasDisposeAfter) {
|
|
if (this.#hasDispose) {
|
|
this.#dispose?.(v, k, 'evict');
|
|
}
|
|
if (this.#hasDisposeAfter) {
|
|
this.#disposed?.push([v, k, 'evict']);
|
|
}
|
|
}
|
|
this.#removeItemSize(head);
|
|
// if we aren't about to use the index, then null these out
|
|
if (free) {
|
|
this.#keyList[head] = undefined;
|
|
this.#valList[head] = undefined;
|
|
this.#free.push(head);
|
|
}
|
|
if (this.#size === 1) {
|
|
this.#head = this.#tail = 0;
|
|
this.#free.length = 0;
|
|
}
|
|
else {
|
|
this.#head = this.#next[head];
|
|
}
|
|
this.#keyMap.delete(k);
|
|
this.#size--;
|
|
return head;
|
|
}
|
|
/**
|
|
* Check if a key is in the cache, without updating the recency of use.
|
|
* Will return false if the item is stale, even though it is technically
|
|
* in the cache.
|
|
*
|
|
* Will not update item age unless
|
|
* {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
|
|
*/
|
|
has(k, hasOptions = {}) {
|
|
const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
|
|
const index = this.#keyMap.get(k);
|
|
if (index !== undefined) {
|
|
const v = this.#valList[index];
|
|
if (this.#isBackgroundFetch(v) &&
|
|
v.__staleWhileFetching === undefined) {
|
|
return false;
|
|
}
|
|
if (!this.#isStale(index)) {
|
|
if (updateAgeOnHas) {
|
|
this.#updateItemAge(index);
|
|
}
|
|
if (status) {
|
|
status.has = 'hit';
|
|
this.#statusTTL(status, index);
|
|
}
|
|
return true;
|
|
}
|
|
else if (status) {
|
|
status.has = 'stale';
|
|
this.#statusTTL(status, index);
|
|
}
|
|
}
|
|
else if (status) {
|
|
status.has = 'miss';
|
|
}
|
|
return false;
|
|
}
|
|
/**
|
|
* Like {@link LRUCache#get} but doesn't update recency or delete stale
|
|
* items.
|
|
*
|
|
* Returns `undefined` if the item is stale, unless
|
|
* {@link LRUCache.OptionsBase.allowStale} is set.
|
|
*/
|
|
peek(k, peekOptions = {}) {
|
|
const { allowStale = this.allowStale } = peekOptions;
|
|
const index = this.#keyMap.get(k);
|
|
if (index !== undefined &&
|
|
(allowStale || !this.#isStale(index))) {
|
|
const v = this.#valList[index];
|
|
// either stale and allowed, or forcing a refresh of non-stale value
|
|
return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
|
|
}
|
|
}
|
|
#backgroundFetch(k, index, options, context) {
|
|
const v = index === undefined ? undefined : this.#valList[index];
|
|
if (this.#isBackgroundFetch(v)) {
|
|
return v;
|
|
}
|
|
const ac = new AC();
|
|
const { signal } = options;
|
|
// when/if our AC signals, then stop listening to theirs.
|
|
signal?.addEventListener('abort', () => ac.abort(signal.reason), {
|
|
signal: ac.signal,
|
|
});
|
|
const fetchOpts = {
|
|
signal: ac.signal,
|
|
options,
|
|
context,
|
|
};
|
|
const cb = (v, updateCache = false) => {
|
|
const { aborted } = ac.signal;
|
|
const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
|
|
if (options.status) {
|
|
if (aborted && !updateCache) {
|
|
options.status.fetchAborted = true;
|
|
options.status.fetchError = ac.signal.reason;
|
|
if (ignoreAbort)
|
|
options.status.fetchAbortIgnored = true;
|
|
}
|
|
else {
|
|
options.status.fetchResolved = true;
|
|
}
|
|
}
|
|
if (aborted && !ignoreAbort && !updateCache) {
|
|
return fetchFail(ac.signal.reason);
|
|
}
|
|
// either we didn't abort, and are still here, or we did, and ignored
|
|
const bf = p;
|
|
if (this.#valList[index] === p) {
|
|
if (v === undefined) {
|
|
if (bf.__staleWhileFetching) {
|
|
this.#valList[index] = bf.__staleWhileFetching;
|
|
}
|
|
else {
|
|
this.delete(k);
|
|
}
|
|
}
|
|
else {
|
|
if (options.status)
|
|
options.status.fetchUpdated = true;
|
|
this.set(k, v, fetchOpts.options);
|
|
}
|
|
}
|
|
return v;
|
|
};
|
|
const eb = (er) => {
|
|
if (options.status) {
|
|
options.status.fetchRejected = true;
|
|
options.status.fetchError = er;
|
|
}
|
|
return fetchFail(er);
|
|
};
|
|
const fetchFail = (er) => {
|
|
const { aborted } = ac.signal;
|
|
const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
|
|
const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
|
|
const noDelete = allowStale || options.noDeleteOnFetchRejection;
|
|
const bf = p;
|
|
if (this.#valList[index] === p) {
|
|
// if we allow stale on fetch rejections, then we need to ensure that
|
|
// the stale value is not removed from the cache when the fetch fails.
|
|
const del = !noDelete || bf.__staleWhileFetching === undefined;
|
|
if (del) {
|
|
this.delete(k);
|
|
}
|
|
else if (!allowStaleAborted) {
|
|
// still replace the *promise* with the stale value,
|
|
// since we are done with the promise at this point.
|
|
// leave it untouched if we're still waiting for an
|
|
// aborted background fetch that hasn't yet returned.
|
|
this.#valList[index] = bf.__staleWhileFetching;
|
|
}
|
|
}
|
|
if (allowStale) {
|
|
if (options.status && bf.__staleWhileFetching !== undefined) {
|
|
options.status.returnedStale = true;
|
|
}
|
|
return bf.__staleWhileFetching;
|
|
}
|
|
else if (bf.__returned === bf) {
|
|
throw er;
|
|
}
|
|
};
|
|
const pcall = (res, rej) => {
|
|
const fmp = this.#fetchMethod?.(k, v, fetchOpts);
|
|
if (fmp && fmp instanceof Promise) {
|
|
fmp.then(v => res(v === undefined ? undefined : v), rej);
|
|
}
|
|
// ignored, we go until we finish, regardless.
|
|
// defer check until we are actually aborting,
|
|
// so fetchMethod can override.
|
|
ac.signal.addEventListener('abort', () => {
|
|
if (!options.ignoreFetchAbort ||
|
|
options.allowStaleOnFetchAbort) {
|
|
res(undefined);
|
|
// when it eventually resolves, update the cache.
|
|
if (options.allowStaleOnFetchAbort) {
|
|
res = v => cb(v, true);
|
|
}
|
|
}
|
|
});
|
|
};
|
|
if (options.status)
|
|
options.status.fetchDispatched = true;
|
|
const p = new Promise(pcall).then(cb, eb);
|
|
const bf = Object.assign(p, {
|
|
__abortController: ac,
|
|
__staleWhileFetching: v,
|
|
__returned: undefined,
|
|
});
|
|
if (index === undefined) {
|
|
// internal, don't expose status.
|
|
this.set(k, bf, { ...fetchOpts.options, status: undefined });
|
|
index = this.#keyMap.get(k);
|
|
}
|
|
else {
|
|
this.#valList[index] = bf;
|
|
}
|
|
return bf;
|
|
}
|
|
#isBackgroundFetch(p) {
|
|
if (!this.#hasFetchMethod)
|
|
return false;
|
|
const b = p;
|
|
return (!!b &&
|
|
b instanceof Promise &&
|
|
b.hasOwnProperty('__staleWhileFetching') &&
|
|
b.__abortController instanceof AC);
|
|
}
|
|
async fetch(k, fetchOptions = {}) {
|
|
const {
|
|
// get options
|
|
allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet,
|
|
// set options
|
|
ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL,
|
|
// fetch exclusive options
|
|
noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
|
|
if (!this.#hasFetchMethod) {
|
|
if (status)
|
|
status.fetch = 'get';
|
|
return this.get(k, {
|
|
allowStale,
|
|
updateAgeOnGet,
|
|
noDeleteOnStaleGet,
|
|
status,
|
|
});
|
|
}
|
|
const options = {
|
|
allowStale,
|
|
updateAgeOnGet,
|
|
noDeleteOnStaleGet,
|
|
ttl,
|
|
noDisposeOnSet,
|
|
size,
|
|
sizeCalculation,
|
|
noUpdateTTL,
|
|
noDeleteOnFetchRejection,
|
|
allowStaleOnFetchRejection,
|
|
allowStaleOnFetchAbort,
|
|
ignoreFetchAbort,
|
|
status,
|
|
signal,
|
|
};
|
|
let index = this.#keyMap.get(k);
|
|
if (index === undefined) {
|
|
if (status)
|
|
status.fetch = 'miss';
|
|
const p = this.#backgroundFetch(k, index, options, context);
|
|
return (p.__returned = p);
|
|
}
|
|
else {
|
|
// in cache, maybe already fetching
|
|
const v = this.#valList[index];
|
|
if (this.#isBackgroundFetch(v)) {
|
|
const stale = allowStale && v.__staleWhileFetching !== undefined;
|
|
if (status) {
|
|
status.fetch = 'inflight';
|
|
if (stale)
|
|
status.returnedStale = true;
|
|
}
|
|
return stale ? v.__staleWhileFetching : (v.__returned = v);
|
|
}
|
|
// if we force a refresh, that means do NOT serve the cached value,
|
|
// unless we are already in the process of refreshing the cache.
|
|
const isStale = this.#isStale(index);
|
|
if (!forceRefresh && !isStale) {
|
|
if (status)
|
|
status.fetch = 'hit';
|
|
this.#moveToTail(index);
|
|
if (updateAgeOnGet) {
|
|
this.#updateItemAge(index);
|
|
}
|
|
if (status)
|
|
this.#statusTTL(status, index);
|
|
return v;
|
|
}
|
|
// ok, it is stale or a forced refresh, and not already fetching.
|
|
// refresh the cache.
|
|
const p = this.#backgroundFetch(k, index, options, context);
|
|
const hasStale = p.__staleWhileFetching !== undefined;
|
|
const staleVal = hasStale && allowStale;
|
|
if (status) {
|
|
status.fetch = isStale ? 'stale' : 'refresh';
|
|
if (staleVal && isStale)
|
|
status.returnedStale = true;
|
|
}
|
|
return staleVal ? p.__staleWhileFetching : (p.__returned = p);
|
|
}
|
|
}
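/*
 * Usage sketch (illustrative): fetch() only does more than get() when a
 * fetchMethod is configured. The URL-based loader below is hypothetical.
 *
 *   const cache = new LRUCache({
 *     max: 100,
 *     fetchMethod: async (key, staleValue, { signal }) => {
 *       const res = await fetch(`https://example.com/items/${key}`, { signal });
 *       return res.json();
 *     },
 *   });
 *   const item = await cache.fetch('item-1'); // served from cache on later calls
 */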
|
|
/**
|
|
* Return a value from the cache. Will update the recency of the cache
|
|
* entry found.
|
|
*
|
|
* If the key is not found, get() will return `undefined`.
|
|
*/
|
|
get(k, getOptions = {}) {
|
|
const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
|
|
const index = this.#keyMap.get(k);
|
|
if (index !== undefined) {
|
|
const value = this.#valList[index];
|
|
const fetching = this.#isBackgroundFetch(value);
|
|
if (status)
|
|
this.#statusTTL(status, index);
|
|
if (this.#isStale(index)) {
|
|
if (status)
|
|
status.get = 'stale';
|
|
// delete only if not an in-flight background fetch
|
|
if (!fetching) {
|
|
if (!noDeleteOnStaleGet) {
|
|
this.delete(k);
|
|
}
|
|
if (status && allowStale)
|
|
status.returnedStale = true;
|
|
return allowStale ? value : undefined;
|
|
}
|
|
else {
|
|
if (status &&
|
|
allowStale &&
|
|
value.__staleWhileFetching !== undefined) {
|
|
status.returnedStale = true;
|
|
}
|
|
return allowStale ? value.__staleWhileFetching : undefined;
|
|
}
|
|
}
|
|
else {
|
|
if (status)
|
|
status.get = 'hit';
|
|
// if we're currently fetching it, we don't actually have it yet
|
|
// it's not stale, which means this isn't a staleWhileRefetching.
|
|
// If it's not stale, and fetching, AND has a __staleWhileFetching
|
|
// value, then that means the user fetched with {forceRefresh:true},
|
|
// so it's safe to return that value.
|
|
if (fetching) {
|
|
return value.__staleWhileFetching;
|
|
}
|
|
this.#moveToTail(index);
|
|
if (updateAgeOnGet) {
|
|
this.#updateItemAge(index);
|
|
}
|
|
return value;
|
|
}
|
|
}
|
|
else if (status) {
|
|
status.get = 'miss';
|
|
}
|
|
}
|
|
#connect(p, n) {
|
|
this.#prev[n] = p;
|
|
this.#next[p] = n;
|
|
}
|
|
#moveToTail(index) {
|
|
// if tail already, nothing to do
|
|
// if head, move head to next[index]
|
|
// else
|
|
// move next[prev[index]] to next[index] (head has no prev)
|
|
// move prev[next[index]] to prev[index]
|
|
// prev[index] = tail
|
|
// next[tail] = index
|
|
// tail = index
|
|
if (index !== this.#tail) {
|
|
if (index === this.#head) {
|
|
this.#head = this.#next[index];
|
|
}
|
|
else {
|
|
this.#connect(this.#prev[index], this.#next[index]);
|
|
}
|
|
this.#connect(this.#tail, index);
|
|
this.#tail = index;
|
|
}
|
|
}
|
|
/**
|
|
* Deletes a key out of the cache.
|
|
* Returns true if the key was deleted, false otherwise.
|
|
*/
|
|
delete(k) {
|
|
let deleted = false;
|
|
if (this.#size !== 0) {
|
|
const index = this.#keyMap.get(k);
|
|
if (index !== undefined) {
|
|
deleted = true;
|
|
if (this.#size === 1) {
|
|
this.clear();
|
|
}
|
|
else {
|
|
this.#removeItemSize(index);
|
|
const v = this.#valList[index];
|
|
if (this.#isBackgroundFetch(v)) {
|
|
v.__abortController.abort(new Error('deleted'));
|
|
}
|
|
else if (this.#hasDispose || this.#hasDisposeAfter) {
|
|
if (this.#hasDispose) {
|
|
this.#dispose?.(v, k, 'delete');
|
|
}
|
|
if (this.#hasDisposeAfter) {
|
|
this.#disposed?.push([v, k, 'delete']);
|
|
}
|
|
}
|
|
this.#keyMap.delete(k);
|
|
this.#keyList[index] = undefined;
|
|
this.#valList[index] = undefined;
|
|
if (index === this.#tail) {
|
|
this.#tail = this.#prev[index];
|
|
}
|
|
else if (index === this.#head) {
|
|
this.#head = this.#next[index];
|
|
}
|
|
else {
|
|
this.#next[this.#prev[index]] = this.#next[index];
|
|
this.#prev[this.#next[index]] = this.#prev[index];
|
|
}
|
|
this.#size--;
|
|
this.#free.push(index);
|
|
}
|
|
}
|
|
}
|
|
if (this.#hasDisposeAfter && this.#disposed?.length) {
|
|
const dt = this.#disposed;
|
|
let task;
|
|
while ((task = dt?.shift())) {
|
|
this.#disposeAfter?.(...task);
|
|
}
|
|
}
|
|
return deleted;
|
|
}
|
|
/**
|
|
* Clear the cache entirely, throwing away all values.
|
|
*/
|
|
clear() {
|
|
for (const index of this.#rindexes({ allowStale: true })) {
|
|
const v = this.#valList[index];
|
|
if (this.#isBackgroundFetch(v)) {
|
|
v.__abortController.abort(new Error('deleted'));
|
|
}
|
|
else {
|
|
const k = this.#keyList[index];
|
|
if (this.#hasDispose) {
|
|
this.#dispose?.(v, k, 'delete');
|
|
}
|
|
if (this.#hasDisposeAfter) {
|
|
this.#disposed?.push([v, k, 'delete']);
|
|
}
|
|
}
|
|
}
|
|
this.#keyMap.clear();
|
|
this.#valList.fill(undefined);
|
|
this.#keyList.fill(undefined);
|
|
if (this.#ttls && this.#starts) {
|
|
this.#ttls.fill(0);
|
|
this.#starts.fill(0);
|
|
}
|
|
if (this.#sizes) {
|
|
this.#sizes.fill(0);
|
|
}
|
|
this.#head = 0;
|
|
this.#tail = 0;
|
|
this.#free.length = 0;
|
|
this.#calculatedSize = 0;
|
|
this.#size = 0;
|
|
if (this.#hasDisposeAfter && this.#disposed) {
|
|
const dt = this.#disposed;
|
|
let task;
|
|
while ((task = dt?.shift())) {
|
|
this.#disposeAfter?.(...task);
|
|
}
|
|
}
|
|
}
|
|
}
|
|
exports.LRUCache = LRUCache;
//# sourceMappingURL=index.js.map

/***/ })

/******/ });
/************************************************************************/
/******/ // The module cache
/******/ var __webpack_module_cache__ = {};
/******/
/******/ // The require function
/******/ function __nccwpck_require__(moduleId) {
/******/ // Check if module is in cache
/******/ var cachedModule = __webpack_module_cache__[moduleId];
/******/ if (cachedModule !== undefined) {
/******/ return cachedModule.exports;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = __webpack_module_cache__[moduleId] = {
/******/ // no module.id needed
/******/ // no module.loaded needed
/******/ exports: {}
/******/ };
/******/
/******/ // Execute the module function
/******/ var threw = true;
/******/ try {
/******/ __webpack_modules__[moduleId].call(module.exports, module, module.exports, __nccwpck_require__);
/******/ threw = false;
/******/ } finally {
/******/ if(threw) delete __webpack_module_cache__[moduleId];
/******/ }
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/************************************************************************/
/******/ /* webpack/runtime/compat */
/******/
/******/ if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = __dirname + "/";
/******/
/************************************************************************/
/******/
/******/ // startup
/******/ // Load entry module and return exports
/******/ // This entry module is referenced by other modules so it can't be inlined
/******/ var __webpack_exports__ = __nccwpck_require__(3109);
/******/ module.exports = __webpack_exports__;
/******/
/******/ })()
;
//# sourceMappingURL=index.js.map