// Bundled action output (@vercel/ncc): ~10860 lines, 353 KiB, JavaScript.
module.exports =
|
|
/******/ (() => { // webpackBootstrap
|
|
/******/ var __webpack_modules__ = ({
|
|
|
|
/***/ 5496:
|
|
/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
const core = __nccwpck_require__(2186);
|
|
const github = __nccwpck_require__(5438);
|
|
const AdmZip = __nccwpck_require__(6761);
|
|
const filesize = __nccwpck_require__(5060);
|
|
const pathname = __nccwpck_require__(5622);
|
|
const fs = __nccwpck_require__(5747);
|
|
const { throttling } = __nccwpck_require__(9968);
|
|
const { GitHub } = __nccwpck_require__(3030);
|
|
|
|
/**
 * Collects workflow artifacts in `owner/repo` whose name matches `name` exactly.
 *
 * Pagination stops early (via Octokit's `done()` callback) as soon as a page
 * contains at least one match, so at most the pages up to the first hit are
 * fetched.
 *
 * @param {object} client authenticated Octokit instance with the paginate plugin
 * @param {string} owner  repository owner
 * @param {string} repo   repository name
 * @param {string} name   artifact name to look for (exact match)
 * @returns {Promise<Array>} matching artifact objects (possibly empty)
 */
async function getGoodArtifacts(client, owner, repo, name) {
  const goodRepoArtifacts = await client.paginate(
    "GET /repos/{owner}/{repo}/actions/artifacts",
    {
      owner,
      repo,
      per_page: 100,
    },
    (repoArtifacts, done) => {
      // Strict comparison: artifact names and the input are always strings.
      const goodArtifacts = repoArtifacts.data.filter((a) => a.name === name);
      if (goodArtifacts.length > 0) {
        // Found what we came for -- stop paginating.
        done();
      }
      return goodArtifacts;
    }
  );

  console.log("==> maybe goodRepoArtifacts:", goodRepoArtifacts);
  return goodRepoArtifacts;
}
|
|
|
|
/**
 * Entry point for the action.
 *
 * Inputs (read via @actions/core):
 *   github_token (required) -- auth token for the GitHub API
 *   repo         (required) -- "owner/repo" to search for artifacts
 *   path         (required) -- destination directory for downloads
 *   name                    -- artifact name to look for (exact match)
 *   download                -- "true" to download and extract the artifact
 *
 * Outputs:
 *   status -- "found" or "missing"
 *
 * Fails the step when download was requested but the artifact is missing.
 */
async function main() {
  const token = core.getInput("github_token", { required: true });
  const [owner, repo] = core.getInput("repo", { required: true }).split("/");
  const path = core.getInput("path", { required: true });
  const name = core.getInput("name");
  const download = core.getInput("download");

  // The throttling plugin backs off automatically on rate/abuse limits.
  const OctokitWithThrottling = GitHub.plugin(throttling);
  const client = new OctokitWithThrottling({
    auth: token,
    throttle: {
      onRateLimit: (retryAfter, options) => {
        console.log(
          `Request quota exhausted for request ${options.method} ${options.url}`
        );

        // Retry twice after hitting a rate limit error, then give up
        if (options.request.retryCount <= 2) {
          console.log(`Retrying after ${retryAfter} seconds!`);
          return true;
        }
      },
      onAbuseLimit: (retryAfter, options) => {
        // does not retry, only logs a warning
        console.log(
          `Abuse detected for request ${options.method} ${options.url}`
        );
      },
    },
  });
  console.log("==> Repo:", owner + "/" + repo);

  const goodArtifacts = await getGoodArtifacts(client, owner, repo, name);
  console.log("==> goodArtifacts:", goodArtifacts);

  // Single assignment instead of let + if/else reassignment.
  const artifactStatus = goodArtifacts.length === 0 ? "missing" : "found";

  console.log("==> Artifact", name, artifactStatus);
  console.log("==> download", download);

  core.setOutput("status", artifactStatus);

  if (artifactStatus === "found" && download === "true") {
    console.log("==> # artifacts:", goodArtifacts.length);

    const artifact = goodArtifacts[0];

    console.log("==> Artifact:", artifact.id)

    const size = filesize(artifact.size_in_bytes, { base: 10 })

    console.log("==> Downloading:", artifact.name + ".zip", `(${size})`)

    const zip = await client.actions.downloadArtifact({
      owner: owner,
      repo: repo,
      artifact_id: artifact.id,
      archive_format: "zip",
    })

    // When a name was given, extract straight into `path`; otherwise keep
    // each artifact in its own subdirectory named after the artifact.
    const dir = name ? path : pathname.join(path, artifact.name)

    fs.mkdirSync(dir, { recursive: true })

    const adm = new AdmZip(Buffer.from(zip.data))

    adm.getEntries().forEach((entry) => {
      const action = entry.isDirectory ? "creating" : "inflating"
      const filepath = pathname.join(dir, entry.entryName)
      console.log(`  ${action}: ${filepath}`)
    })

    adm.extractAllTo(dir, true)
  }

  if (artifactStatus === "missing" && download === "true") {
    // BUG FIX: core.setFailed takes a single message argument; the old call
    // passed three arguments, so only the word "Required" ever reached the
    // log. Build the full message explicitly instead.
    core.setFailed(`Required artifact "${name}" is missing`);
  }
}
|
|
|
|
// We have to manually handle errors from main() here because GitHub will
// ignore uncaught exceptions and unhandled promise rejections and continue
// running the workflow, leading to harder to diagnose errors downstream from
// this action.
//
// BUG FIX: main() is async, so the previous `try { main(); } catch` only
// caught *synchronous* throws; any rejected promise (i.e. almost every real
// failure) escaped unhandled. A .catch() on the returned promise covers both.
main().catch((error) => {
  core.setFailed(error.message);
});
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 7351:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
result["default"] = mod;
|
|
return result;
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
const os = __importStar(__nccwpck_require__(2087));
|
|
const utils_1 = __nccwpck_require__(5278);
|
|
/**
|
|
* Commands
|
|
*
|
|
* Command Format:
|
|
* ::name key=value,key=value::message
|
|
*
|
|
* Examples:
|
|
* ::warning::This is the message
|
|
* ::set-env name=MY_VAR::some value
|
|
*/
|
|
function issueCommand(command, properties, message) {
|
|
const cmd = new Command(command, properties, message);
|
|
process.stdout.write(cmd.toString() + os.EOL);
|
|
}
|
|
exports.issueCommand = issueCommand;
|
|
function issue(name, message = '') {
|
|
issueCommand(name, {}, message);
|
|
}
|
|
exports.issue = issue;
|
|
const CMD_STRING = '::';
|
|
class Command {
|
|
constructor(command, properties, message) {
|
|
if (!command) {
|
|
command = 'missing.command';
|
|
}
|
|
this.command = command;
|
|
this.properties = properties;
|
|
this.message = message;
|
|
}
|
|
toString() {
|
|
let cmdStr = CMD_STRING + this.command;
|
|
if (this.properties && Object.keys(this.properties).length > 0) {
|
|
cmdStr += ' ';
|
|
let first = true;
|
|
for (const key in this.properties) {
|
|
if (this.properties.hasOwnProperty(key)) {
|
|
const val = this.properties[key];
|
|
if (val) {
|
|
if (first) {
|
|
first = false;
|
|
}
|
|
else {
|
|
cmdStr += ',';
|
|
}
|
|
cmdStr += `${key}=${escapeProperty(val)}`;
|
|
}
|
|
}
|
|
}
|
|
}
|
|
cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
|
|
return cmdStr;
|
|
}
|
|
}
|
|
/**
 * Escapes '%', CR and LF so arbitrary message data survives being embedded
 * in a single-line `::command::message` workflow command.
 * '%' must be escaped first so already-escaped sequences are not re-escaped.
 */
function escapeData(s) {
    const replacements = [
        [/%/g, '%25'],
        [/\r/g, '%0D'],
        [/\n/g, '%0A'],
    ];
    let value = utils_1.toCommandValue(s);
    for (const [pattern, escaped] of replacements) {
        value = value.replace(pattern, escaped);
    }
    return value;
}
|
|
/**
 * Escapes a command property value. In addition to the data escapes
 * ('%', CR, LF), properties escape ':' and ',' because those characters
 * delimit the `::name key=value,key=value::` command syntax itself.
 * '%' must be escaped first so already-escaped sequences are not re-escaped.
 */
function escapeProperty(s) {
    const replacements = [
        [/%/g, '%25'],
        [/\r/g, '%0D'],
        [/\n/g, '%0A'],
        [/:/g, '%3A'],
        [/,/g, '%2C'],
    ];
    let value = utils_1.toCommandValue(s);
    for (const [pattern, escaped] of replacements) {
        value = value.replace(pattern, escaped);
    }
    return value;
}
|
|
//# sourceMappingURL=command.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2186:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
});
|
|
};
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
result["default"] = mod;
|
|
return result;
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
const command_1 = __nccwpck_require__(7351);
|
|
const file_command_1 = __nccwpck_require__(717);
|
|
const utils_1 = __nccwpck_require__(5278);
|
|
const os = __importStar(__nccwpck_require__(2087));
|
|
const path = __importStar(__nccwpck_require__(5622));
|
|
/**
|
|
* The code to exit an action
|
|
*/
|
|
var ExitCode;
|
|
(function (ExitCode) {
|
|
/**
|
|
* A code indicating that the action was successful
|
|
*/
|
|
ExitCode[ExitCode["Success"] = 0] = "Success";
|
|
/**
|
|
* A code indicating that the action was a failure
|
|
*/
|
|
ExitCode[ExitCode["Failure"] = 1] = "Failure";
|
|
})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
|
|
//-----------------------------------------------------------------------
|
|
// Variables
|
|
//-----------------------------------------------------------------------
|
|
/**
|
|
* Sets env variable for this action and future actions in the job
|
|
* @param name the name of the variable to set
|
|
* @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
|
|
*/
|
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
function exportVariable(name, val) {
|
|
const convertedVal = utils_1.toCommandValue(val);
|
|
process.env[name] = convertedVal;
|
|
const filePath = process.env['GITHUB_ENV'] || '';
|
|
if (filePath) {
|
|
const delimiter = '_GitHubActionsFileCommandDelimeter_';
|
|
const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;
|
|
file_command_1.issueCommand('ENV', commandValue);
|
|
}
|
|
else {
|
|
command_1.issueCommand('set-env', { name }, convertedVal);
|
|
}
|
|
}
|
|
exports.exportVariable = exportVariable;
|
|
/**
|
|
* Registers a secret which will get masked from logs
|
|
* @param secret value of the secret
|
|
*/
|
|
function setSecret(secret) {
|
|
command_1.issueCommand('add-mask', {}, secret);
|
|
}
|
|
exports.setSecret = setSecret;
|
|
/**
|
|
* Prepends inputPath to the PATH (for this action and future actions)
|
|
* @param inputPath
|
|
*/
|
|
function addPath(inputPath) {
|
|
const filePath = process.env['GITHUB_PATH'] || '';
|
|
if (filePath) {
|
|
file_command_1.issueCommand('PATH', inputPath);
|
|
}
|
|
else {
|
|
command_1.issueCommand('add-path', {}, inputPath);
|
|
}
|
|
process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
|
|
}
|
|
exports.addPath = addPath;
|
|
/**
|
|
* Gets the value of an input. The value is also trimmed.
|
|
*
|
|
* @param name name of the input to get
|
|
* @param options optional. See InputOptions.
|
|
* @returns string
|
|
*/
|
|
/**
 * Gets the value of an input. The value is also trimmed.
 *
 * @param name name of the input to get
 * @param options optional. See InputOptions.
 * @returns string
 */
function getInput(name, options) {
    // The runner exposes inputs as INPUT_<NAME> environment variables,
    // with spaces mapped to underscores and the name upper-cased.
    const envKey = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
    const val = process.env[envKey] || '';
    if (options && options.required && !val) {
        throw new Error(`Input required and not supplied: ${name}`);
    }
    return val.trim();
}
|
|
exports.getInput = getInput;
|
|
/**
|
|
* Sets the value of an output.
|
|
*
|
|
* @param name name of the output to set
|
|
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
|
|
*/
|
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
function setOutput(name, value) {
|
|
command_1.issueCommand('set-output', { name }, value);
|
|
}
|
|
exports.setOutput = setOutput;
|
|
/**
|
|
* Enables or disables the echoing of commands into stdout for the rest of the step.
|
|
* Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
|
|
*
|
|
*/
|
|
function setCommandEcho(enabled) {
|
|
command_1.issue('echo', enabled ? 'on' : 'off');
|
|
}
|
|
exports.setCommandEcho = setCommandEcho;
|
|
//-----------------------------------------------------------------------
|
|
// Results
|
|
//-----------------------------------------------------------------------
|
|
/**
|
|
* Sets the action status to failed.
|
|
* When the action exits it will be with an exit code of 1
|
|
* @param message add error issue message
|
|
*/
|
|
function setFailed(message) {
|
|
process.exitCode = ExitCode.Failure;
|
|
error(message);
|
|
}
|
|
exports.setFailed = setFailed;
|
|
//-----------------------------------------------------------------------
|
|
// Logging Commands
|
|
//-----------------------------------------------------------------------
|
|
/**
|
|
* Gets whether Actions Step Debug is on or not
|
|
*/
|
|
function isDebug() {
|
|
return process.env['RUNNER_DEBUG'] === '1';
|
|
}
|
|
exports.isDebug = isDebug;
|
|
/**
|
|
* Writes debug message to user log
|
|
* @param message debug message
|
|
*/
|
|
function debug(message) {
|
|
command_1.issueCommand('debug', {}, message);
|
|
}
|
|
exports.debug = debug;
|
|
/**
|
|
* Adds an error issue
|
|
* @param message error issue message. Errors will be converted to string via toString()
|
|
*/
|
|
function error(message) {
|
|
command_1.issue('error', message instanceof Error ? message.toString() : message);
|
|
}
|
|
exports.error = error;
|
|
/**
|
|
* Adds an warning issue
|
|
* @param message warning issue message. Errors will be converted to string via toString()
|
|
*/
|
|
function warning(message) {
|
|
command_1.issue('warning', message instanceof Error ? message.toString() : message);
|
|
}
|
|
exports.warning = warning;
|
|
/**
|
|
* Writes info to log with console.log.
|
|
* @param message info message
|
|
*/
|
|
function info(message) {
|
|
process.stdout.write(message + os.EOL);
|
|
}
|
|
exports.info = info;
|
|
/**
|
|
* Begin an output group.
|
|
*
|
|
* Output until the next `groupEnd` will be foldable in this group
|
|
*
|
|
* @param name The name of the output group
|
|
*/
|
|
function startGroup(name) {
|
|
command_1.issue('group', name);
|
|
}
|
|
exports.startGroup = startGroup;
|
|
/**
|
|
* End an output group.
|
|
*/
|
|
function endGroup() {
|
|
command_1.issue('endgroup');
|
|
}
|
|
exports.endGroup = endGroup;
|
|
/**
|
|
* Wrap an asynchronous function call in a group.
|
|
*
|
|
* Returns the same type as the function itself.
|
|
*
|
|
* @param name The name of the group
|
|
* @param fn The function to wrap in the group
|
|
*/
|
|
function group(name, fn) {
|
|
return __awaiter(this, void 0, void 0, function* () {
|
|
startGroup(name);
|
|
let result;
|
|
try {
|
|
result = yield fn();
|
|
}
|
|
finally {
|
|
endGroup();
|
|
}
|
|
return result;
|
|
});
|
|
}
|
|
exports.group = group;
|
|
//-----------------------------------------------------------------------
|
|
// Wrapper action state
|
|
//-----------------------------------------------------------------------
|
|
/**
|
|
* Saves state for current action, the state can only be retrieved by this action's post job execution.
|
|
*
|
|
* @param name name of the state to store
|
|
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
|
|
*/
|
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
function saveState(name, value) {
|
|
command_1.issueCommand('save-state', { name }, value);
|
|
}
|
|
exports.saveState = saveState;
|
|
/**
|
|
* Gets the value of an state set by this action's main execution.
|
|
*
|
|
* @param name name of the state to get
|
|
* @returns string
|
|
*/
|
|
function getState(name) {
|
|
return process.env[`STATE_${name}`] || '';
|
|
}
|
|
exports.getState = getState;
|
|
//# sourceMappingURL=core.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 717:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
// For internal use, subject to change.
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
result["default"] = mod;
|
|
return result;
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
// We use any as a valid input type
|
|
/* eslint-disable @typescript-eslint/no-explicit-any */
|
|
const fs = __importStar(__nccwpck_require__(5747));
|
|
const os = __importStar(__nccwpck_require__(2087));
|
|
const utils_1 = __nccwpck_require__(5278);
|
|
function issueCommand(command, message) {
|
|
const filePath = process.env[`GITHUB_${command}`];
|
|
if (!filePath) {
|
|
throw new Error(`Unable to find environment variable for file command ${command}`);
|
|
}
|
|
if (!fs.existsSync(filePath)) {
|
|
throw new Error(`Missing file at path: ${filePath}`);
|
|
}
|
|
fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {
|
|
encoding: 'utf8'
|
|
});
|
|
}
|
|
exports.issueCommand = issueCommand;
|
|
//# sourceMappingURL=file-command.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5278:
|
|
/***/ ((__unused_webpack_module, exports) => {
|
|
|
|
"use strict";
|
|
|
|
// We use any as a valid input type
|
|
/* eslint-disable @typescript-eslint/no-explicit-any */
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
/**
|
|
* Sanitizes an input into a string so it can be passed into issueCommand safely
|
|
* @param input input to sanitize into a string
|
|
*/
|
|
/**
 * Sanitizes an input into a string so it can be passed into issueCommand safely
 * @param input input to sanitize into a string
 */
function toCommandValue(input) {
    // Nullish values become the empty string.
    if (input == null) {
        return '';
    }
    // Strings (primitive or boxed) pass through untouched; everything else
    // is serialized as JSON.
    const isString = typeof input === 'string' || input instanceof String;
    return isString ? input : JSON.stringify(input);
}
|
|
exports.toCommandValue = toCommandValue;
|
|
//# sourceMappingURL=utils.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 4087:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.Context = void 0;
|
|
const fs_1 = __nccwpck_require__(5747);
|
|
const os_1 = __nccwpck_require__(2087);
|
|
class Context {
|
|
/**
|
|
* Hydrate the context from the environment
|
|
*/
|
|
constructor() {
|
|
this.payload = {};
|
|
if (process.env.GITHUB_EVENT_PATH) {
|
|
if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {
|
|
this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
|
|
}
|
|
else {
|
|
const path = process.env.GITHUB_EVENT_PATH;
|
|
process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);
|
|
}
|
|
}
|
|
this.eventName = process.env.GITHUB_EVENT_NAME;
|
|
this.sha = process.env.GITHUB_SHA;
|
|
this.ref = process.env.GITHUB_REF;
|
|
this.workflow = process.env.GITHUB_WORKFLOW;
|
|
this.action = process.env.GITHUB_ACTION;
|
|
this.actor = process.env.GITHUB_ACTOR;
|
|
this.job = process.env.GITHUB_JOB;
|
|
this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);
|
|
this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);
|
|
}
|
|
get issue() {
|
|
const payload = this.payload;
|
|
return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });
|
|
}
|
|
get repo() {
|
|
if (process.env.GITHUB_REPOSITORY) {
|
|
const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
|
|
return { owner, repo };
|
|
}
|
|
if (this.payload.repository) {
|
|
return {
|
|
owner: this.payload.repository.owner.login,
|
|
repo: this.payload.repository.name
|
|
};
|
|
}
|
|
throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'");
|
|
}
|
|
}
|
|
exports.Context = Context;
|
|
//# sourceMappingURL=context.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5438:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
|
}) : (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
o[k2] = m[k];
|
|
}));
|
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
}) : function(o, v) {
|
|
o["default"] = v;
|
|
});
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
__setModuleDefault(result, mod);
|
|
return result;
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.getOctokit = exports.context = void 0;
|
|
const Context = __importStar(__nccwpck_require__(4087));
|
|
const utils_1 = __nccwpck_require__(3030);
|
|
exports.context = new Context.Context();
|
|
/**
|
|
* Returns a hydrated octokit ready to use for GitHub Actions
|
|
*
|
|
* @param token the repo PAT or GITHUB_TOKEN
|
|
* @param options other options to set
|
|
*/
|
|
/**
 * Returns a hydrated octokit ready to use for GitHub Actions
 *
 * @param token the repo PAT or GITHUB_TOKEN
 * @param options other options to set
 */
function getOctokit(token, options) {
    const octokitOptions = utils_1.getOctokitOptions(token, options);
    return new utils_1.GitHub(octokitOptions);
}
|
|
exports.getOctokit = getOctokit;
|
|
//# sourceMappingURL=github.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 7914:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
|
}) : (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
o[k2] = m[k];
|
|
}));
|
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
}) : function(o, v) {
|
|
o["default"] = v;
|
|
});
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
__setModuleDefault(result, mod);
|
|
return result;
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0;
|
|
const httpClient = __importStar(__nccwpck_require__(9925));
|
|
/**
 * Resolves the Octokit auth string from an explicit token or options.auth.
 * Exactly one of the two must be provided; supplying neither or both throws.
 */
function getAuthString(token, options) {
    const hasToken = Boolean(token);
    const hasAuth = Boolean(options.auth);
    if (!hasToken && !hasAuth) {
        throw new Error('Parameter token or opts.auth is required');
    }
    if (hasToken && hasAuth) {
        throw new Error('Parameters token and opts.auth may not both be specified');
    }
    // A string auth option wins; otherwise build the "token <PAT>" form.
    if (typeof options.auth === 'string') {
        return options.auth;
    }
    return `token ${token}`;
}
|
|
exports.getAuthString = getAuthString;
|
|
function getProxyAgent(destinationUrl) {
|
|
const hc = new httpClient.HttpClient();
|
|
return hc.getAgent(destinationUrl);
|
|
}
|
|
exports.getProxyAgent = getProxyAgent;
|
|
/**
 * Returns the GitHub API base URL, honoring the GITHUB_API_URL override
 * (set on GitHub Enterprise runners); defaults to the public API.
 */
function getApiBaseUrl() {
    const override = process.env['GITHUB_API_URL'];
    return override ? override : 'https://api.github.com';
}
|
|
exports.getApiBaseUrl = getApiBaseUrl;
|
|
//# sourceMappingURL=utils.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 3030:
|
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
|
}) : (function(o, m, k, k2) {
|
|
if (k2 === undefined) k2 = k;
|
|
o[k2] = m[k];
|
|
}));
|
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
}) : function(o, v) {
|
|
o["default"] = v;
|
|
});
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
__setModuleDefault(result, mod);
|
|
return result;
|
|
};
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
exports.getOctokitOptions = exports.GitHub = exports.context = void 0;
|
|
const Context = __importStar(__nccwpck_require__(4087));
|
|
const Utils = __importStar(__nccwpck_require__(7914));
|
|
// octokit + plugins
|
|
const core_1 = __nccwpck_require__(6762);
|
|
const plugin_rest_endpoint_methods_1 = __nccwpck_require__(3044);
|
|
const plugin_paginate_rest_1 = __nccwpck_require__(4193);
|
|
exports.context = new Context.Context();
|
|
const baseUrl = Utils.getApiBaseUrl();
|
|
const defaults = {
|
|
baseUrl,
|
|
request: {
|
|
agent: Utils.getProxyAgent(baseUrl)
|
|
}
|
|
};
|
|
exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults);
|
|
/**
|
|
* Convience function to correctly format Octokit Options to pass into the constructor.
|
|
*
|
|
* @param token the repo PAT or GITHUB_TOKEN
|
|
* @param options other options to set
|
|
*/
|
|
/**
 * Convenience function to correctly format Octokit Options to pass into the constructor.
 *
 * @param token the repo PAT or GITHUB_TOKEN
 * @param options other options to set
 */
function getOctokitOptions(token, options) {
    // Shallow clone so the caller's object is never mutated.
    const opts = { ...(options || {}) };
    const auth = Utils.getAuthString(token, opts);
    if (auth) {
        opts.auth = auth;
    }
    return opts;
}
|
|
exports.getOctokitOptions = getOctokitOptions;
|
|
//# sourceMappingURL=utils.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 9925:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
const http = __nccwpck_require__(8605);
|
|
const https = __nccwpck_require__(7211);
|
|
const pm = __nccwpck_require__(6443);
|
|
let tunnel;
|
|
var HttpCodes;
|
|
(function (HttpCodes) {
|
|
HttpCodes[HttpCodes["OK"] = 200] = "OK";
|
|
HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
|
|
HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
|
|
HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
|
|
HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
|
|
HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
|
|
HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
|
|
HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
|
|
HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
|
|
HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
|
|
HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
|
|
HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
|
|
HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
|
|
HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
|
|
HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
|
|
HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
|
|
HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
|
|
HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
|
|
HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
|
|
HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
|
|
HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
|
|
HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
|
|
HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
|
|
HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
|
|
HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
|
|
HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
|
|
HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
|
|
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
|
|
var Headers;
|
|
(function (Headers) {
|
|
Headers["Accept"] = "accept";
|
|
Headers["ContentType"] = "content-type";
|
|
})(Headers = exports.Headers || (exports.Headers = {}));
|
|
var MediaTypes;
|
|
(function (MediaTypes) {
|
|
MediaTypes["ApplicationJson"] = "application/json";
|
|
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
|
|
/**
|
|
* Returns the proxy URL, depending upon the supplied url and proxy environment variables.
|
|
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
|
|
*/
|
|
function getProxyUrl(serverUrl) {
|
|
let proxyUrl = pm.getProxyUrl(new URL(serverUrl));
|
|
return proxyUrl ? proxyUrl.href : '';
|
|
}
|
|
exports.getProxyUrl = getProxyUrl;
|
|
const HttpRedirectCodes = [
|
|
HttpCodes.MovedPermanently,
|
|
HttpCodes.ResourceMoved,
|
|
HttpCodes.SeeOther,
|
|
HttpCodes.TemporaryRedirect,
|
|
HttpCodes.PermanentRedirect
|
|
];
|
|
const HttpResponseRetryCodes = [
|
|
HttpCodes.BadGateway,
|
|
HttpCodes.ServiceUnavailable,
|
|
HttpCodes.GatewayTimeout
|
|
];
|
|
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
|
|
const ExponentialBackoffCeiling = 10;
|
|
const ExponentialBackoffTimeSlice = 5;
|
|
// Error subclass for non-success HTTP responses; carries the status code so
// callers can branch on it (e.g. retry only on specific codes).
class HttpClientError extends Error {
    constructor(message, statusCode) {
        super(message);
        this.name = 'HttpClientError';
        // HTTP status that produced this error (e.g. 404, 500).
        this.statusCode = statusCode;
        // Restore the prototype chain so `instanceof HttpClientError` works
        // even when Error subclassing has been transpiled down-level.
        Object.setPrototypeOf(this, HttpClientError.prototype);
    }
}
|
|
exports.HttpClientError = HttpClientError;
|
|
/**
 * Wraps a Node incoming response message and exposes the body as a promise.
 */
class HttpClientResponse {
    /** @param message the underlying IncomingMessage (event emitter). */
    constructor(message) {
        this.message = message;
    }
    /**
     * Buffers the full response body and resolves it as a string.
     *
     * BUG FIX: the executor used to be an `async` function -- an anti-pattern:
     * any exception thrown inside it is silently lost instead of rejecting the
     * promise. It also rebuilt the buffer with Buffer.concat on every chunk
     * (accidentally O(n^2)); chunks are now collected and joined once at the end.
     *
     * @returns {Promise<string>} the concatenated body
     */
    readBody() {
        return new Promise((resolve) => {
            const chunks = [];
            this.message.on('data', (chunk) => {
                chunks.push(chunk);
            });
            this.message.on('end', () => {
                resolve(Buffer.concat(chunks).toString());
            });
        });
    }
}
|
|
exports.HttpClientResponse = HttpClientResponse;
|
|
/**
 * Reports whether the given request URL uses the https: scheme.
 * @param {string} requestUrl absolute URL; throws (via URL) if malformed.
 */
function isHttps(requestUrl) {
    return new URL(requestUrl).protocol === 'https:';
}
|
|
exports.isHttps = isHttps;
|
|
class HttpClient {
|
|
constructor(userAgent, handlers, requestOptions) {
|
|
this._ignoreSslError = false;
|
|
this._allowRedirects = true;
|
|
this._allowRedirectDowngrade = false;
|
|
this._maxRedirects = 50;
|
|
this._allowRetries = false;
|
|
this._maxRetries = 1;
|
|
this._keepAlive = false;
|
|
this._disposed = false;
|
|
this.userAgent = userAgent;
|
|
this.handlers = handlers || [];
|
|
this.requestOptions = requestOptions;
|
|
if (requestOptions) {
|
|
if (requestOptions.ignoreSslError != null) {
|
|
this._ignoreSslError = requestOptions.ignoreSslError;
|
|
}
|
|
this._socketTimeout = requestOptions.socketTimeout;
|
|
if (requestOptions.allowRedirects != null) {
|
|
this._allowRedirects = requestOptions.allowRedirects;
|
|
}
|
|
if (requestOptions.allowRedirectDowngrade != null) {
|
|
this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
|
|
}
|
|
if (requestOptions.maxRedirects != null) {
|
|
this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
|
|
}
|
|
if (requestOptions.keepAlive != null) {
|
|
this._keepAlive = requestOptions.keepAlive;
|
|
}
|
|
if (requestOptions.allowRetries != null) {
|
|
this._allowRetries = requestOptions.allowRetries;
|
|
}
|
|
if (requestOptions.maxRetries != null) {
|
|
this._maxRetries = requestOptions.maxRetries;
|
|
}
|
|
}
|
|
}
|
|
options(requestUrl, additionalHeaders) {
|
|
return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
|
|
}
|
|
get(requestUrl, additionalHeaders) {
|
|
return this.request('GET', requestUrl, null, additionalHeaders || {});
|
|
}
|
|
del(requestUrl, additionalHeaders) {
|
|
return this.request('DELETE', requestUrl, null, additionalHeaders || {});
|
|
}
|
|
post(requestUrl, data, additionalHeaders) {
|
|
return this.request('POST', requestUrl, data, additionalHeaders || {});
|
|
}
|
|
patch(requestUrl, data, additionalHeaders) {
|
|
return this.request('PATCH', requestUrl, data, additionalHeaders || {});
|
|
}
|
|
put(requestUrl, data, additionalHeaders) {
|
|
return this.request('PUT', requestUrl, data, additionalHeaders || {});
|
|
}
|
|
head(requestUrl, additionalHeaders) {
|
|
return this.request('HEAD', requestUrl, null, additionalHeaders || {});
|
|
}
|
|
sendStream(verb, requestUrl, stream, additionalHeaders) {
|
|
return this.request(verb, requestUrl, stream, additionalHeaders);
|
|
}
|
|
/**
|
|
* Gets a typed object from an endpoint
|
|
* Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
|
|
*/
|
|
async getJson(requestUrl, additionalHeaders = {}) {
|
|
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
|
|
let res = await this.get(requestUrl, additionalHeaders);
|
|
return this._processResponse(res, this.requestOptions);
|
|
}
|
|
async postJson(requestUrl, obj, additionalHeaders = {}) {
|
|
let data = JSON.stringify(obj, null, 2);
|
|
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
|
|
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
|
|
let res = await this.post(requestUrl, data, additionalHeaders);
|
|
return this._processResponse(res, this.requestOptions);
|
|
}
|
|
async putJson(requestUrl, obj, additionalHeaders = {}) {
|
|
let data = JSON.stringify(obj, null, 2);
|
|
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
|
|
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
|
|
let res = await this.put(requestUrl, data, additionalHeaders);
|
|
return this._processResponse(res, this.requestOptions);
|
|
}
|
|
async patchJson(requestUrl, obj, additionalHeaders = {}) {
|
|
let data = JSON.stringify(obj, null, 2);
|
|
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
|
|
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
|
|
let res = await this.patch(requestUrl, data, additionalHeaders);
|
|
return this._processResponse(res, this.requestOptions);
|
|
}
|
|
    /**
     * Makes a raw http request.
     * All other methods such as get, post, patch, and request ultimately call this.
     * Prefer get, del, post and patch
     * @param verb - HTTP method (upper-case string)
     * @param requestUrl - absolute URL string
     * @param data - string body or readable stream, or null
     * @param headers - headers object (mutated here: authorization is stripped
     *   on cross-host redirects)
     * @returns Promise of an HttpClientResponse
     */
    async request(verb, requestUrl, data, headers) {
        if (this._disposed) {
            throw new Error('Client has already been disposed.');
        }
        let parsedUrl = new URL(requestUrl);
        let info = this._prepareRequest(verb, parsedUrl, headers);
        // Only perform retries on reads since writes may not be idempotent.
        let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1
            ? this._maxRetries + 1
            : 1;
        let numTries = 0;
        let response;
        while (numTries < maxTries) {
            response = await this.requestRaw(info, data);
            // Check if it's an authentication challenge
            if (response &&
                response.message &&
                response.message.statusCode === HttpCodes.Unauthorized) {
                let authenticationHandler;
                // First registered handler that claims the 401 challenge wins.
                for (let i = 0; i < this.handlers.length; i++) {
                    if (this.handlers[i].canHandleAuthentication(response)) {
                        authenticationHandler = this.handlers[i];
                        break;
                    }
                }
                if (authenticationHandler) {
                    return authenticationHandler.handleAuthentication(this, info, data);
                }
                else {
                    // We have received an unauthorized response but have no handlers to handle it.
                    // Let the response return to the caller.
                    return response;
                }
            }
            let redirectsRemaining = this._maxRedirects;
            // Follow 3xx responses manually, up to the configured limit.
            while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&
                this._allowRedirects &&
                redirectsRemaining > 0) {
                const redirectUrl = response.message.headers['location'];
                if (!redirectUrl) {
                    // if there's no location to redirect to, we won't
                    break;
                }
                let parsedRedirectUrl = new URL(redirectUrl);
                if (parsedUrl.protocol == 'https:' &&
                    parsedUrl.protocol != parsedRedirectUrl.protocol &&
                    !this._allowRedirectDowngrade) {
                    throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
                }
                // we need to finish reading the response before reassigning response
                // which will leak the open socket.
                await response.readBody();
                // strip authorization header if redirected to a different hostname
                if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
                    for (let header in headers) {
                        // header names are case insensitive
                        if (header.toLowerCase() === 'authorization') {
                            delete headers[header];
                        }
                    }
                }
                // let's make the request with the new redirectUrl
                info = this._prepareRequest(verb, parsedRedirectUrl, headers);
                response = await this.requestRaw(info, data);
                redirectsRemaining--;
            }
            if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {
                // If not a retry code, return immediately instead of retrying
                return response;
            }
            numTries += 1;
            if (numTries < maxTries) {
                // Drain the body so the socket can be reused, then back off
                // exponentially before the next attempt.
                await response.readBody();
                await this._performExponentialBackoff(numTries);
            }
        }
        return response;
    }
|
|
/**
|
|
* Needs to be called if keepAlive is set to true in request options.
|
|
*/
|
|
dispose() {
|
|
if (this._agent) {
|
|
this._agent.destroy();
|
|
}
|
|
this._disposed = true;
|
|
}
|
|
/**
|
|
* Raw request.
|
|
* @param info
|
|
* @param data
|
|
*/
|
|
requestRaw(info, data) {
|
|
return new Promise((resolve, reject) => {
|
|
let callbackForResult = function (err, res) {
|
|
if (err) {
|
|
reject(err);
|
|
}
|
|
resolve(res);
|
|
};
|
|
this.requestRawWithCallback(info, data, callbackForResult);
|
|
});
|
|
}
|
|
    /**
     * Raw request with callback.
     * @param info - request info produced by _prepareRequest
     * @param data - string body (written as utf8) or readable stream (piped)
     * @param onResult - node-style callback (err, HttpClientResponse); invoked
     *   at most once even if multiple error/timeout events fire
     */
    requestRawWithCallback(info, data, onResult) {
        let socket;
        if (typeof data === 'string') {
            info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
        }
        // Guard: timeout and 'error' can both fire; only the first outcome
        // reaches the caller.
        let callbackCalled = false;
        let handleResult = (err, res) => {
            if (!callbackCalled) {
                callbackCalled = true;
                onResult(err, res);
            }
        };
        let req = info.httpModule.request(info.options, (msg) => {
            let res = new HttpClientResponse(msg);
            handleResult(null, res);
        });
        // Capture the socket so the timeout handler can close it explicitly.
        req.on('socket', sock => {
            socket = sock;
        });
        // If we ever get disconnected, we want the socket to timeout eventually
        req.setTimeout(this._socketTimeout || 3 * 60000, () => {
            if (socket) {
                socket.end();
            }
            handleResult(new Error('Request timeout: ' + info.options.path), null);
        });
        req.on('error', function (err) {
            // err has statusCode property
            // res should have headers
            handleResult(err, null);
        });
        if (data && typeof data === 'string') {
            req.write(data, 'utf8');
        }
        if (data && typeof data !== 'string') {
            // Stream body: only end the request once the source stream closes.
            data.on('close', function () {
                req.end();
            });
            data.pipe(req);
        }
        else {
            req.end();
        }
    }
|
|
/**
|
|
* Gets an http agent. This function is useful when you need an http agent that handles
|
|
* routing through a proxy server - depending upon the url and proxy environment variables.
|
|
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
|
|
*/
|
|
getAgent(serverUrl) {
|
|
let parsedUrl = new URL(serverUrl);
|
|
return this._getAgent(parsedUrl);
|
|
}
|
|
_prepareRequest(method, requestUrl, headers) {
|
|
const info = {};
|
|
info.parsedUrl = requestUrl;
|
|
const usingSsl = info.parsedUrl.protocol === 'https:';
|
|
info.httpModule = usingSsl ? https : http;
|
|
const defaultPort = usingSsl ? 443 : 80;
|
|
info.options = {};
|
|
info.options.host = info.parsedUrl.hostname;
|
|
info.options.port = info.parsedUrl.port
|
|
? parseInt(info.parsedUrl.port)
|
|
: defaultPort;
|
|
info.options.path =
|
|
(info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
|
|
info.options.method = method;
|
|
info.options.headers = this._mergeHeaders(headers);
|
|
if (this.userAgent != null) {
|
|
info.options.headers['user-agent'] = this.userAgent;
|
|
}
|
|
info.options.agent = this._getAgent(info.parsedUrl);
|
|
// gives handlers an opportunity to participate
|
|
if (this.handlers) {
|
|
this.handlers.forEach(handler => {
|
|
handler.prepareRequest(info.options);
|
|
});
|
|
}
|
|
return info;
|
|
}
|
|
_mergeHeaders(headers) {
|
|
const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
|
|
if (this.requestOptions && this.requestOptions.headers) {
|
|
return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
|
|
}
|
|
return lowercaseKeys(headers || {});
|
|
}
|
|
_getExistingOrDefaultHeader(additionalHeaders, header, _default) {
|
|
const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
|
|
let clientHeader;
|
|
if (this.requestOptions && this.requestOptions.headers) {
|
|
clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
|
|
}
|
|
return additionalHeaders[header] || clientHeader || _default;
|
|
}
|
|
    /**
     * Returns the http/https agent for a URL, creating and caching a
     * proxy-tunneling agent or a keep-alive agent as needed; falls back to the
     * appropriate global agent otherwise.
     * @param parsedUrl - URL instance for the target server
     */
    _getAgent(parsedUrl) {
        let agent;
        let proxyUrl = pm.getProxyUrl(parsedUrl);
        let useProxy = proxyUrl && proxyUrl.hostname;
        // Reuse previously-built agents when keep-alive is on.
        if (this._keepAlive && useProxy) {
            agent = this._proxyAgent;
        }
        if (this._keepAlive && !useProxy) {
            agent = this._agent;
        }
        // if agent is already assigned use that agent.
        if (!!agent) {
            return agent;
        }
        const usingSsl = parsedUrl.protocol === 'https:';
        let maxSockets = 100;
        if (!!this.requestOptions) {
            maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
        }
        if (useProxy) {
            // If using proxy, need tunnel
            // (lazy-require so the dependency is only loaded when proxied)
            if (!tunnel) {
                tunnel = __nccwpck_require__(4294);
            }
            const agentOptions = {
                maxSockets: maxSockets,
                keepAlive: this._keepAlive,
                proxy: {
                    // Only attach proxyAuth when credentials are configured.
                    ...((proxyUrl.username || proxyUrl.password) && {
                        proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`
                    }),
                    host: proxyUrl.hostname,
                    port: proxyUrl.port
                }
            };
            // Choose the tunnel flavor from target protocol x proxy protocol.
            let tunnelAgent;
            const overHttps = proxyUrl.protocol === 'https:';
            if (usingSsl) {
                tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
            }
            else {
                tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
            }
            agent = tunnelAgent(agentOptions);
            this._proxyAgent = agent;
        }
        // if reusing agent across request and tunneling agent isn't assigned create a new agent
        if (this._keepAlive && !agent) {
            const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };
            agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
            this._agent = agent;
        }
        // if not using private agent and tunnel agent isn't setup then use global agent
        if (!agent) {
            agent = usingSsl ? https.globalAgent : http.globalAgent;
        }
        if (usingSsl && this._ignoreSslError) {
            // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
            // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
            // we have to cast it to any and change it directly
            agent.options = Object.assign(agent.options || {}, {
                rejectUnauthorized: false
            });
        }
        return agent;
    }
|
|
_performExponentialBackoff(retryNumber) {
|
|
retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
|
|
const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
|
|
return new Promise(resolve => setTimeout(() => resolve(), ms));
|
|
}
|
|
static dateTimeDeserializer(key, value) {
|
|
if (typeof value === 'string') {
|
|
let a = new Date(value);
|
|
if (!isNaN(a.valueOf())) {
|
|
return a;
|
|
}
|
|
}
|
|
return value;
|
|
}
|
|
    /**
     * Reads and JSON-parses the response body, resolving with
     * { statusCode, result, headers }. A 404 resolves with result null; status
     * codes above 299 reject with an HttpClientError carrying the parsed body.
     * @param res - HttpClientResponse
     * @param options - request options; `deserializeDates` enables Date revival
     */
    async _processResponse(res, options) {
        return new Promise(async (resolve, reject) => {
            const statusCode = res.message.statusCode;
            const response = {
                statusCode: statusCode,
                result: null,
                headers: {}
            };
            // not found leads to null obj returned
            // NOTE(review): execution deliberately continues after this
            // resolve() so the body is still drained; a promise settles only
            // once, so the later resolve/reject calls become no-ops.
            if (statusCode == HttpCodes.NotFound) {
                resolve(response);
            }
            let obj;
            let contents;
            // get the result from the body
            try {
                contents = await res.readBody();
                if (contents && contents.length > 0) {
                    if (options && options.deserializeDates) {
                        obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);
                    }
                    else {
                        obj = JSON.parse(contents);
                    }
                    response.result = obj;
                }
                response.headers = res.message.headers;
            }
            catch (err) {
                // Invalid resource (contents not json); leaving result obj null
            }
            // note that 3xx redirects are handled by the http layer.
            if (statusCode > 299) {
                let msg;
                // if exception/error in body, attempt to get better error
                if (obj && obj.message) {
                    msg = obj.message;
                }
                else if (contents && contents.length > 0) {
                    // it may be the case that the exception is in the body message as string
                    msg = contents;
                }
                else {
                    msg = 'Failed request: (' + statusCode + ')';
                }
                let err = new HttpClientError(msg, statusCode);
                err.result = response.result;
                reject(err);
            }
            else {
                resolve(response);
            }
        });
    }
|
|
}
|
|
exports.HttpClient = HttpClient;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 6443:
|
|
/***/ ((__unused_webpack_module, exports) => {
|
|
|
|
"use strict";
|
|
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
/**
 * Resolves the proxy URL configured via environment variables for a request
 * URL, honoring the no_proxy/NO_PROXY bypass list.
 * @param reqUrl - parsed request URL
 * @returns URL instance for the proxy, or undefined when no proxy applies
 */
function getProxyUrl(reqUrl) {
    let proxyUrl;
    if (checkBypass(reqUrl)) {
        return proxyUrl;
    }
    // https requests use https_proxy, everything else http_proxy; lowercase
    // variables take precedence over their uppercase counterparts.
    const usingSsl = reqUrl.protocol === 'https:';
    const proxyVar = usingSsl
        ? process.env['https_proxy'] || process.env['HTTPS_PROXY']
        : process.env['http_proxy'] || process.env['HTTP_PROXY'];
    if (proxyVar) {
        proxyUrl = new URL(proxyVar);
    }
    return proxyUrl;
}
exports.getProxyUrl = getProxyUrl;
|
|
/**
 * Decides whether a request URL should bypass the proxy according to the
 * comma-separated no_proxy/NO_PROXY environment variable.
 * @param reqUrl - parsed request URL
 * @returns true when the host (or host:port) matches a NO_PROXY entry
 */
function checkBypass(reqUrl) {
    if (!reqUrl.hostname) {
        return false;
    }
    const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
    if (!noProxy) {
        return false;
    }
    // Determine the effective port: explicit, else the protocol default.
    let reqPort;
    if (reqUrl.port) {
        reqPort = Number(reqUrl.port);
    }
    else if (reqUrl.protocol === 'http:') {
        reqPort = 80;
    }
    else if (reqUrl.protocol === 'https:') {
        reqPort = 443;
    }
    // Candidate host strings to match: bare hostname, plus hostname:port.
    const upperReqHosts = [reqUrl.hostname.toUpperCase()];
    if (typeof reqPort === 'number') {
        upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
    }
    // Bypass when any NO_PROXY entry matches a candidate exactly
    // (comparison is case-insensitive; blank entries are ignored).
    const entries = noProxy
        .split(',')
        .map(entry => entry.trim().toUpperCase())
        .filter(entry => entry.length > 0);
    return entries.some(entry => upperReqHosts.includes(entry));
}
|
|
exports.checkBypass = checkBypass;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 334:
|
|
/***/ ((__unused_webpack_module, exports) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
|
|
/**
 * Resolves an auth object for the given token, classifying it by shape:
 * three dot-separated segments -> JWT ("app"), a "v<digits>." prefix ->
 * "installation", anything else -> "oauth".
 * @param {string} token
 */
async function auth(token) {
  const isJsonWebToken = token.split(/\./).length === 3;
  const isInstallationToken = /^v\d+\./.test(token);
  const tokenType = isJsonWebToken ? "app" : isInstallationToken ? "installation" : "oauth";
  return {
    type: "token",
    token,
    tokenType
  };
}
|
|
|
|
/**
 * Prefix token for usage in the Authorization header
 *
 * @param token OAuth token or JSON Web Token
 */
function withAuthorizationPrefix(token) {
  // Three dot-separated segments means a JWT, which uses the bearer scheme;
  // everything else uses the classic token scheme.
  const isJsonWebToken = token.split(/\./).length === 3;
  return isJsonWebToken ? `bearer ${token}` : `token ${token}`;
}
|
|
|
|
/**
 * Request hook: merges route/parameters into an endpoint, attaches the
 * Authorization header for `token`, and dispatches via `request`.
 */
async function hook(token, request, route, parameters) {
  const endpointOptions = request.endpoint.merge(route, parameters);
  const authorization = withAuthorizationPrefix(token);
  endpointOptions.headers.authorization = authorization;
  return request(endpointOptions);
}
|
|
|
|
/**
 * Builds a token-based auth strategy: a callable that resolves the auth
 * object, carrying a `hook` for wrapping requests.
 * @throws when `token` is missing or not a string
 */
const createTokenAuth = function createTokenAuth(token) {
  // Validate eagerly so misconfiguration fails at setup, not per-request.
  if (!token) {
    throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
  }

  if (typeof token !== "string") {
    throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string");
  }

  // Strip a scheme prefix the caller may have included; it is re-added by
  // withAuthorizationPrefix when the header is built.
  const normalizedToken = token.replace(/^(token|bearer) +/i, "");
  const authenticate = auth.bind(null, normalizedToken);
  authenticate.hook = hook.bind(null, normalizedToken);
  return authenticate;
};

exports.createTokenAuth = createTokenAuth;
|
|
//# sourceMappingURL=index.js.map
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 6762:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
|
|
var universalUserAgent = __nccwpck_require__(5030);
|
|
var beforeAfterHook = __nccwpck_require__(3682);
|
|
var request = __nccwpck_require__(6234);
|
|
var graphql = __nccwpck_require__(8467);
|
|
var authToken = __nccwpck_require__(334);
|
|
|
|
/**
 * Shallow-copies the own enumerable string-keyed properties of `source`,
 * skipping any key listed in `excluded`. Nullish sources yield {}.
 */
function _objectWithoutPropertiesLoose(source, excluded) {
  if (source == null) return {};
  const target = {};

  for (const key of Object.keys(source)) {
    if (excluded.indexOf(key) >= 0) continue;
    target[key] = source[key];
  }

  return target;
}
|
|
|
|
/**
 * Like _objectWithoutPropertiesLoose, but also copies own enumerable symbol
 * keys (used to implement object rest destructuring).
 */
function _objectWithoutProperties(source, excluded) {
  if (source == null) return {};

  const target = _objectWithoutPropertiesLoose(source, excluded);

  if (Object.getOwnPropertySymbols) {
    for (const sym of Object.getOwnPropertySymbols(source)) {
      if (excluded.indexOf(sym) >= 0) continue;
      // Only copy enumerable symbol properties, mirroring spread semantics.
      if (!Object.prototype.propertyIsEnumerable.call(source, sym)) continue;
      target[sym] = source[sym];
    }
  }

  return target;
}
|
|
|
|
// Library version advertised in the default user-agent header.
const VERSION = "3.4.0";

/**
 * Core Octokit client: wires together request defaults, GraphQL, logging,
 * authentication, and the plugin system.
 */
class Octokit {
  /**
   * @param options - client configuration (all optional): baseUrl, userAgent,
   *   previews, timeZone, request, log, auth, authStrategy.
   */
  constructor(options = {}) {
    // Hook collection lets auth strategies and plugins wrap every request.
    const hook = new beforeAfterHook.Collection();
    const requestDefaults = {
      baseUrl: request.request.endpoint.DEFAULTS.baseUrl,
      headers: {},
      request: Object.assign({}, options.request, {
        // @ts-ignore internal usage only, no need to type
        hook: hook.bind(null, "request")
      }),
      mediaType: {
        previews: [],
        format: ""
      }
    }; // prepend default user agent with `options.userAgent` if set

    requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" ");

    if (options.baseUrl) {
      requestDefaults.baseUrl = options.baseUrl;
    }

    if (options.previews) {
      requestDefaults.mediaType.previews = options.previews;
    }

    if (options.timeZone) {
      requestDefaults.headers["time-zone"] = options.timeZone;
    }

    this.request = request.request.defaults(requestDefaults);
    this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);
    // Default logger: debug/info are no-ops, warn/error go to the console;
    // any caller-supplied methods override these.
    this.log = Object.assign({
      debug: () => {},
      info: () => {},
      warn: console.warn.bind(console),
      error: console.error.bind(console)
    }, options.log);
    this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance
    // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.
    // (2) If only `options.auth` is set, use the default token authentication strategy.
    // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.
    // TODO: type `options.auth` based on `options.authStrategy`.

    if (!options.authStrategy) {
      if (!options.auth) {
        // (1)
        this.auth = async () => ({
          type: "unauthenticated"
        });
      } else {
        // (2)
        const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯

        hook.wrap("request", auth.hook);
        this.auth = auth;
      }
    } else {
      // (3)
      const {
        authStrategy
      } = options,
            otherOptions = _objectWithoutProperties(options, ["authStrategy"]);

      const auth = authStrategy(Object.assign({
        request: this.request,
        log: this.log,
        // we pass the current octokit instance as well as its constructor options
        // to allow for authentication strategies that return a new octokit instance
        // that shares the same internal state as the current one. The original
        // requirement for this was the "event-octokit" authentication strategy
        // of https://github.com/probot/octokit-auth-probot.
        octokit: this,
        octokitOptions: otherOptions
      }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯

      hook.wrap("request", auth.hook);
      this.auth = auth;
    } // apply plugins
    // https://stackoverflow.com/a/16345172

    // Run every registered plugin and merge whatever API surface it returns
    // onto this instance.
    const classConstructor = this.constructor;
    classConstructor.plugins.forEach(plugin => {
      Object.assign(this, plugin(this, options));
    });
  }

  /**
   * Returns a subclass whose constructor merges `defaults` into the options
   * passed by the caller; user agents are concatenated rather than replaced,
   * and a function `defaults` computes the options itself.
   */
  static defaults(defaults) {
    const OctokitWithDefaults = class extends this {
      constructor(...args) {
        const options = args[0] || {};

        if (typeof defaults === "function") {
          super(defaults(options));
          return;
        }

        super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? {
          userAgent: `${options.userAgent} ${defaults.userAgent}`
        } : null));
      }

    };
    return OctokitWithDefaults;
  }
  /**
   * Attach a plugin (or many) to your Octokit instance.
   *
   * @example
   * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)
   */

  static plugin(...newPlugins) {
    var _a;

    // Subclass so plugin lists don't leak across unrelated Octokit classes;
    // plugins already registered on this class are skipped.
    const currentPlugins = this.plugins;
    const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);
    return NewOctokit;
  }

}
Octokit.VERSION = VERSION;
Octokit.plugins = [];

exports.Octokit = Octokit;
//# sourceMappingURL=index.js.map
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 9440:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
|
|
var isPlainObject = __nccwpck_require__(3287);
|
|
var universalUserAgent = __nccwpck_require__(5030);
|
|
|
|
/**
 * Returns a shallow copy of `object` with every own key lower-cased (header
 * names are case-insensitive). Nullish input yields {}.
 */
function lowercaseKeys(object) {
  if (!object) {
    return {};
  }

  const result = {};

  for (const key of Object.keys(object)) {
    result[key.toLowerCase()] = object[key];
  }

  return result;
}
|
|
|
|
/**
 * Recursively merges `options` into a shallow copy of `defaults`: plain
 * objects present in both are merged key-by-key, everything else in
 * `options` overwrites the default.
 */
function mergeDeep(defaults, options) {
  const result = Object.assign({}, defaults);

  for (const key of Object.keys(options)) {
    const value = options[key];

    if (isPlainObject.isPlainObject(value)) {
      if (key in defaults) {
        result[key] = mergeDeep(defaults[key], value);
      } else {
        Object.assign(result, {
          [key]: value
        });
      }
    } else {
      Object.assign(result, {
        [key]: value
      });
    }
  }

  return result;
}
|
|
|
|
/**
 * Deletes every key whose value is `undefined`, mutating `obj` in place.
 * @returns the same object, for chaining
 */
function removeUndefinedProperties(obj) {
  for (const key of Object.keys(obj)) {
    if (obj[key] === undefined) {
      delete obj[key];
    }
  }

  return obj;
}
|
|
|
|
/**
 * Merges endpoint defaults with a route/options pair into one options object.
 * `route` may be a "METHOD /path" string, a bare URL string, or an options
 * object.
 */
function merge(defaults, route, options) {
  if (typeof route === "string") {
    // "GET /path" -> { method, url }; a bare string is treated as the url.
    let [method, url] = route.split(" ");
    options = Object.assign(url ? {
      method,
      url
    } : {
      url: method
    }, options);
  } else {
    options = Object.assign({}, route);
  } // lowercase header names before merging with defaults to avoid duplicates

  options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging

  removeUndefinedProperties(options);
  removeUndefinedProperties(options.headers);
  const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten

  if (defaults && defaults.mediaType.previews.length) {
    // Default previews come first; duplicates from options are dropped.
    mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);
  }

  // Normalize preview names: the "-preview" suffix is implied downstream.
  mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, ""));
  return mergedOptions;
}
|
|
|
|
/**
 * Appends `parameters` to `url` as a query string. The search parameter `q`
 * is treated specially so its `+`-separated terms survive encoding.
 */
function addQueryParameters(url, parameters) {
  const names = Object.keys(parameters);

  if (names.length === 0) {
    return url;
  }

  // Continue an existing query string with "&", otherwise start one with "?".
  const separator = /\?/.test(url) ? "&" : "?";
  const pairs = names.map(name => {
    if (name === "q") {
      return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+");
    }

    return `${name}=${encodeURIComponent(parameters[name])}`;
  });
  return url + separator + pairs.join("&");
}
|
|
|
|
// Matches RFC 6570-style template expressions, e.g. "{owner}" or "{?page}".
const urlVariableRegex = /\{[^}]+\}/g;

// Strips the surrounding braces/operator characters from one expression and
// splits comma-separated variable lists: "{+foo,bar}" -> ["foo", "bar"].
function removeNonChars(variableName) {
  return variableName.replace(/^\W+|\W+$/g, "").split(/,/);
}

/**
 * Lists the variable names referenced by a URL template.
 * @param {string} url - e.g. "/repos/{owner}/{repo}"
 * @returns {string[]} variable names; empty when the URL has no placeholders
 */
function extractUrlVariableNames(url) {
  const matches = url.match(urlVariableRegex);

  if (!matches) {
    return [];
  }

  const names = [];
  for (const match of matches) {
    names.push(...removeNonChars(match));
  }
  return names;
}
|
|
|
|
/**
 * Returns a shallow copy of `object` without the keys listed in `keysToOmit`.
 */
function omit(object, keysToOmit) {
  const result = {};

  for (const key of Object.keys(object)) {
    if (!keysToOmit.includes(key)) {
      result[key] = object[key];
    }
  }

  return result;
}
|
|
|
|
// Based on https://github.com/bramstein/url-template, licensed under BSD
|
|
// TODO: create separate package.
|
|
//
|
|
// Copyright (c) 2012-2014, Bram Stein
|
|
// All rights reserved.
|
|
// Redistribution and use in source and binary forms, with or without
|
|
// modification, are permitted provided that the following conditions
|
|
// are met:
|
|
// 1. Redistributions of source code must retain the above copyright
|
|
// notice, this list of conditions and the following disclaimer.
|
|
// 2. Redistributions in binary form must reproduce the above copyright
|
|
// notice, this list of conditions and the following disclaimer in the
|
|
// documentation and/or other materials provided with the distribution.
|
|
// 3. The name of the author may not be used to endorse or promote products
|
|
// derived from this software without specific prior written permission.
|
|
// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
|
|
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
|
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
|
|
// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
|
|
// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
|
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
|
|
// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
|
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
|
|
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
|
|
/* istanbul ignore file */
|
|
/**
 * Percent-encodes a string while leaving existing %XX escapes and reserved
 * URI characters intact; square brackets (escaped by encodeURI) are restored
 * because RFC 6570 allows them literally.
 */
function encodeReserved(str) {
  return str
    .split(/(%[0-9A-Fa-f]{2})/g)
    .map(part => {
      if (/%[0-9A-Fa-f]/.test(part)) {
        return part;
      }
      return encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]");
    })
    .join("");
}
|
|
|
|
/**
 * Strict RFC 3986 encoding: encodeURIComponent plus escaping of the five
 * characters it leaves alone (! ' ( ) *), so only "unreserved" characters
 * remain literal.
 */
function encodeUnreserved(str) {
  return encodeURIComponent(str).replace(/[!'()*]/g, c =>
    "%" + c.charCodeAt(0).toString(16).toUpperCase()
  );
}
|
|
|
|
/**
 * Encodes a single template value. The "+" and "#" operators permit reserved
 * URI characters; all other operators fully percent-encode. When `key` is
 * given the result is rendered as "key=value".
 */
function encodeValue(operator, value, key) {
  const allowReserved = operator === "+" || operator === "#";
  const encoded = allowReserved ? encodeReserved(value) : encodeUnreserved(value);

  if (key) {
    return encodeUnreserved(key) + "=" + encoded;
  }
  return encoded;
}
|
|
|
|
/**
 * True for every value except undefined and null — distinguishes "absent"
 * from legitimate falsy values like 0 or "".
 */
function isDefined(value) {
  return value !== undefined && value !== null;
}
|
|
|
|
/**
 * True for the RFC 6570 operators that render values as key=value pairs.
 */
function isKeyOperator(operator) {
  return [";", "&", "?"].includes(operator);
}
|
|
|
|
/**
 * Expands one RFC 6570 template variable into its encoded string fragments.
 * @param context - variable values keyed by name
 * @param operator - template operator ("", "+", "#", ".", "/", ";", "?", "&")
 * @param key - the variable name
 * @param modifier - prefix length (":n") or explode ("*") modifier, if any
 * @returns array of encoded fragments (joined by the caller)
 */
function getValues(context, operator, key, modifier) {
  var value = context[key],
      result = [];

  if (isDefined(value) && value !== "") {
    if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
      value = value.toString();

      // The ":n" modifier truncates the value to its first n characters.
      if (modifier && modifier !== "*") {
        value = value.substring(0, parseInt(modifier, 10));
      }

      result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
    } else {
      if (modifier === "*") {
        // Explode: each array element / object entry becomes its own part.
        if (Array.isArray(value)) {
          value.filter(isDefined).forEach(function (value) {
            result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
          });
        } else {
          Object.keys(value).forEach(function (k) {
            if (isDefined(value[k])) {
              result.push(encodeValue(operator, value[k], k));
            }
          });
        }
      } else {
        // No explode: collapse the array/object into one comma-joined value.
        const tmp = [];

        if (Array.isArray(value)) {
          value.filter(isDefined).forEach(function (value) {
            tmp.push(encodeValue(operator, value));
          });
        } else {
          Object.keys(value).forEach(function (k) {
            if (isDefined(value[k])) {
              tmp.push(encodeUnreserved(k));
              tmp.push(encodeValue(operator, value[k].toString()));
            }
          });
        }

        if (isKeyOperator(operator)) {
          result.push(encodeUnreserved(key) + "=" + tmp.join(","));
        } else if (tmp.length !== 0) {
          result.push(tmp.join(","));
        }
      }
    }
  } else {
    // Undefined/empty handling depends on the operator (RFC 6570 §3.2.1).
    if (operator === ";") {
      if (isDefined(value)) {
        result.push(encodeUnreserved(key));
      }
    } else if (value === "" && (operator === "&" || operator === "?")) {
      result.push(encodeUnreserved(key) + "=");
    } else if (value === "") {
      result.push("");
    }
  }

  return result;
}
|
|
|
|
/**
 * Wraps a URL template in a lazy object: expansion happens only when
 * `expand(context)` is invoked on the result.
 */
function parseUrl(template) {
  return {
    expand: expand.bind(null, template)
  };
}
|
|
|
|
/**
 * Expands an RFC 6570 URL template against `context`.
 * @param template - e.g. "/repos/{owner}/{repo}{?page}"
 * @param context - values for the template variables
 * @returns the expanded URL string
 */
function expand(template, context) {
  var operators = ["+", "#", ".", "/", ";", "?", "&"];
  return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) {
    if (expression) {
      let operator = "";
      const values = [];

      // A leading operator character changes encoding and joining rules.
      if (operators.indexOf(expression.charAt(0)) !== -1) {
        operator = expression.charAt(0);
        expression = expression.substr(1);
      }

      // Each comma-separated variable may carry a ":n" or "*" modifier.
      expression.split(/,/g).forEach(function (variable) {
        var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable);
        values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));
      });

      if (operator && operator !== "+") {
        var separator = ",";

        if (operator === "?") {
          separator = "&";
        } else if (operator !== "#") {
          separator = operator;
        }

        // Non-"+" operators prefix the expansion with the operator itself
        // (e.g. "?" starts a query string, "#" a fragment).
        return (values.length !== 0 ? operator : "") + values.join(separator);
      } else {
        return values.join(",");
      }
    } else {
      // Literal text between expressions is encoded but keeps reserved chars.
      return encodeReserved(literal);
    }
  });
}
|
|
|
|
/**
 * Turns merged endpoint options into a concrete fetch-style request object:
 * { method, url, headers [, body] [, request] }. Template variables are
 * expanded into the URL; leftovers become query parameters (GET/HEAD) or the
 * request body (other verbs).
 */
function parse(options) {
  // https://fetch.spec.whatwg.org/#methods
  let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible

  let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}");
  let headers = Object.assign({}, options.headers);
  let body;
  // Everything that isn't a known request option is a template/query/body
  // parameter.
  let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later

  const urlVariableNames = extractUrlVariableNames(url);
  url = parseUrl(url).expand(parameters);

  if (!/^http/.test(url)) {
    url = options.baseUrl + url;
  }

  // Parameters consumed by the URL template (plus baseUrl) are dropped.
  const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl");
  const remainingParameters = omit(parameters, omittedParameters);
  const isBinaryRequest = /application\/octet-stream/i.test(headers.accept);

  if (!isBinaryRequest) {
    if (options.mediaType.format) {
      // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw
      headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(",");
    }

    if (options.mediaType.previews.length) {
      // Merge previews already in the accept header with the requested ones.
      const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || [];
      headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {
        const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json";
        return `application/vnd.github.${preview}-preview${format}`;
      }).join(",");
    }
  } // for GET/HEAD requests, set URL query parameters from remaining parameters
  // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters

  if (["GET", "HEAD"].includes(method)) {
    url = addQueryParameters(url, remainingParameters);
  } else {
    if ("data" in remainingParameters) {
      body = remainingParameters.data;
    } else {
      if (Object.keys(remainingParameters).length) {
        body = remainingParameters;
      } else {
        headers["content-length"] = 0;
      }
    }
  } // default content-type for JSON if body is set

  if (!headers["content-type"] && typeof body !== "undefined") {
    headers["content-type"] = "application/json; charset=utf-8";
  } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.
  // fetch does not allow to set `content-length` header, but we can set body to an empty string

  if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") {
    body = "";
  } // Only return body/request keys if present

  return Object.assign({
    method,
    url,
    headers
  }, typeof body !== "undefined" ? {
    body
  } : null, options.request ? {
    request: options.request
  } : null);
}
|
|
|
|
// Merge the bound defaults with the caller's route/options, then normalize
// the result into a fetch-compatible request object.
function endpointWithDefaults(defaults, route, options) {
  const merged = merge(defaults, route, options);
  return parse(merged);
}
|
|
|
|
// Produce a new `endpoint` function whose defaults are the merge of the old
// defaults and the overrides, exposing the standard helper surface
// (DEFAULTS, defaults, merge, parse) on the function object.
function withDefaults(oldDefaults, newDefaults) {
  const DEFAULTS = merge(oldDefaults, newDefaults);
  const endpoint = endpointWithDefaults.bind(null, DEFAULTS);
  const api = {
    DEFAULTS,
    defaults: withDefaults.bind(null, DEFAULTS),
    merge: merge.bind(null, DEFAULTS),
    parse
  };
  return Object.assign(endpoint, api);
}
|
|
|
|
const VERSION = "6.0.11";

const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.
// So we use RequestParameters and add method as additional required property.

// Base options every request starts from; overridable via `endpoint.defaults()`.
const DEFAULTS = {
  method: "GET",
  baseUrl: "https://api.github.com",
  headers: {
    accept: "application/vnd.github.v3+json",
    "user-agent": userAgent
  },
  mediaType: {
    format: "",
    previews: []
  }
};

// The root `endpoint` function, pre-bound to the library defaults above.
const endpoint = withDefaults(null, DEFAULTS);

exports.endpoint = endpoint;
//# sourceMappingURL=index.js.map
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 8467:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
|
|
var request = __nccwpck_require__(6234);
|
|
var universalUserAgent = __nccwpck_require__(5030);
|
|
|
|
const VERSION = "4.6.1";
|
|
|
|
// Error raised when a GraphQL response carries an `errors` array. The
// response payload is copied onto the instance (so `err.errors` / `err.data`
// are reachable) and the originating request options are kept for debugging.
class GraphqlError extends Error {
  constructor(request, response) {
    super(response.data.errors[0].message);
    Object.assign(this, response.data);
    Object.assign(this, { headers: response.headers });
    this.name = "GraphqlError";
    this.request = request; // Maintains proper stack trace (only available on V8)

    /* istanbul ignore next */
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }
  }
}
|
|
|
|
// Request options passed through verbatim instead of being treated as
// GraphQL variables.
const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"];
// Variable names that would clash with request options and are rejected.
const FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"];
// Matches a GitHub Enterprise Server "/api/v3" baseUrl suffix.
const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/;
|
|
// Send a GraphQL query. `query` may be a string (with `options` as variables)
// or a pre-built options object. Resolves with `response.data.data`, rejects
// with GraphqlError when the response contains an `errors` array.
function graphql(request, query, options) {
  // Reject variable names that would collide with reserved request options.
  if (options) {
    if (typeof query === "string" && "query" in options) {
      return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`));
    }
    for (const key in options) {
      if (FORBIDDEN_VARIABLE_OPTIONS.includes(key)) {
        return Promise.reject(new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`));
      }
    }
  }

  const parsedOptions = typeof query === "string" ? Object.assign({ query }, options) : query;

  // Split the parsed options into transport options vs. GraphQL variables.
  const requestOptions = {};
  for (const key of Object.keys(parsedOptions)) {
    if (NON_VARIABLE_OPTIONS.includes(key)) {
      requestOptions[key] = parsedOptions[key];
    } else {
      if (!requestOptions.variables) {
        requestOptions.variables = {};
      }
      requestOptions.variables[key] = parsedOptions[key];
    }
  }

  // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix
  // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451
  const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;
  if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {
    requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql");
  }

  return request(requestOptions).then(response => {
    if (response.data.errors) {
      const headers = {};
      for (const key of Object.keys(response.headers)) {
        headers[key] = response.headers[key];
      }
      throw new GraphqlError(requestOptions, {
        headers,
        data: response.data
      });
    }
    return response.data.data;
  });
}
|
|
|
|
// Bind defaults into the request function and return a graphql API carrying
// the usual `defaults` / `endpoint` helpers.
function withDefaults(request$1, newDefaults) {
  const newRequest = request$1.defaults(newDefaults);

  const newApi = function (query, options) {
    return graphql(newRequest, query, options);
  };

  newApi.defaults = withDefaults.bind(null, newRequest);
  newApi.endpoint = request.request.endpoint;
  return newApi;
}
|
|
|
|
// Public `graphql` entry point, preconfigured for POST /graphql on the
// default baseUrl with this package's user-agent.
const graphql$1 = withDefaults(request.request, {
  headers: {
    "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`
  },
  method: "POST",
  url: "/graphql"
});
// Build a graphql API on top of a caller-supplied request function (e.g. one
// with custom authentication), keeping the POST /graphql defaults.
function withCustomRequest(customRequest) {
  return withDefaults(customRequest, {
    method: "POST",
    url: "/graphql"
  });
}

exports.graphql = graphql$1;
exports.withCustomRequest = withCustomRequest;
//# sourceMappingURL=index.js.map
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 4193:
|
|
/***/ ((__unused_webpack_module, exports) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
|
|
const VERSION = "2.13.3";
|
|
|
|
/**
|
|
* Some “list” response that can be paginated have a different response structure
|
|
*
|
|
* They have a `total_count` key in the response (search also has `incomplete_results`,
|
|
* /installation/repositories also has `repository_selection`), as well as a key with
|
|
* the list of the items which name varies from endpoint to endpoint.
|
|
*
|
|
* Octokit normalizes these responses so that paginated results are always returned following
|
|
* the same structure. One challenge is that if the list response has only one page, no Link
|
|
 * header is provided, so this header alone is not sufficient to check whether a response is
|
|
* paginated or not.
|
|
*
|
|
* We check if a "total_count" key is present in the response data, but also make sure that
|
|
* a "url" property is not, as the "Get the combined status for a specific ref" endpoint would
|
|
* otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref
|
|
*/
|
|
// Normalize a paginated "list" response in place so `response.data` is always
// the item array; `total_count`, `incomplete_results` and
// `repository_selection` are re-attached as properties of that array.
function normalizePaginatedListResponse(response) {
  // Responses without `total_count`, or with a `url` key (e.g. the combined
  // status endpoint), are already in the normalized shape.
  const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data);
  if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way
  // to retrieve the same information.

  const {
    incomplete_results: incompleteResults,
    repository_selection: repositorySelection,
    total_count: totalCount
  } = response.data;
  delete response.data.incomplete_results;
  delete response.data.repository_selection;
  delete response.data.total_count;

  // After removing the meta keys, the first remaining key holds the item list.
  const [namespaceKey] = Object.keys(response.data);
  response.data = response.data[namespaceKey];

  if (typeof incompleteResults !== "undefined") {
    response.data.incomplete_results = incompleteResults;
  }

  if (typeof repositorySelection !== "undefined") {
    response.data.repository_selection = repositorySelection;
  }

  response.data.total_count = totalCount;
  return response;
}
|
|
|
|
// Create an async iterator over the pages of a paginated endpoint. The route
// is resolved into request options once; `url` then advances page-by-page via
// the `link: rel="next"` response header until it runs out.
function iterator(octokit, route, parameters) {
  const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);
  const requestMethod = typeof route === "function" ? route : octokit.request;
  const { method, headers } = options;
  let url = options.url;
  return {
    [Symbol.asyncIterator]: () => ({
      async next() {
        if (!url) {
          return { done: true };
        }

        const response = await requestMethod({ method, url, headers });
        const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format:
        // '<https://api.github.com/users/aseemk/followers?page=2>; rel="next", <https://api.github.com/users/aseemk/followers?page=2>; rel="last"'
        // sets `url` to undefined if "next" URL is not present or `link` header is not set

        url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1];
        return { value: normalizedResponse };
      }
    })
  };
}
|
|
|
|
// Fetch every page of a paginated endpoint and return the concatenated
// results. Supports the (route, mapFn) call signature without parameters.
function paginate(octokit, route, parameters, mapFn) {
  if (typeof parameters === "function") {
    mapFn = parameters;
    parameters = undefined;
  }

  const asyncIterator = iterator(octokit, route, parameters)[Symbol.asyncIterator]();
  return gather(octokit, [], asyncIterator, mapFn);
}
|
|
|
|
// Pull one page at a time from `iterator`, accumulating results. `mapFn` may
// transform each response and can call its `done` argument to stop fetching
// further pages early.
async function gather(octokit, results, iterator, mapFn) {
  const result = await iterator.next();
  if (result.done) {
    return results;
  }

  let earlyExit = false;
  const done = () => {
    earlyExit = true;
  };

  const page = mapFn ? mapFn(result.value, done) : result.value.data;
  results = results.concat(page);

  if (earlyExit) {
    return results;
  }
  return gather(octokit, results, iterator, mapFn);
}
|
|
|
|
// Standalone `paginate` with `paginate.iterator` attached, usable without
// installing the plugin on an Octokit instance.
const composePaginateRest = Object.assign(paginate, {
  iterator
});

// Every REST endpoint known (at build time) to return a paginated list.
const paginatingEndpoints = ["GET /app/installations", "GET /applications/grants", "GET /authorizations", "GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/actions/runners/downloads", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/runners/downloads", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/blocks", "GET /orgs/{org}/credential-authorizations", "GET /orgs/{org}/events", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/projects", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/team-sync/group-mappings", "GET /orgs/{org}/teams/{team_slug}/teams", "GET /projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET /repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runners/downloads", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET /repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET /repos/{owner}/{repo}/teams", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /scim/v2/enterprises/{enterprise}/Groups", "GET /scim/v2/enterprises/{enterprise}/Users", "GET /scim/v2/organizations/{org}/Users", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/team-sync/group-mappings", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/starred", "GET /users/{username}/subscriptions"];
|
|
|
|
// Whether `arg` is a route string for a known paginating endpoint.
// Non-string route specifiers (e.g. endpoint functions) never match.
function isPaginatingEndpoint(arg) {
  if (typeof arg !== "string") {
    return false;
  }
  return paginatingEndpoints.includes(arg);
}
|
|
|
|
/**
 * Octokit plugin entry point: attaches `octokit.paginate(...)` and
 * `octokit.paginate.iterator(...)`, both pre-bound to this instance.
 *
 * @param octokit Octokit instance
 * @param options Options passed to Octokit constructor
 */

function paginateRest(octokit) {
  const boundPaginate = paginate.bind(null, octokit);
  boundPaginate.iterator = iterator.bind(null, octokit);
  return {
    paginate: boundPaginate
  };
}
paginateRest.VERSION = VERSION;
|
|
|
|
exports.composePaginateRest = composePaginateRest;
|
|
exports.isPaginatingEndpoint = isPaginatingEndpoint;
|
|
exports.paginateRest = paginateRest;
|
|
exports.paginatingEndpoints = paginatingEndpoints;
|
|
//# sourceMappingURL=index.js.map
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 3044:
|
|
/***/ ((__unused_webpack_module, exports) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
|
|
// Babel helper: set `obj[key] = value`. Existing keys are redefined with a
// full data descriptor (enumerable/configurable/writable) so the write also
// succeeds over non-writable properties; new keys use plain assignment.
function _defineProperty(obj, key, value) {
  if (key in obj) {
    Object.defineProperty(obj, key, {
      value,
      enumerable: true,
      configurable: true,
      writable: true
    });
  } else {
    obj[key] = value;
  }
  return obj;
}
|
|
|
|
// Babel helper: own enumerable string keys, followed by own symbol keys
// (restricted to enumerable symbols when `enumerableOnly` is truthy).
function ownKeys(object, enumerableOnly) {
  const keys = Object.keys(object);

  if (Object.getOwnPropertySymbols) {
    let symbols = Object.getOwnPropertySymbols(object);
    if (enumerableOnly) {
      symbols = symbols.filter(function (sym) {
        return Object.getOwnPropertyDescriptor(object, sym).enumerable;
      });
    }
    keys.push.apply(keys, symbols);
  }

  return keys;
}
|
|
|
|
// Babel helper emulating object spread ({ ...a, ...b }): copies each source's
// own enumerable keys onto `target`. Odd-indexed arguments copy values key by
// key; even-indexed arguments copy full property descriptors (Babel alternates
// the two forms when compiling spread with getters/setters).
function _objectSpread2(target) {
  for (var i = 1; i < arguments.length; i++) {
    // null/undefined sources are treated as empty objects, like native spread.
    var source = arguments[i] != null ? arguments[i] : {};

    if (i % 2) {
      ownKeys(Object(source), true).forEach(function (key) {
        _defineProperty(target, key, source[key]);
      });
    } else if (Object.getOwnPropertyDescriptors) {
      Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));
    } else {
      // Fallback for engines without getOwnPropertyDescriptors.
      ownKeys(Object(source)).forEach(function (key) {
        Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
      });
    }
  }

  return target;
}
|
|
|
|
const Endpoints = {
|
|
actions: {
|
|
addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"],
|
|
cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"],
|
|
createOrUpdateEnvironmentSecret: ["PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"],
|
|
createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"],
|
|
createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
|
|
createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"],
|
|
createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"],
|
|
createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"],
|
|
createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"],
|
|
createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"],
|
|
deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"],
|
|
deleteEnvironmentSecret: ["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"],
|
|
deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"],
|
|
deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
|
|
deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"],
|
|
deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"],
|
|
deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"],
|
|
deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"],
|
|
disableSelectedRepositoryGithubActionsOrganization: ["DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"],
|
|
disableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"],
|
|
downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"],
|
|
downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"],
|
|
downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"],
|
|
enableSelectedRepositoryGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"],
|
|
enableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"],
|
|
getAllowedActionsOrganization: ["GET /orgs/{org}/actions/permissions/selected-actions"],
|
|
getAllowedActionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"],
|
|
getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"],
|
|
getEnvironmentPublicKey: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"],
|
|
getEnvironmentSecret: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"],
|
|
getGithubActionsPermissionsOrganization: ["GET /orgs/{org}/actions/permissions"],
|
|
getGithubActionsPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions"],
|
|
getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"],
|
|
getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"],
|
|
getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"],
|
|
getPendingDeploymentsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"],
|
|
getRepoPermissions: ["GET /repos/{owner}/{repo}/actions/permissions", {}, {
|
|
renamed: ["actions", "getGithubActionsPermissionsRepository"]
|
|
}],
|
|
getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"],
|
|
getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
|
|
getReviewsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"],
|
|
getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"],
|
|
getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"],
|
|
getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"],
|
|
getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"],
|
|
getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"],
|
|
getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"],
|
|
listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"],
|
|
listEnvironmentSecrets: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets"],
|
|
listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"],
|
|
listOrgSecrets: ["GET /orgs/{org}/actions/secrets"],
|
|
listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"],
|
|
listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"],
|
|
listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"],
|
|
listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"],
|
|
listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"],
|
|
listSelectedRepositoriesEnabledGithubActionsOrganization: ["GET /orgs/{org}/actions/permissions/repositories"],
|
|
listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"],
|
|
listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"],
|
|
listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"],
|
|
listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"],
|
|
listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"],
|
|
reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"],
|
|
removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"],
|
|
reviewPendingDeploymentsForRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"],
|
|
setAllowedActionsOrganization: ["PUT /orgs/{org}/actions/permissions/selected-actions"],
|
|
setAllowedActionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"],
|
|
setGithubActionsPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions"],
|
|
setGithubActionsPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions"],
|
|
setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"],
|
|
setSelectedRepositoriesEnabledGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories"]
|
|
},
|
|
activity: {
|
|
checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"],
|
|
deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"],
|
|
deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"],
|
|
getFeeds: ["GET /feeds"],
|
|
getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"],
|
|
getThread: ["GET /notifications/threads/{thread_id}"],
|
|
getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"],
|
|
listEventsForAuthenticatedUser: ["GET /users/{username}/events"],
|
|
listNotificationsForAuthenticatedUser: ["GET /notifications"],
|
|
listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"],
|
|
listPublicEvents: ["GET /events"],
|
|
listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"],
|
|
listPublicEventsForUser: ["GET /users/{username}/events/public"],
|
|
listPublicOrgEvents: ["GET /orgs/{org}/events"],
|
|
listReceivedEventsForUser: ["GET /users/{username}/received_events"],
|
|
listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"],
|
|
listRepoEvents: ["GET /repos/{owner}/{repo}/events"],
|
|
listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"],
|
|
listReposStarredByAuthenticatedUser: ["GET /user/starred"],
|
|
listReposStarredByUser: ["GET /users/{username}/starred"],
|
|
listReposWatchedByUser: ["GET /users/{username}/subscriptions"],
|
|
listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"],
|
|
listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"],
|
|
listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"],
|
|
markNotificationsAsRead: ["PUT /notifications"],
|
|
markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"],
|
|
markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"],
|
|
setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"],
|
|
setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"],
|
|
starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"],
|
|
unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"]
|
|
},
|
|
apps: {
|
|
addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"],
|
|
checkToken: ["POST /applications/{client_id}/token"],
|
|
createContentAttachment: ["POST /content_references/{content_reference_id}/attachments", {
|
|
mediaType: {
|
|
previews: ["corsair"]
|
|
}
|
|
}],
|
|
createFromManifest: ["POST /app-manifests/{code}/conversions"],
|
|
createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"],
|
|
deleteAuthorization: ["DELETE /applications/{client_id}/grant"],
|
|
deleteInstallation: ["DELETE /app/installations/{installation_id}"],
|
|
deleteToken: ["DELETE /applications/{client_id}/token"],
|
|
getAuthenticated: ["GET /app"],
|
|
getBySlug: ["GET /apps/{app_slug}"],
|
|
getInstallation: ["GET /app/installations/{installation_id}"],
|
|
getOrgInstallation: ["GET /orgs/{org}/installation"],
|
|
getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"],
|
|
getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"],
|
|
getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"],
|
|
getUserInstallation: ["GET /users/{username}/installation"],
|
|
getWebhookConfigForApp: ["GET /app/hook/config"],
|
|
listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"],
|
|
listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"],
|
|
listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories"],
|
|
listInstallations: ["GET /app/installations"],
|
|
listInstallationsForAuthenticatedUser: ["GET /user/installations"],
|
|
listPlans: ["GET /marketplace_listing/plans"],
|
|
listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"],
|
|
listReposAccessibleToInstallation: ["GET /installation/repositories"],
|
|
listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"],
|
|
listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"],
|
|
removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}"],
|
|
resetToken: ["PATCH /applications/{client_id}/token"],
|
|
revokeInstallationAccessToken: ["DELETE /installation/token"],
|
|
scopeToken: ["POST /applications/{client_id}/token/scoped"],
|
|
suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"],
|
|
unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"],
|
|
updateWebhookConfigForApp: ["PATCH /app/hook/config"]
|
|
},
|
|
billing: {
|
|
getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"],
|
|
getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"],
|
|
getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"],
|
|
getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"],
|
|
getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"],
|
|
getSharedStorageBillingUser: ["GET /users/{username}/settings/billing/shared-storage"]
|
|
},
|
|
checks: {
|
|
create: ["POST /repos/{owner}/{repo}/check-runs"],
|
|
createSuite: ["POST /repos/{owner}/{repo}/check-suites"],
|
|
get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"],
|
|
getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"],
|
|
listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"],
|
|
listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"],
|
|
listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"],
|
|
listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"],
|
|
rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"],
|
|
setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences"],
|
|
update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"]
|
|
},
|
|
codeScanning: {
|
|
deleteAnalysis: ["DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"],
|
|
getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", {}, {
|
|
renamedParameters: {
|
|
alert_id: "alert_number"
|
|
}
|
|
}],
|
|
getAnalysis: ["GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"],
|
|
getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"],
|
|
listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"],
|
|
listAlertsInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"],
|
|
listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"],
|
|
updateAlert: ["PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"],
|
|
uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"]
|
|
},
|
|
codesOfConduct: {
|
|
getAllCodesOfConduct: ["GET /codes_of_conduct", {
|
|
mediaType: {
|
|
previews: ["scarlet-witch"]
|
|
}
|
|
}],
|
|
getConductCode: ["GET /codes_of_conduct/{key}", {
|
|
mediaType: {
|
|
previews: ["scarlet-witch"]
|
|
}
|
|
}],
|
|
getForRepo: ["GET /repos/{owner}/{repo}/community/code_of_conduct", {
|
|
mediaType: {
|
|
previews: ["scarlet-witch"]
|
|
}
|
|
}]
|
|
},
|
|
emojis: {
|
|
get: ["GET /emojis"]
|
|
},
|
|
enterpriseAdmin: {
|
|
disableSelectedOrganizationGithubActionsEnterprise: ["DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"],
|
|
enableSelectedOrganizationGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"],
|
|
getAllowedActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/selected-actions"],
|
|
getGithubActionsPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions"],
|
|
listSelectedOrganizationsEnabledGithubActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/organizations"],
|
|
setAllowedActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/selected-actions"],
|
|
setGithubActionsPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions"],
|
|
setSelectedOrganizationsEnabledGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations"]
|
|
},
|
|
gists: {
|
|
checkIsStarred: ["GET /gists/{gist_id}/star"],
|
|
create: ["POST /gists"],
|
|
createComment: ["POST /gists/{gist_id}/comments"],
|
|
delete: ["DELETE /gists/{gist_id}"],
|
|
deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"],
|
|
fork: ["POST /gists/{gist_id}/forks"],
|
|
get: ["GET /gists/{gist_id}"],
|
|
getComment: ["GET /gists/{gist_id}/comments/{comment_id}"],
|
|
getRevision: ["GET /gists/{gist_id}/{sha}"],
|
|
list: ["GET /gists"],
|
|
listComments: ["GET /gists/{gist_id}/comments"],
|
|
listCommits: ["GET /gists/{gist_id}/commits"],
|
|
listForUser: ["GET /users/{username}/gists"],
|
|
listForks: ["GET /gists/{gist_id}/forks"],
|
|
listPublic: ["GET /gists/public"],
|
|
listStarred: ["GET /gists/starred"],
|
|
star: ["PUT /gists/{gist_id}/star"],
|
|
unstar: ["DELETE /gists/{gist_id}/star"],
|
|
update: ["PATCH /gists/{gist_id}"],
|
|
updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"]
|
|
},
|
|
git: {
|
|
createBlob: ["POST /repos/{owner}/{repo}/git/blobs"],
|
|
createCommit: ["POST /repos/{owner}/{repo}/git/commits"],
|
|
createRef: ["POST /repos/{owner}/{repo}/git/refs"],
|
|
createTag: ["POST /repos/{owner}/{repo}/git/tags"],
|
|
createTree: ["POST /repos/{owner}/{repo}/git/trees"],
|
|
deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"],
|
|
getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"],
|
|
getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"],
|
|
getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"],
|
|
getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"],
|
|
getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"],
|
|
listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"],
|
|
updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"]
|
|
},
|
|
gitignore: {
|
|
getAllTemplates: ["GET /gitignore/templates"],
|
|
getTemplate: ["GET /gitignore/templates/{name}"]
|
|
},
|
|
interactions: {
|
|
getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"],
|
|
getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"],
|
|
getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"],
|
|
getRestrictionsForYourPublicRepos: ["GET /user/interaction-limits", {}, {
|
|
renamed: ["interactions", "getRestrictionsForAuthenticatedUser"]
|
|
}],
|
|
removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"],
|
|
removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"],
|
|
removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits"],
|
|
removeRestrictionsForYourPublicRepos: ["DELETE /user/interaction-limits", {}, {
|
|
renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"]
|
|
}],
|
|
setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"],
|
|
setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"],
|
|
setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"],
|
|
setRestrictionsForYourPublicRepos: ["PUT /user/interaction-limits", {}, {
|
|
renamed: ["interactions", "setRestrictionsForAuthenticatedUser"]
|
|
}]
|
|
},
|
|
issues: {
|
|
addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"],
|
|
addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"],
|
|
checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"],
|
|
create: ["POST /repos/{owner}/{repo}/issues"],
|
|
createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"],
|
|
createLabel: ["POST /repos/{owner}/{repo}/labels"],
|
|
createMilestone: ["POST /repos/{owner}/{repo}/milestones"],
|
|
deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"],
|
|
deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"],
|
|
deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"],
|
|
get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"],
|
|
getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"],
|
|
getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"],
|
|
getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"],
|
|
getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"],
|
|
list: ["GET /issues"],
|
|
listAssignees: ["GET /repos/{owner}/{repo}/assignees"],
|
|
listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"],
|
|
listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"],
|
|
listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"],
|
|
listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"],
|
|
listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", {
|
|
mediaType: {
|
|
previews: ["mockingbird"]
|
|
}
|
|
}],
|
|
listForAuthenticatedUser: ["GET /user/issues"],
|
|
listForOrg: ["GET /orgs/{org}/issues"],
|
|
listForRepo: ["GET /repos/{owner}/{repo}/issues"],
|
|
listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"],
|
|
listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"],
|
|
listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"],
|
|
listMilestones: ["GET /repos/{owner}/{repo}/milestones"],
|
|
lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"],
|
|
removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"],
|
|
removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"],
|
|
removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"],
|
|
setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"],
|
|
unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"],
|
|
update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"],
|
|
updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"],
|
|
updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"],
|
|
updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"]
|
|
},
|
|
licenses: {
|
|
get: ["GET /licenses/{license}"],
|
|
getAllCommonlyUsed: ["GET /licenses"],
|
|
getForRepo: ["GET /repos/{owner}/{repo}/license"]
|
|
},
|
|
markdown: {
|
|
render: ["POST /markdown"],
|
|
renderRaw: ["POST /markdown/raw", {
|
|
headers: {
|
|
"content-type": "text/plain; charset=utf-8"
|
|
}
|
|
}]
|
|
},
|
|
meta: {
|
|
get: ["GET /meta"],
|
|
getOctocat: ["GET /octocat"],
|
|
getZen: ["GET /zen"],
|
|
root: ["GET /"]
|
|
},
|
|
migrations: {
|
|
cancelImport: ["DELETE /repos/{owner}/{repo}/import"],
|
|
deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive", {
|
|
mediaType: {
|
|
previews: ["wyandotte"]
|
|
}
|
|
}],
|
|
deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive", {
|
|
mediaType: {
|
|
previews: ["wyandotte"]
|
|
}
|
|
}],
|
|
downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive", {
|
|
mediaType: {
|
|
previews: ["wyandotte"]
|
|
}
|
|
}],
|
|
getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive", {
|
|
mediaType: {
|
|
previews: ["wyandotte"]
|
|
}
|
|
}],
|
|
getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"],
|
|
getImportStatus: ["GET /repos/{owner}/{repo}/import"],
|
|
getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"],
|
|
getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}", {
|
|
mediaType: {
|
|
previews: ["wyandotte"]
|
|
}
|
|
}],
|
|
getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}", {
|
|
mediaType: {
|
|
previews: ["wyandotte"]
|
|
}
|
|
}],
|
|
listForAuthenticatedUser: ["GET /user/migrations", {
|
|
mediaType: {
|
|
previews: ["wyandotte"]
|
|
}
|
|
}],
|
|
listForOrg: ["GET /orgs/{org}/migrations", {
|
|
mediaType: {
|
|
previews: ["wyandotte"]
|
|
}
|
|
}],
|
|
listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories", {
|
|
mediaType: {
|
|
previews: ["wyandotte"]
|
|
}
|
|
}],
|
|
listReposForUser: ["GET /user/migrations/{migration_id}/repositories", {
|
|
mediaType: {
|
|
previews: ["wyandotte"]
|
|
}
|
|
}],
|
|
mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"],
|
|
setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"],
|
|
startForAuthenticatedUser: ["POST /user/migrations"],
|
|
startForOrg: ["POST /orgs/{org}/migrations"],
|
|
startImport: ["PUT /repos/{owner}/{repo}/import"],
|
|
unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock", {
|
|
mediaType: {
|
|
previews: ["wyandotte"]
|
|
}
|
|
}],
|
|
unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", {
|
|
mediaType: {
|
|
previews: ["wyandotte"]
|
|
}
|
|
}],
|
|
updateImport: ["PATCH /repos/{owner}/{repo}/import"]
|
|
},
|
|
orgs: {
|
|
blockUser: ["PUT /orgs/{org}/blocks/{username}"],
|
|
cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"],
|
|
checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"],
|
|
checkMembershipForUser: ["GET /orgs/{org}/members/{username}"],
|
|
checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"],
|
|
convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"],
|
|
createInvitation: ["POST /orgs/{org}/invitations"],
|
|
createWebhook: ["POST /orgs/{org}/hooks"],
|
|
deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"],
|
|
get: ["GET /orgs/{org}"],
|
|
getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"],
|
|
getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"],
|
|
getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"],
|
|
getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"],
|
|
list: ["GET /organizations"],
|
|
listAppInstallations: ["GET /orgs/{org}/installations"],
|
|
listBlockedUsers: ["GET /orgs/{org}/blocks"],
|
|
listFailedInvitations: ["GET /orgs/{org}/failed_invitations"],
|
|
listForAuthenticatedUser: ["GET /user/orgs"],
|
|
listForUser: ["GET /users/{username}/orgs"],
|
|
listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"],
|
|
listMembers: ["GET /orgs/{org}/members"],
|
|
listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"],
|
|
listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"],
|
|
listPendingInvitations: ["GET /orgs/{org}/invitations"],
|
|
listPublicMembers: ["GET /orgs/{org}/public_members"],
|
|
listWebhooks: ["GET /orgs/{org}/hooks"],
|
|
pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"],
|
|
removeMember: ["DELETE /orgs/{org}/members/{username}"],
|
|
removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"],
|
|
removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"],
|
|
removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"],
|
|
setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"],
|
|
setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"],
|
|
unblockUser: ["DELETE /orgs/{org}/blocks/{username}"],
|
|
update: ["PATCH /orgs/{org}"],
|
|
updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"],
|
|
updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"],
|
|
updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"]
|
|
},
|
|
packages: {
|
|
deletePackageForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}"],
|
|
deletePackageForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}"],
|
|
deletePackageVersionForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"],
|
|
deletePackageVersionForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"],
|
|
getAllPackageVersionsForAPackageOwnedByAnOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions", {}, {
|
|
renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"]
|
|
}],
|
|
getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions", {}, {
|
|
renamed: ["packages", "getAllPackageVersionsForPackageOwnedByAuthenticatedUser"]
|
|
}],
|
|
getAllPackageVersionsForPackageOwnedByAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions"],
|
|
getAllPackageVersionsForPackageOwnedByOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"],
|
|
getAllPackageVersionsForPackageOwnedByUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions"],
|
|
getPackageForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}"],
|
|
getPackageForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}"],
|
|
getPackageForUser: ["GET /users/{username}/packages/{package_type}/{package_name}"],
|
|
getPackageVersionForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"],
|
|
getPackageVersionForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"],
|
|
getPackageVersionForUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"],
|
|
restorePackageForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/restore{?token}"],
|
|
restorePackageForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"],
|
|
restorePackageVersionForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"],
|
|
restorePackageVersionForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"]
|
|
},
|
|
projects: {
|
|
addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
createCard: ["POST /projects/columns/{column_id}/cards", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
createColumn: ["POST /projects/{project_id}/columns", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
createForAuthenticatedUser: ["POST /user/projects", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
createForOrg: ["POST /orgs/{org}/projects", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
createForRepo: ["POST /repos/{owner}/{repo}/projects", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
delete: ["DELETE /projects/{project_id}", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
deleteCard: ["DELETE /projects/columns/cards/{card_id}", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
deleteColumn: ["DELETE /projects/columns/{column_id}", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
get: ["GET /projects/{project_id}", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
getCard: ["GET /projects/columns/cards/{card_id}", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
getColumn: ["GET /projects/columns/{column_id}", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
listCards: ["GET /projects/columns/{column_id}/cards", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
listCollaborators: ["GET /projects/{project_id}/collaborators", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
listColumns: ["GET /projects/{project_id}/columns", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
listForOrg: ["GET /orgs/{org}/projects", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
listForRepo: ["GET /repos/{owner}/{repo}/projects", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
listForUser: ["GET /users/{username}/projects", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
moveCard: ["POST /projects/columns/cards/{card_id}/moves", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
moveColumn: ["POST /projects/columns/{column_id}/moves", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
update: ["PATCH /projects/{project_id}", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
updateCard: ["PATCH /projects/columns/cards/{card_id}", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
updateColumn: ["PATCH /projects/columns/{column_id}", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}]
|
|
},
|
|
pulls: {
|
|
checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"],
|
|
create: ["POST /repos/{owner}/{repo}/pulls"],
|
|
createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"],
|
|
createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"],
|
|
createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"],
|
|
deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"],
|
|
deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"],
|
|
dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"],
|
|
get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"],
|
|
getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"],
|
|
getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"],
|
|
list: ["GET /repos/{owner}/{repo}/pulls"],
|
|
listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"],
|
|
listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"],
|
|
listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"],
|
|
listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"],
|
|
listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"],
|
|
listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"],
|
|
listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"],
|
|
merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"],
|
|
removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"],
|
|
requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"],
|
|
submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"],
|
|
update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"],
|
|
updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch", {
|
|
mediaType: {
|
|
previews: ["lydian"]
|
|
}
|
|
}],
|
|
updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"],
|
|
updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"]
|
|
},
|
|
rateLimit: {
|
|
get: ["GET /rate_limit"]
|
|
},
|
|
reactions: {
|
|
createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions", {
|
|
mediaType: {
|
|
previews: ["squirrel-girl"]
|
|
}
|
|
}],
|
|
createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions", {
|
|
mediaType: {
|
|
previews: ["squirrel-girl"]
|
|
}
|
|
}],
|
|
createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", {
|
|
mediaType: {
|
|
previews: ["squirrel-girl"]
|
|
}
|
|
}],
|
|
createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", {
|
|
mediaType: {
|
|
previews: ["squirrel-girl"]
|
|
}
|
|
}],
|
|
createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", {
|
|
mediaType: {
|
|
previews: ["squirrel-girl"]
|
|
}
|
|
}],
|
|
createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", {
|
|
mediaType: {
|
|
previews: ["squirrel-girl"]
|
|
}
|
|
}],
|
|
deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", {
|
|
mediaType: {
|
|
previews: ["squirrel-girl"]
|
|
}
|
|
}],
|
|
deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", {
|
|
mediaType: {
|
|
previews: ["squirrel-girl"]
|
|
}
|
|
}],
|
|
deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", {
|
|
mediaType: {
|
|
previews: ["squirrel-girl"]
|
|
}
|
|
}],
|
|
deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", {
|
|
mediaType: {
|
|
previews: ["squirrel-girl"]
|
|
}
|
|
}],
|
|
deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", {
|
|
mediaType: {
|
|
previews: ["squirrel-girl"]
|
|
}
|
|
}],
|
|
deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", {
|
|
mediaType: {
|
|
previews: ["squirrel-girl"]
|
|
}
|
|
}],
|
|
deleteLegacy: ["DELETE /reactions/{reaction_id}", {
|
|
mediaType: {
|
|
previews: ["squirrel-girl"]
|
|
}
|
|
}, {
|
|
deprecated: "octokit.reactions.deleteLegacy() is deprecated, see https://docs.github.com/rest/reference/reactions/#delete-a-reaction-legacy"
|
|
}],
|
|
listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", {
|
|
mediaType: {
|
|
previews: ["squirrel-girl"]
|
|
}
|
|
}],
|
|
listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", {
|
|
mediaType: {
|
|
previews: ["squirrel-girl"]
|
|
}
|
|
}],
|
|
listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", {
|
|
mediaType: {
|
|
previews: ["squirrel-girl"]
|
|
}
|
|
}],
|
|
listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", {
|
|
mediaType: {
|
|
previews: ["squirrel-girl"]
|
|
}
|
|
}],
|
|
listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", {
|
|
mediaType: {
|
|
previews: ["squirrel-girl"]
|
|
}
|
|
}],
|
|
listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", {
|
|
mediaType: {
|
|
previews: ["squirrel-girl"]
|
|
}
|
|
}]
|
|
},
|
|
repos: {
|
|
acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}"],
|
|
addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
|
|
mapToData: "apps"
|
|
}],
|
|
addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"],
|
|
addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
|
|
mapToData: "contexts"
|
|
}],
|
|
addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, {
|
|
mapToData: "teams"
|
|
}],
|
|
addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
|
|
mapToData: "users"
|
|
}],
|
|
checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"],
|
|
checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts", {
|
|
mediaType: {
|
|
previews: ["dorian"]
|
|
}
|
|
}],
|
|
compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"],
|
|
createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"],
|
|
createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", {
|
|
mediaType: {
|
|
previews: ["zzzax"]
|
|
}
|
|
}],
|
|
createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"],
|
|
createDeployKey: ["POST /repos/{owner}/{repo}/keys"],
|
|
createDeployment: ["POST /repos/{owner}/{repo}/deployments"],
|
|
createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"],
|
|
createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"],
|
|
createForAuthenticatedUser: ["POST /user/repos"],
|
|
createFork: ["POST /repos/{owner}/{repo}/forks{?org,organization}"],
|
|
createInOrg: ["POST /orgs/{org}/repos"],
|
|
createOrUpdateEnvironment: ["PUT /repos/{owner}/{repo}/environments/{environment_name}"],
|
|
createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"],
|
|
createPagesSite: ["POST /repos/{owner}/{repo}/pages", {
|
|
mediaType: {
|
|
previews: ["switcheroo"]
|
|
}
|
|
}],
|
|
createRelease: ["POST /repos/{owner}/{repo}/releases"],
|
|
createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate", {
|
|
mediaType: {
|
|
previews: ["baptiste"]
|
|
}
|
|
}],
|
|
createWebhook: ["POST /repos/{owner}/{repo}/hooks"],
|
|
declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}"],
|
|
delete: ["DELETE /repos/{owner}/{repo}"],
|
|
deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"],
|
|
deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
|
|
deleteAnEnvironment: ["DELETE /repos/{owner}/{repo}/environments/{environment_name}"],
|
|
deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"],
|
|
deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"],
|
|
deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", {
|
|
mediaType: {
|
|
previews: ["zzzax"]
|
|
}
|
|
}],
|
|
deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"],
|
|
deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"],
|
|
deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"],
|
|
deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"],
|
|
deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages", {
|
|
mediaType: {
|
|
previews: ["switcheroo"]
|
|
}
|
|
}],
|
|
deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"],
|
|
deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"],
|
|
deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"],
|
|
deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"],
|
|
disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes", {
|
|
mediaType: {
|
|
previews: ["london"]
|
|
}
|
|
}],
|
|
disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts", {
|
|
mediaType: {
|
|
previews: ["dorian"]
|
|
}
|
|
}],
|
|
downloadArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}", {}, {
|
|
renamed: ["repos", "downloadZipballArchive"]
|
|
}],
|
|
downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"],
|
|
downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"],
|
|
enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes", {
|
|
mediaType: {
|
|
previews: ["london"]
|
|
}
|
|
}],
|
|
enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts", {
|
|
mediaType: {
|
|
previews: ["dorian"]
|
|
}
|
|
}],
|
|
get: ["GET /repos/{owner}/{repo}"],
|
|
getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"],
|
|
getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
|
|
getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"],
|
|
getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"],
|
|
getAllTopics: ["GET /repos/{owner}/{repo}/topics", {
|
|
mediaType: {
|
|
previews: ["mercy"]
|
|
}
|
|
}],
|
|
getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"],
|
|
getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"],
|
|
getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"],
|
|
getClones: ["GET /repos/{owner}/{repo}/traffic/clones"],
|
|
getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"],
|
|
getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"],
|
|
getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"],
|
|
getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"],
|
|
getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"],
|
|
getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"],
|
|
getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", {
|
|
mediaType: {
|
|
previews: ["zzzax"]
|
|
}
|
|
}],
|
|
getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"],
|
|
getContent: ["GET /repos/{owner}/{repo}/contents/{path}"],
|
|
getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"],
|
|
getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"],
|
|
getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"],
|
|
getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"],
|
|
getEnvironment: ["GET /repos/{owner}/{repo}/environments/{environment_name}"],
|
|
getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"],
|
|
getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"],
|
|
getPages: ["GET /repos/{owner}/{repo}/pages"],
|
|
getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"],
|
|
getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"],
|
|
getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"],
|
|
getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"],
|
|
getReadme: ["GET /repos/{owner}/{repo}/readme"],
|
|
getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"],
|
|
getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"],
|
|
getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"],
|
|
getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"],
|
|
getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"],
|
|
getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"],
|
|
getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"],
|
|
getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"],
|
|
getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"],
|
|
getViews: ["GET /repos/{owner}/{repo}/traffic/views"],
|
|
getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"],
|
|
getWebhookConfigForRepo: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/config"],
|
|
listBranches: ["GET /repos/{owner}/{repo}/branches"],
|
|
listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", {
|
|
mediaType: {
|
|
previews: ["groot"]
|
|
}
|
|
}],
|
|
listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"],
|
|
listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"],
|
|
listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"],
|
|
listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"],
|
|
listCommits: ["GET /repos/{owner}/{repo}/commits"],
|
|
listContributors: ["GET /repos/{owner}/{repo}/contributors"],
|
|
listDeployKeys: ["GET /repos/{owner}/{repo}/keys"],
|
|
listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"],
|
|
listDeployments: ["GET /repos/{owner}/{repo}/deployments"],
|
|
listForAuthenticatedUser: ["GET /user/repos"],
|
|
listForOrg: ["GET /orgs/{org}/repos"],
|
|
listForUser: ["GET /users/{username}/repos"],
|
|
listForks: ["GET /repos/{owner}/{repo}/forks"],
|
|
listInvitations: ["GET /repos/{owner}/{repo}/invitations"],
|
|
listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"],
|
|
listLanguages: ["GET /repos/{owner}/{repo}/languages"],
|
|
listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"],
|
|
listPublic: ["GET /repositories"],
|
|
listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", {
|
|
mediaType: {
|
|
previews: ["groot"]
|
|
}
|
|
}],
|
|
listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"],
|
|
listReleases: ["GET /repos/{owner}/{repo}/releases"],
|
|
listTags: ["GET /repos/{owner}/{repo}/tags"],
|
|
listTeams: ["GET /repos/{owner}/{repo}/teams"],
|
|
listWebhooks: ["GET /repos/{owner}/{repo}/hooks"],
|
|
merge: ["POST /repos/{owner}/{repo}/merges"],
|
|
pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"],
|
|
removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
|
|
mapToData: "apps"
|
|
}],
|
|
removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"],
|
|
removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
|
|
mapToData: "contexts"
|
|
}],
|
|
removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"],
|
|
removeTeamAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, {
|
|
mapToData: "teams"
|
|
}],
|
|
removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
|
|
mapToData: "users"
|
|
}],
|
|
renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"],
|
|
replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics", {
|
|
mediaType: {
|
|
previews: ["mercy"]
|
|
}
|
|
}],
|
|
requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"],
|
|
setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
|
|
setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
|
|
mapToData: "apps"
|
|
}],
|
|
setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
|
|
mapToData: "contexts"
|
|
}],
|
|
setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, {
|
|
mapToData: "teams"
|
|
}],
|
|
setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
|
|
mapToData: "users"
|
|
}],
|
|
testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"],
|
|
transfer: ["POST /repos/{owner}/{repo}/transfer"],
|
|
update: ["PATCH /repos/{owner}/{repo}"],
|
|
updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"],
|
|
updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"],
|
|
updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"],
|
|
updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"],
|
|
updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"],
|
|
updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"],
|
|
updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"],
|
|
updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, {
|
|
renamed: ["repos", "updateStatusCheckProtection"]
|
|
}],
|
|
updateStatusCheckProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"],
|
|
updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"],
|
|
updateWebhookConfigForRepo: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"],
|
|
uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", {
|
|
baseUrl: "https://uploads.github.com"
|
|
}]
|
|
},
|
|
search: {
|
|
code: ["GET /search/code"],
|
|
commits: ["GET /search/commits", {
|
|
mediaType: {
|
|
previews: ["cloak"]
|
|
}
|
|
}],
|
|
issuesAndPullRequests: ["GET /search/issues"],
|
|
labels: ["GET /search/labels"],
|
|
repos: ["GET /search/repositories"],
|
|
topics: ["GET /search/topics", {
|
|
mediaType: {
|
|
previews: ["mercy"]
|
|
}
|
|
}],
|
|
users: ["GET /search/users"]
|
|
},
|
|
secretScanning: {
|
|
getAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"],
|
|
listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"],
|
|
updateAlert: ["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"]
|
|
},
|
|
teams: {
|
|
addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"],
|
|
addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"],
|
|
checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"],
|
|
create: ["POST /orgs/{org}/teams"],
|
|
createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"],
|
|
createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"],
|
|
deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"],
|
|
deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"],
|
|
deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"],
|
|
getByName: ["GET /orgs/{org}/teams/{team_slug}"],
|
|
getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"],
|
|
getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"],
|
|
getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"],
|
|
list: ["GET /orgs/{org}/teams"],
|
|
listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"],
|
|
listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"],
|
|
listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"],
|
|
listForAuthenticatedUser: ["GET /user/teams"],
|
|
listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"],
|
|
listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"],
|
|
listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects", {
|
|
mediaType: {
|
|
previews: ["inertia"]
|
|
}
|
|
}],
|
|
listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"],
|
|
removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"],
|
|
removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"],
|
|
removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"],
|
|
updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"],
|
|
updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"],
|
|
updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"]
|
|
},
|
|
users: {
|
|
addEmailForAuthenticated: ["POST /user/emails"],
|
|
block: ["PUT /user/blocks/{username}"],
|
|
checkBlocked: ["GET /user/blocks/{username}"],
|
|
checkFollowingForUser: ["GET /users/{username}/following/{target_user}"],
|
|
checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"],
|
|
createGpgKeyForAuthenticated: ["POST /user/gpg_keys"],
|
|
createPublicSshKeyForAuthenticated: ["POST /user/keys"],
|
|
deleteEmailForAuthenticated: ["DELETE /user/emails"],
|
|
deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}"],
|
|
deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}"],
|
|
follow: ["PUT /user/following/{username}"],
|
|
getAuthenticated: ["GET /user"],
|
|
getByUsername: ["GET /users/{username}"],
|
|
getContextForUser: ["GET /users/{username}/hovercard"],
|
|
getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}"],
|
|
getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}"],
|
|
list: ["GET /users"],
|
|
listBlockedByAuthenticated: ["GET /user/blocks"],
|
|
listEmailsForAuthenticated: ["GET /user/emails"],
|
|
listFollowedByAuthenticated: ["GET /user/following"],
|
|
listFollowersForAuthenticatedUser: ["GET /user/followers"],
|
|
listFollowersForUser: ["GET /users/{username}/followers"],
|
|
listFollowingForUser: ["GET /users/{username}/following"],
|
|
listGpgKeysForAuthenticated: ["GET /user/gpg_keys"],
|
|
listGpgKeysForUser: ["GET /users/{username}/gpg_keys"],
|
|
listPublicEmailsForAuthenticated: ["GET /user/public_emails"],
|
|
listPublicKeysForUser: ["GET /users/{username}/keys"],
|
|
listPublicSshKeysForAuthenticated: ["GET /user/keys"],
|
|
setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility"],
|
|
unblock: ["DELETE /user/blocks/{username}"],
|
|
unfollow: ["DELETE /user/following/{username}"],
|
|
updateAuthenticated: ["PATCH /user"]
|
|
}
|
|
};
|
|
|
|
// Library version string; exposed below as restEndpointMethods.VERSION.
const VERSION = "4.15.0";
|
|
|
|
// Expands the static endpoints map into callable request methods grouped
// by scope (e.g. result.repos.get). Each map entry is a tuple of
// [route, defaults?, decorations?]: entries with decorations are wrapped
// via decorate(), plain entries become octokit.request.defaults(...)
// request methods. Scopes whose endpoint object is empty are omitted.
function endpointsToMethods(octokit, endpointsMap) {
  const methods = {};

  for (const [scope, endpoints] of Object.entries(endpointsMap)) {
    for (const [methodName, endpoint] of Object.entries(endpoints)) {
      const [route, defaults, decorations] = endpoint;
      const [method, url] = route.split(/ /);
      const endpointDefaults = Object.assign({ method, url }, defaults);

      if (!methods[scope]) {
        methods[scope] = {};
      }

      methods[scope][methodName] = decorations
        ? decorate(octokit, scope, methodName, endpointDefaults, decorations)
        : octokit.request.defaults(endpointDefaults);
    }
  }

  return methods;
}
|
|
|
|
// Wraps a request method with the compatibility shims declared in the
// endpoints map. Supported decorations: mapToData (lift one named
// parameter into the request body), renamed / deprecated (emit a warning,
// then run unchanged), renamedParameters (copy legacy parameter values to
// their new names before issuing the request).
function decorate(octokit, scope, methodName, defaults, decorations) {
  const requestWithDefaults = octokit.request.defaults(defaults);
  /* istanbul ignore next */

  function withDecorations(...args) {
    // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
    let options = requestWithDefaults.endpoint.merge(...args);

    // mapToData: move the named parameter into `data` and blank the original.
    if (decorations.mapToData) {
      const dataKey = decorations.mapToData;
      options = Object.assign({}, options, {
        data: options[dataKey],
        [dataKey]: undefined
      });
      return requestWithDefaults(options);
    }

    // renamed: warn that the method moved; the request still goes through.
    if (decorations.renamed) {
      const [newScope, newMethodName] = decorations.renamed;
      octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);
    }

    // deprecated: emit the canned deprecation notice.
    if (decorations.deprecated) {
      octokit.log.warn(decorations.deprecated);
    }

    // renamedParameters: warn once per legacy name present, copy its value
    // to the new name (unless already set), then drop the legacy key.
    if (decorations.renamedParameters) {
      // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
      const remapped = requestWithDefaults.endpoint.merge(...args);

      for (const [oldName, newName] of Object.entries(decorations.renamedParameters)) {
        if (!(oldName in remapped)) {
          continue;
        }

        octokit.log.warn(`"${oldName}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${newName}" instead`);

        if (!(newName in remapped)) {
          remapped[newName] = remapped[oldName];
        }

        delete remapped[oldName];
      }

      return requestWithDefaults(remapped);
    }

    // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
    return requestWithDefaults(...args);
  }

  return Object.assign(withDecorations, requestWithDefaults);
}
|
|
|
|
// Octokit plugin entry point: builds every REST endpoint method for the
// given client. The same method set is returned both at the top level and
// again under the `rest` key.
function restEndpointMethods(octokit) {
  const methods = endpointsToMethods(octokit, Endpoints);
  return _objectSpread2(_objectSpread2({}, methods), {}, {
    rest: methods
  });
}

restEndpointMethods.VERSION = VERSION;

exports.restEndpointMethods = restEndpointMethods;
|
|
//# sourceMappingURL=index.js.map
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 9968:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
|
|
// CommonJS/ES-module interop shim: unwraps the `default` export when the
// value is an object carrying one, otherwise returns the value untouched.
function _interopDefault (ex) {
  if (ex && typeof ex === 'object' && 'default' in ex) {
    return ex['default'];
  }
  return ex;
}
|
|
|
|
var BottleneckLight = _interopDefault(__nccwpck_require__(1174));
|
|
|
|
// Babel helper backing computed-key assignment: keys already visible on
// `obj` (own or inherited) are (re)defined as enumerable, configurable,
// writable data properties; brand-new keys use plain assignment.
// Returns `obj` for chaining.
function _defineProperty(obj, key, value) {
  if (key in obj) {
    Object.defineProperty(obj, key, {
      value,
      enumerable: true,
      configurable: true,
      writable: true
    });
  } else {
    obj[key] = value;
  }

  return obj;
}
|
|
|
|
// Collects the own string keys plus the own symbol keys of `object`.
// With `enumerableOnly` set, non-enumerable symbols are dropped (string
// keys from Object.keys are always enumerable already).
function ownKeys(object, enumerableOnly) {
  const keys = Object.keys(object);

  if (Object.getOwnPropertySymbols) {
    let symbols = Object.getOwnPropertySymbols(object);

    if (enumerableOnly) {
      symbols = symbols.filter((sym) => Object.getOwnPropertyDescriptor(object, sym).enumerable);
    }

    keys.push(...symbols);
  }

  return keys;
}
|
|
|
|
// Babel's object-spread helper: merges each source argument's own
// properties into `target` (mutating it) and returns it. Null/undefined
// sources are treated as {}. Sources at odd argument positions are copied
// key-by-key through _defineProperty using own enumerable string keys and
// symbols; sources at even positions are copied with their full property
// descriptors via Object.getOwnPropertyDescriptors when the engine has it,
// otherwise key-by-key preserving each descriptor.
function _objectSpread2(target) {
  for (var i = 1; i < arguments.length; i++) {
    var source = arguments[i] != null ? arguments[i] : {};

    if (i % 2) {
      // odd-indexed source: assignment-style copy of enumerable own keys
      ownKeys(Object(source), true).forEach(function (key) {
        _defineProperty(target, key, source[key]);
      });
    } else if (Object.getOwnPropertyDescriptors) {
      // even-indexed source: preserve attributes and accessors in one call
      Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));
    } else {
      // fallback for engines without getOwnPropertyDescriptors
      ownKeys(Object(source)).forEach(function (key) {
        Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
      });
    }
  }

  return target;
}
|
|
|
|
// Version string of this bundled throttling module.
const VERSION = "3.4.1";

// No-op job scheduled on the pacing limiters purely to occupy a time slot
// (see doRequest); resolves immediately.
const noop = () => Promise.resolve(); // @ts-ignore
|
|
|
|
|
|
// Funnels every request through the retry limiter, which in turn runs
// doRequest (the per-category throttling logic) with the same arguments.
function wrapRequest(state, request, options) {
  return state.retryLimiter.schedule(doRequest, state, request, options);
} // @ts-ignore
|
|
|
|
// Schedules one request through the throttling pipeline. `state` holds the
// Bottleneck limiter groups keyed by `state.id`, `request` is the
// underlying request function, `options` the resolved request options.
// Pacing is enforced by awaiting `noop` jobs on the write / notifications /
// search limiters (their minTime settings space consecutive jobs) before
// the real request is scheduled on the global limiter. Returns the request
// promise; GraphQL responses whose body carries RATE_LIMITED errors are
// converted into a thrown Error carrying the response headers and data.
async function doRequest(state, request, options) {
  const isWrite = options.method !== "GET" && options.method !== "HEAD";
  const isSearch = options.method === "GET" && options.url.startsWith("/search/");
  const isGraphQL = options.url.startsWith("/graphql");
  const retryCount = ~~options.request.retryCount; // ~~ coerces a missing retryCount to 0
  // Retried jobs are rescheduled with priority 0 and zero weight.
  const jobOptions = retryCount > 0 ? {
    priority: 0,
    weight: 0
  } : {};

  if (state.clustering) {
    // Remove a job from Redis if it has not completed or failed within 60s
    // Examples: Node process terminated, client disconnected, etc.
    // @ts-ignore
    jobOptions.expiration = 1000 * 60;
  } // Guarantee at least 1000ms between writes
  // GraphQL can also trigger writes


  if (isWrite || isGraphQL) {
    await state.write.key(state.id).schedule(jobOptions, noop);
  } // Guarantee at least 3000ms between requests that trigger notifications


  if (isWrite && state.triggersNotification(options.url)) {
    await state.notifications.key(state.id).schedule(jobOptions, noop);
  } // Guarantee at least 2000ms between search requests


  if (isSearch) {
    await state.search.key(state.id).schedule(jobOptions, noop);
  }

  // The actual request runs on the global limiter.
  const req = state.global.key(state.id).schedule(jobOptions, request, options);

  if (isGraphQL) {
    const res = await req;

    // GraphQL reports rate limiting inside the response body
    // (res.data.errors) rather than as a rejected request, so surface it
    // as a thrown error here.
    if (res.data.errors != null && // @ts-ignore
    res.data.errors.some(error => error.type === "RATE_LIMITED")) {
      const error = Object.assign(new Error("GraphQL Rate Limit Exceeded"), {
        headers: res.headers,
        data: res.data
      });
      throw error;
    }
  }

  return req;
}
|
|
|
|
// URL templates of endpoints whose write requests can generate user
// notifications (invitations, discussions, issues, PR activity, releases).
// doRequest consults the matcher built from this list (see routeMatcher
// below) to apply the extra "notifications" pacing limiter.
var triggersNotificationPaths = ["/orgs/{org}/invitations", "/orgs/{org}/invitations/{invitation_id}", "/orgs/{org}/teams/{team_slug}/discussions", "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "/repos/{owner}/{repo}/collaborators/{username}", "/repos/{owner}/{repo}/commits/{commit_sha}/comments", "/repos/{owner}/{repo}/issues", "/repos/{owner}/{repo}/issues/{issue_number}/comments", "/repos/{owner}/{repo}/pulls", "/repos/{owner}/{repo}/pulls/{pull_number}/comments", "/repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies", "/repos/{owner}/{repo}/pulls/{pull_number}/merge", "/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "/repos/{owner}/{repo}/pulls/{pull_number}/reviews", "/repos/{owner}/{repo}/releases", "/teams/{team_id}/discussions", "/teams/{team_id}/discussions/{discussion_number}/comments"];
|
|
|
|
// @ts-ignore
|
|
// Builds one case-insensitive RegExp matching any of the given endpoint
// URL templates. Example input:
//   ["/orgs/{org}/invitations",
//    "/repos/{owner}/{repo}/collaborators/{username}"]
// Each "{placeholder}" segment becomes the lazy wildcard "(?:.+?)",
// yielding alternatives such as:
//   /orgs/(?:.+?)/invitations
//   /repos/(?:.+?)/(?:.+?)/collaborators/(?:.+?)
// which are combined into:  ^(?:(?:alt1)|(?:alt2)|...)[^/]*$
// (Paste the result into https://www.debuggex.com/ to visualize it.)
function routeMatcher(paths) {
  // @ts-ignore
  const alternatives = [];

  for (const template of paths) {
    const segments = template.split("/"); // @ts-ignore
    const rewritten = segments.map((segment) =>
      segment.startsWith("{") ? "(?:.+?)" : segment
    );
    alternatives.push(rewritten.join("/"));
  }

  const wrapped = alternatives.map((alt) => `(?:${alt})`);
  return new RegExp(`^(?:${wrapped.join("|")})[^/]*$`, "i");
}
|
|
|
|
// Compiled matcher for the notification-triggering routes defined above.
const regex = routeMatcher(triggersNotificationPaths);
// Pre-bound `RegExp#test` so it can be passed around as a bare predicate
// (e.g. stored on the plugin state and called as state.triggersNotification(url)).
const triggersNotification = regex.test.bind(regex);
// Registry of shared Bottleneck.Group limiters; populated lazily by
// createGroups() the first time the plugin is installed.
const groups = {}; // @ts-ignore
|
|
|
|
/**
 * Populates the module-level `groups` registry with the four shared
 * Bottleneck.Group limiters used by the throttling plugin:
 *   - octokit-global:        up to 10 concurrent requests overall
 *   - octokit-search:        1 at a time, at least 2000ms apart
 *   - octokit-write:         1 at a time, at least 1000ms apart
 *   - octokit-notifications: 1 at a time, at least 3000ms apart
 *
 * @param {*} Bottleneck - Bottleneck constructor (light build by default)
 * @param {*} common - shared options (connection, timeout) merged into each group
 */
const createGroups = function (Bottleneck, common) {
  // Table of group key -> base limiter options; creation order matches the
  // original global/search/write/notifications sequence.
  const limiterSpecs = [
    ["global", { id: "octokit-global", maxConcurrent: 10 }],
    ["search", { id: "octokit-search", maxConcurrent: 1, minTime: 2000 }],
    ["write", { id: "octokit-write", maxConcurrent: 1, minTime: 1000 }],
    ["notifications", { id: "octokit-notifications", maxConcurrent: 1, minTime: 3000 }]
  ];

  for (const [key, baseOptions] of limiterSpecs) {
    // @ts-ignore
    groups[key] = new Bottleneck.Group(_objectSpread2(baseOptions, common));
  }
};
|
|
|
|
/**
 * Octokit plugin entry point: installs client-side throttling so requests
 * respect GitHub's rate, abuse, search and notification limits.
 *
 * Reads its configuration from `octokitOptions.throttle`:
 *   enabled    - set false to disable the plugin entirely
 *   Bottleneck - alternative Bottleneck constructor (defaults to the light build)
 *   id         - limiter id used for clustering (default "no-id")
 *   timeout    - job TTL in ms (default 2 minutes; doubles as Redis TTL)
 *   connection - optional Bottleneck clustering connection
 *
 * Throws if the caller does not supply both `onAbuseLimit` and `onRateLimit`
 * handlers, since silently dropping those events would hide throttling.
 */
function throttling(octokit, octokitOptions = {}) {
  const {
    enabled = true,
    Bottleneck = BottleneckLight,
    id = "no-id",
    timeout = 1000 * 60 * 2,
    // Redis TTL: 2 minutes
    connection
  } = octokitOptions.throttle || {};

  if (!enabled) {
    return;
  }

  // Options shared by every limiter group created below.
  const common = {
    connection,
    timeout
  }; // @ts-ignore

  // The Bottleneck groups are process-wide singletons; create them once.
  if (groups.global == null) {
    createGroups(Bottleneck, common);
  }

  // Plugin state: defaults + the shared groups, with any user-supplied
  // throttle options layered on top (so callers can override e.g.
  // retryAfterBaseValue in tests).
  const state = Object.assign(_objectSpread2({
    clustering: connection != null,
    triggersNotification,
    minimumAbuseRetryAfter: 5,
    retryAfterBaseValue: 1000,
    retryLimiter: new Bottleneck(),
    id
  }, groups), // @ts-ignore
  octokitOptions.throttle);

  // Both limit handlers are mandatory; fail loudly at setup time.
  if (typeof state.onAbuseLimit !== "function" || typeof state.onRateLimit !== "function") {
    throw new Error(`octokit/plugin-throttling error:
You must pass the onAbuseLimit and onRateLimit error handlers.
See https://github.com/octokit/rest.js#throttling

const octokit = new Octokit({
throttle: {
onAbuseLimit: (retryAfter, options) => {/* ... */},
onRateLimit: (retryAfter, options) => {/* ... */}
}
})
`);
  }

  // Bottleneck.Events mixes an emitter interface into `events`;
  // `emitter.trigger(...)` below dispatches to these listeners.
  const events = {};
  const emitter = new Bottleneck.Events(events); // @ts-ignore

  events.on("abuse-limit", state.onAbuseLimit); // @ts-ignore

  events.on("rate-limit", state.onRateLimit); // @ts-ignore

  events.on("error", e => console.warn("Error in throttling-plugin limit handler", e)); // @ts-ignore

  // Decide, for every failed job, whether Bottleneck should retry it and
  // after how long. Returning a number (ms) from this handler schedules a
  // retry; returning undefined lets the failure propagate.
  state.retryLimiter.on("failed", async function (error, info) {
    const options = info.args[info.args.length - 1];
    // GraphQL errors are retryable unless they are auth failures (401).
    const shouldRetryGraphQL = options.url.startsWith("/graphql") && error.status !== 401;

    if (!(shouldRetryGraphQL || error.status === 403)) {
      return;
    }

    // `~~` coerces undefined/NaN to 0 for the first failure.
    const retryCount = ~~options.request.retryCount;
    options.request.retryCount = retryCount;
    const {
      wantRetry,
      retryAfter
    } = await async function () {
      if (/\babuse\b/i.test(error.message)) {
        // The user has hit the abuse rate limit. (REST and GraphQL)
        // https://docs.github.com/en/rest/overview/resources-in-the-rest-api#abuse-rate-limits
        // The Retry-After header can sometimes be blank when hitting an abuse limit,
        // but is always present after 2-3s, so make sure to set `retryAfter` to at least 5s by default.
        const retryAfter = Math.max(~~error.headers["retry-after"], state.minimumAbuseRetryAfter);
        const wantRetry = await emitter.trigger("abuse-limit", retryAfter, options, octokit);
        return {
          wantRetry,
          retryAfter
        };
      }

      if (error.headers != null && error.headers["x-ratelimit-remaining"] === "0") {
        // The user has used all their allowed calls for the current time period (REST and GraphQL)
        // https://docs.github.com/en/rest/reference/rate-limit (REST)
        // https://docs.github.com/en/graphql/overview/resource-limitations#rate-limit (GraphQL)
        const rateLimitReset = new Date(~~error.headers["x-ratelimit-reset"] * 1000).getTime();
        const retryAfter = Math.max(Math.ceil((rateLimitReset - Date.now()) / 1000), 0);
        const wantRetry = await emitter.trigger("rate-limit", retryAfter, options, octokit);
        return {
          wantRetry,
          retryAfter
        };
      }

      // Neither an abuse nor a rate-limit failure: do not retry.
      return {};
    }();

    if (wantRetry) {
      options.request.retryCount++; // @ts-ignore

      // Delay (ms) before Bottleneck re-runs the job.
      return retryAfter * state.retryAfterBaseValue;
    }
  });
  // Route every request through wrapRequest with this plugin's state.
  octokit.hook.wrap("request", wrapRequest.bind(null, state));
}

// Expose metadata/helpers on the plugin function itself.
throttling.VERSION = VERSION;
throttling.triggersNotification = triggersNotification;

exports.throttling = throttling;
//# sourceMappingURL=index.js.map
|
|
//# sourceMappingURL=index.js.map
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 537:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
|
|
/**
 * Unwraps a CommonJS/ESM interop module object: when given a non-null object
 * that carries a `default` property, returns that property; otherwise returns
 * the value unchanged.
 */
function _interopDefault (ex) {
  if (ex && typeof ex === 'object' && 'default' in ex) {
    return ex.default;
  }
  return ex;
}
|
|
|
|
var deprecation = __nccwpck_require__(8932);
var once = _interopDefault(__nccwpck_require__(1223));

// Emits each deprecation warning at most once per process.
const logOnce = once(deprecation => console.warn(deprecation));
|
|
/**
 * Error thrown for failed GitHub API requests. Carries the HTTP status, the
 * response headers, and a credential-redacted copy of the request options to
 * help with debugging without leaking secrets into logs.
 */
class RequestError extends Error {
  /**
   * @param {string} message - error message (usually the response body text)
   * @param {number} statusCode - HTTP status of the failed response
   * @param {object} options - `{ headers, request }` of the failed call
   */
  constructor(message, statusCode, options) {
    super(message);

    // Maintains proper stack trace (only available on V8)
    /* istanbul ignore next */
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }

    this.name = "HttpError";
    this.status = statusCode;

    // Deprecated alias for `status`; warns once on first access.
    Object.defineProperty(this, "code", {
      get() {
        logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`."));
        return statusCode;
      }
    });

    this.headers = options.headers || {};

    // Redact request credentials without mutating the caller's options.
    const redactedRequest = Object.assign({}, options.request);

    if (options.request.headers.authorization) {
      redactedRequest.headers = Object.assign({}, options.request.headers, {
        authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]")
      });
    }

    // client_id & client_secret can be passed as URL query parameters to increase rate limit
    // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications
    let redactedUrl = redactedRequest.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]");
    // OAuth tokens can be passed as URL query parameters, although it is not recommended
    // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header
    redactedUrl = redactedUrl.replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
    redactedRequest.url = redactedUrl;

    this.request = redactedRequest;
  }

}
|
|
|
|
// Public export of this bundled module (@octokit/request-error).
exports.RequestError = RequestError;
//# sourceMappingURL=index.js.map
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 6234:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
|
|
/**
 * Unwraps a CommonJS/ESM interop module object: a non-null object exposing a
 * `default` property yields that property, anything else is passed through.
 */
function _interopDefault (ex) {
  const hasDefault = ex && typeof ex === 'object' && 'default' in ex;
  return hasDefault ? ex.default : ex;
}
|
|
|
|
var endpoint = __nccwpck_require__(9440);
var universalUserAgent = __nccwpck_require__(5030);
var isPlainObject = __nccwpck_require__(3287);
var nodeFetch = _interopDefault(__nccwpck_require__(467));
var requestError = __nccwpck_require__(537);

// @octokit/request version baked in at bundle time; used in the default
// User-Agent header constructed at the bottom of this module.
const VERSION = "5.4.15";
|
|
|
|
/**
 * Reads the full response body as an ArrayBuffer; used by fetchWrapper for
 * content types that are neither JSON nor text.
 * @param {*} response - fetch Response-like object
 * @returns {Promise<ArrayBuffer>} resolves with the raw body bytes
 */
function getBufferResponse(response) {
  return response.arrayBuffer();
}
|
|
|
|
/**
 * Executes one HTTP request via fetch (node-fetch by default, or a custom
 * `requestOptions.request.fetch`) and normalizes the outcome:
 *   - resolves with `{ status, url, headers, data }` on success
 *   - resolves with no data for 204/205 and successful HEAD requests
 *   - rejects with a RequestError for 304, any status >= 400, and network
 *     failures (network failures are reported with status 500)
 * Response bodies are decoded by content-type: JSON, text, or ArrayBuffer.
 */
function fetchWrapper(requestOptions) {
  // Serialize plain-object/array bodies; other body types pass through as-is.
  if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {
    requestOptions.body = JSON.stringify(requestOptions.body);
  }

  // Captured from the response so the later .then/.catch stages can use them.
  let headers = {};
  let status;
  let url;
  const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;
  return fetch(requestOptions.url, Object.assign({
    method: requestOptions.method,
    body: requestOptions.body,
    headers: requestOptions.headers,
    redirect: requestOptions.redirect
  }, // `requestOptions.request.agent` type is incompatible
  // see https://github.com/octokit/types.ts/pull/264
  requestOptions.request)).then(response => {
    url = response.url;
    status = response.status;

    // Flatten the Headers iterable into a plain object.
    for (const keyAndValue of response.headers) {
      headers[keyAndValue[0]] = keyAndValue[1];
    }

    // 204 No Content / 205 Reset Content carry no body.
    if (status === 204 || status === 205) {
      return;
    } // GitHub API returns 200 for HEAD requests

    if (requestOptions.method === "HEAD") {
      if (status < 400) {
        return;
      }

      // HEAD has no body, so the status text is the best message available.
      throw new requestError.RequestError(response.statusText, status, {
        headers,
        request: requestOptions
      });
    }

    // 304 is surfaced as an error so conditional-request callers can catch it.
    if (status === 304) {
      throw new requestError.RequestError("Not modified", status, {
        headers,
        request: requestOptions
      });
    }

    if (status >= 400) {
      // Read the error body and fold any structured details into the error.
      return response.text().then(message => {
        const error = new requestError.RequestError(message, status, {
          headers,
          request: requestOptions
        });

        try {
          let responseBody = JSON.parse(error.message);
          Object.assign(error, responseBody);
          let errors = responseBody.errors; // Assumption `errors` would always be in Array format

          error.message = error.message + ": " + errors.map(JSON.stringify).join(", ");
        } catch (e) {// ignore, see octokit/rest.js#684
        }

        throw error;
      });
    }

    // Success: pick a body decoder based on the content type.
    const contentType = response.headers.get("content-type");

    if (/application\/json/.test(contentType)) {
      return response.json();
    }

    if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) {
      return response.text();
    }

    // Anything else (binary downloads etc.) is returned as an ArrayBuffer.
    return getBufferResponse(response);
  }).then(data => {
    return {
      status,
      url,
      headers,
      data
    };
  }).catch(error => {
    // RequestErrors thrown above pass through untouched.
    if (error instanceof requestError.RequestError) {
      throw error;
    }

    // Wrap network/parse failures so callers always see a RequestError.
    throw new requestError.RequestError(error.message, 500, {
      headers,
      request: requestOptions
    });
  });
}
|
|
|
|
/**
 * Creates a `request` function bound to the given endpoint defaults.
 * The returned function:
 *   - merges per-call route/parameters with the defaults and dispatches
 *     through fetchWrapper
 *   - honors a `request.hook` option by handing it a hook-free `request`
 *     plus the merged options, letting plugins intercept the call
 *   - carries `.endpoint` and `.defaults(...)` so further-specialized
 *     instances can be derived recursively.
 */
function withDefaults(oldEndpoint, newDefaults) {
  // Note: this local shadows the module-level `endpoint` require.
  const endpoint = oldEndpoint.defaults(newDefaults);

  const newApi = function (route, parameters) {
    const endpointOptions = endpoint.merge(route, parameters);

    // Fast path: no hook registered, dispatch directly.
    if (!endpointOptions.request || !endpointOptions.request.hook) {
      return fetchWrapper(endpoint.parse(endpointOptions));
    }

    // Hook path: give the hook a plain request function (without re-running
    // the hook) so it can re-issue or modify the call.
    const request = (route, parameters) => {
      return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));
    };

    Object.assign(request, {
      endpoint,
      defaults: withDefaults.bind(null, endpoint)
    });
    return endpointOptions.request.hook(request, endpointOptions);
  };

  return Object.assign(newApi, {
    endpoint,
    defaults: withDefaults.bind(null, endpoint)
  });
}
|
|
|
|
// Root `request` instance with the default User-Agent header; consumers
// derive customized instances via `request.defaults(...)`.
const request = withDefaults(endpoint.endpoint, {
  headers: {
    "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`
  }
});

exports.request = request;
//# sourceMappingURL=index.js.map
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 6761:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
const Utils = __nccwpck_require__(5182);
const pth = __nccwpck_require__(5622);
const ZipEntry = __nccwpck_require__(4057);
const ZipFile = __nccwpck_require__(7744);

const fs = Utils.FileSystem.require();
// NOTE(review): `pth` is Node's path module, which has not provided
// `existsSync` since Node 0.8 — this fallback is effectively dead on any
// modern runtime; confirm before relying on it.
fs.existsSync = fs.existsSync || pth.existsSync;

// Base options for a new archive instance; overridden by constructor input.
const defaultOptions = {
    // read entries during load (initial loading may be slower)
    readEntries: false,
    // default method is none
    method: Utils.Constants.NONE
}
|
|
|
|
/**
 * Normalizes a zip entry name to a safe, "."-relative path: backslashes
 * become forward slashes, and "../" traversal is collapsed by normalizing
 * the name as though it were an absolute POSIX path first.
 * @param {string} p - raw entry name from the archive
 * @returns {string} sanitized relative path
 */
function canonical(p) {
    const unixified = p.split("\\").join("/");
    // Prefixing "/" makes posix.normalize treat the path as absolute, so
    // ".." segments cannot escape upward; joining with "." re-roots it.
    const collapsed = pth.posix.normalize("/" + unixified);
    return pth.join(".", collapsed);
}
|
|
|
|
/**
 * adm-zip public factory. Accepts either:
 *   - a file path (string) of an existing zip to load,
 *   - a Buffer/Uint8Array containing zip data,
 *   - an options object (optionally with `input`), or
 *   - nothing, to start an empty in-memory archive.
 * Returns an object exposing read/add/extract/write operations backed by a
 * private ZipFile instance.
 */
module.exports = function (/**String*/input, /** object */options) {
    let inBuffer = null;

    // create object based default options, allowing them to be overwritten
    const opts = Object.assign(Object.create( null ), defaultOptions);

    // test input variable
    if (input && "object" === typeof input){
        // if value is not buffer we accept it to be object with options
        if (!(input instanceof Uint8Array)){
            Object.assign(opts, input);
            input = opts.input ? opts.input : undefined;
            if (opts.input) delete opts.input;
        }

        // if input is buffer
        if (input instanceof Uint8Array){
            inBuffer = input;
            opts.method = Utils.Constants.BUFFER;
            input = undefined;
        }
    }

    // assign options
    Object.assign(opts, options);

    // if input is file name we retrieve its content
    if (input && "string" === typeof input) {
        // load zip file
        if (fs.existsSync(input)) {
            opts.method = Utils.Constants.FILE;
            opts.filename = input;
            inBuffer = fs.readFileSync(input);
        } else {
            throw new Error(Utils.Errors.INVALID_FILENAME);
        }
    }

    // create variable
    const _zip = new ZipFile(inBuffer, opts);

    // Resolves `name` against `prefix` while refusing to escape it (zip-slip
    // protection): tries progressively shorter suffixes of `name` until one
    // stays inside `prefix`; falls back to just the basename.
    function sanitize(prefix, name) {
        prefix = pth.resolve(pth.normalize(prefix));
        var parts = name.split('/');
        for (var i = 0, l = parts.length; i < l; i++) {
            var path = pth.normalize(pth.join(prefix, parts.slice(i, l).join(pth.sep)));
            if (path.indexOf(prefix) === 0) {
                return path;
            }
        }
        return pth.normalize(pth.join(prefix, pth.basename(name)));
    }

    // Looks up a ZipEntry by name (string) or by an entry-shaped object;
    // returns null when not found or when no archive is loaded.
    function getEntry(/**Object*/entry) {
        if (entry && _zip) {
            var item;
            // If entry was given as a file name
            if (typeof entry === "string")
                item = _zip.getEntry(entry);
            // if entry was given as a ZipEntry object
            if (typeof entry === "object" && typeof entry.entryName !== "undefined" && typeof entry.header !== "undefined")
                item = _zip.getEntry(entry.entryName);

            if (item) {
                return item;
            }
        }
        return null;
    }

    // Normalizes a user-supplied in-zip directory path to the "a/b/" form
    // used internally (forward slashes, trailing separator).
    function fixPath(zipPath){
        const { join, normalize, sep } = pth.posix;
        // convert windows file separators and normalize
        return join(".", normalize(sep + zipPath.split("\\").join(sep) + sep));
    }

    return {
        /**
         * Extracts the given entry from the archive and returns the content as a Buffer object
         * @param entry ZipEntry object or String with the full path of the entry
         * @param pass optional password for encrypted entries
         *
         * @return Buffer or Null in case of error
         */
        readFile: function (/**Object*/entry, /*String, Buffer*/pass) {
            var item = getEntry(entry);
            return item && item.getData(pass) || null;
        },

        /**
         * Asynchronous readFile
         * @param entry ZipEntry object or String with the full path of the entry
         * @param callback called as callback(data, error)
         *
         * @return Buffer or Null in case of error
         */
        readFileAsync: function (/**Object*/entry, /**Function*/callback) {
            var item = getEntry(entry);
            if (item) {
                item.getDataAsync(callback);
            } else {
                callback(null, "getEntry failed for:" + entry)
            }
        },

        /**
         * Extracts the given entry from the archive and returns the content as plain text in the given encoding
         * @param entry ZipEntry object or String with the full path of the entry
         * @param encoding Optional. If no encoding is specified utf8 is used
         *
         * @return String (empty string when the entry is missing or has no data)
         */
        readAsText: function (/**Object*/entry, /**String=*/encoding) {
            var item = getEntry(entry);
            if (item) {
                var data = item.getData();
                if (data && data.length) {
                    return data.toString(encoding || "utf8");
                }
            }
            return "";
        },

        /**
         * Asynchronous readAsText
         * @param entry ZipEntry object or String with the full path of the entry
         * @param callback
         * @param encoding Optional. If no encoding is specified utf8 is used
         *
         * @return String
         */
        readAsTextAsync: function (/**Object*/entry, /**Function*/callback, /**String=*/encoding) {
            var item = getEntry(entry);
            if (item) {
                item.getDataAsync(function (data, err) {
                    if (err) {
                        callback(data, err);
                        return;
                    }

                    if (data && data.length) {
                        callback(data.toString(encoding || "utf8"));
                    } else {
                        callback("");
                    }
                })
            } else {
                callback("");
            }
        },

        /**
         * Remove the entry from the file or the entry and all it's nested directories and files if the given entry is a directory
         *
         * @param entry
         */
        deleteFile: function (/**Object*/entry) { // @TODO: test deleteFile
            var item = getEntry(entry);
            if (item) {
                _zip.deleteEntry(item.entryName);
            }
        },

        /**
         * Adds a comment to the zip. The zip must be rewritten after adding the comment.
         *
         * @param comment
         */
        addZipComment: function (/**String*/comment) { // @TODO: test addZipComment
            _zip.comment = comment;
        },

        /**
         * Returns the zip comment
         *
         * @return String
         */
        getZipComment: function () {
            return _zip.comment || '';
        },

        /**
         * Adds a comment to a specified zipEntry. The zip must be rewritten after adding the comment
         * The comment cannot exceed 65535 characters in length
         *
         * @param entry
         * @param comment
         */
        addZipEntryComment: function (/**Object*/entry, /**String*/comment) {
            var item = getEntry(entry);
            if (item) {
                item.comment = comment;
            }
        },

        /**
         * Returns the comment of the specified entry
         *
         * @param entry
         * @return String
         */
        getZipEntryComment: function (/**Object*/entry) {
            var item = getEntry(entry);
            if (item) {
                return item.comment || '';
            }
            return ''
        },

        /**
         * Updates the content of an existing entry inside the archive. The zip must be rewritten after updating the content
         *
         * @param entry
         * @param content
         */
        updateFile: function (/**Object*/entry, /**Buffer*/content) {
            var item = getEntry(entry);
            if (item) {
                item.setData(content);
            }
        },

        /**
         * Adds a file from the disk to the archive
         *
         * @param localPath File to add to zip
         * @param zipPath Optional path inside the zip
         * @param zipName Optional name for the file
         * @param comment Optional comment stored with the entry
         */
        addLocalFile: function (/**String*/localPath, /**String=*/zipPath, /**String=*/zipName, /**String*/comment) {
            if (fs.existsSync(localPath)) {
                // fix ZipPath
                zipPath = (zipPath) ? fixPath(zipPath) : "";

                // p - local file name
                var p = localPath.split("\\").join("/").split("/").pop();

                // add file name into zippath
                zipPath += (zipName) ? zipName : p;

                // read file attributes
                const _attr = fs.statSync(localPath);

                // add file into zip file
                this.addFile(zipPath, fs.readFileSync(localPath), comment, _attr)
            } else {
                throw new Error(Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath));
            }
        },

        /**
         * Adds a local directory and all its nested files and directories to the archive
         *
         * @param localPath
         * @param zipPath optional path inside zip
         * @param filter optional RegExp or Function if files match will
         *               be included.
         */
        addLocalFolder: function (/**String*/localPath, /**String=*/zipPath, /**=RegExp|Function*/filter) {
            // Prepare filter
            if (filter instanceof RegExp) { // if filter is RegExp wrap it
                filter = (function (rx){
                    return function (filename) {
                        return rx.test(filename);
                    }
                })(filter);
            } else if ('function' !== typeof filter) { // if filter is not function we will replace it
                filter = function () {
                    return true;
                };
            }

            // fix ZipPath
            zipPath = (zipPath) ? fixPath(zipPath) : "";

            // normalize the path first
            localPath = pth.normalize(localPath);

            if (fs.existsSync(localPath)) {

                var items = Utils.findFiles(localPath),
                    self = this;

                if (items.length) {
                    items.forEach(function (filepath) {
                        var p = pth.relative(localPath, filepath).split("\\").join("/"); //windows fix
                        if (filter(p)) {
                            var stats = fs.statSync(filepath);
                            if (stats.isFile()) {
                                self.addFile(zipPath + p, fs.readFileSync(filepath), "", stats);
                            } else {
                                // directories are stored as empty entries with a trailing "/"
                                self.addFile(zipPath + p + '/', Buffer.alloc(0), "", stats);
                            }
                        }
                    });
                }
            } else {
                throw new Error(Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath));
            }
        },

        /**
         * Asynchronous addLocalFolder (note: despite the original "Asynchronous
         * addLocalFile" wording, this walks a folder).
         * @param localPath
         * @param callback called as callback(done, error)
         * @param zipPath optional path inside zip
         * @param filter optional RegExp or Function if files match will
         *               be included.
         */
        addLocalFolderAsync: function (/*String*/localPath, /*Function*/callback, /*String*/zipPath, /*RegExp|Function*/filter) {
            if (filter instanceof RegExp) {
                filter = (function (rx) {
                    return function (filename) {
                        return rx.test(filename);
                    };
                })(filter);
            } else if ("function" !== typeof filter) {
                filter = function () {
                    return true;
                };
            }

            // fix ZipPath
            zipPath = zipPath ? fixPath(zipPath) : "";

            // normalize the path first
            localPath = pth.normalize(localPath);

            var self = this;
            fs.open(localPath, 'r', function (err) {
                if (err && err.code === 'ENOENT') {
                    callback(undefined, Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath));
                } else if (err) {
                    callback(undefined, err);
                } else {
                    var items = Utils.findFiles(localPath);
                    var i = -1;

                    // Sequentially processes one file per iteration; recursion via
                    // the read callbacks keeps ordering deterministic.
                    var next = function () {
                        i += 1;
                        if (i < items.length) {
                            var filepath = items[i];
                            var p = pth.relative(localPath, filepath).split("\\").join("/"); //windows fix
                            p = p.normalize('NFD').replace(/[\u0300-\u036f]/g, '').replace(/[^\x20-\x7E]/g, '') // accent fix
                            if (filter(p)) {
                                fs.stat(filepath, function (er0, stats) {
                                    if (er0) callback(undefined, er0);
                                    if (stats.isFile()) {
                                        fs.readFile(filepath, function (er1, data) {
                                            if (er1) {
                                                callback(undefined, er1);
                                            } else {
                                                self.addFile(zipPath + p, data, "", stats);
                                                next();
                                            }
                                        });
                                    } else {
                                        self.addFile(zipPath + p + "/", Buffer.alloc(0), "", stats);
                                        next();
                                    }
                                });
                            } else {
                                next();
                            }

                        } else {
                            // all items processed
                            callback(true, undefined);
                        }
                    }

                    next();
                }
            });
        },

        // Promise wrapper around addLocalFolderAsync; resolves with this
        // archive object once the folder has been fully added.
        addLocalFolderPromise: function (/*String*/ localPath, /* object */ options) {
            return new Promise((resolve, reject) => {
                const { filter, zipPath } = Object.assign({}, options);
                this.addLocalFolderAsync(localPath,
                    (done, err) => {
                        if (err) reject(err);
                        if (done) resolve(this);
                    }, zipPath, filter
                );
            });
        },

        /**
         * Allows you to create a entry (file or directory) in the zip file.
         * If you want to create a directory the entryName must end in / and a null buffer should be provided.
         * Comment and attributes are optional
         *
         * @param {string} entryName
         * @param {Buffer | string} content - file content as buffer or utf8 coded string
         * @param {string} comment - file comment
         * @param {number | object} attr - number as unix file permissions, object as filesystem Stats object
         */
        addFile: function (/**String*/ entryName, /**Buffer*/ content, /**String*/ comment, /**Number*/ attr) {
            let entry = getEntry(entryName);
            const update = entry != null;

            // prepare new entry
            if (!update){
                entry = new ZipEntry();
                entry.entryName = entryName;
            }
            entry.comment = comment || "";

            const isStat = ('object' === typeof attr) && (attr instanceof fs.Stats);

            // last modification time from file stats
            if (isStat){
                entry.header.time = attr.mtime;
            }

            // Set file attribute
            var fileattr = (entry.isDirectory) ? 0x10 : 0; // (MS-DOS directory flag)

            // extended attributes field for Unix
            if('win32' !== process.platform){
                // set file type either S_IFDIR / S_IFREG
                let unix = (entry.isDirectory) ? 0x4000 : 0x8000;

                if (isStat) { // File attributes from file stats
                    unix |= (0xfff & attr.mode);
                }else if ('number' === typeof attr){ // attr from given attr values
                    unix |= (0xfff & attr);
                }else{ // Default values:
                    unix |= (entry.isDirectory) ? 0o755 : 0o644; // permissions (drwxr-xr-x) or (-r-wr--r--)
                }

                fileattr = (fileattr | (unix << 16)) >>> 0; // add attributes
            }

            entry.attr = fileattr;

            entry.setData(content);
            if (!update) _zip.setEntry(entry);
        },

        /**
         * Returns an array of ZipEntry objects representing the files and folders inside the archive
         *
         * @return Array
         */
        getEntries: function () {
            if (_zip) {
                return _zip.entries;
            } else {
                return [];
            }
        },

        /**
         * Returns a ZipEntry object representing the file or folder specified by ``name``.
         *
         * @param name
         * @return ZipEntry
         */
        getEntry: function (/**String*/name) {
            return getEntry(name);
        },

        // Number of entries currently in the archive.
        getEntryCount: function() {
            return _zip.getEntryCount();
        },

        // Iterates all entries, invoking callback(entry) for each.
        forEach: function(callback) {
            return _zip.forEach(callback);
        },

        /**
         * Extracts the given entry to the given targetPath
         * If the entry is a directory inside the archive, the entire directory and it's subdirectories will be extracted
         *
         * @param entry ZipEntry object or String with the full path of the entry
         * @param targetPath Target folder where to write the file
         * @param maintainEntryPath If maintainEntryPath is true and the entry is inside a folder, the entry folder
         *                          will be created in targetPath as well. Default is TRUE
         * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true.
         *                  Default is FALSE
         * @param outFileName String If set will override the filename of the extracted file (Only works if the entry is a file)
         *
         * @return Boolean
         */
        extractEntryTo: function (/**Object*/entry, /**String*/targetPath, /**Boolean*/maintainEntryPath, /**Boolean*/overwrite, /**String**/outFileName) {
            overwrite = overwrite || false;
            maintainEntryPath = typeof maintainEntryPath === "undefined" ? true : maintainEntryPath;

            var item = getEntry(entry);
            if (!item) {
                throw new Error(Utils.Errors.NO_ENTRY);
            }

            var entryName = canonical(item.entryName);

            var target = sanitize(targetPath,outFileName && !item.isDirectory ? outFileName : (maintainEntryPath ? entryName : pth.basename(entryName)));

            if (item.isDirectory) {
                target = pth.resolve(target, "..");
                var children = _zip.getEntryChildren(item);
                children.forEach(function (child) {
                    if (child.isDirectory) return;
                    var content = child.getData();
                    if (!content) {
                        throw new Error(Utils.Errors.CANT_EXTRACT_FILE);
                    }
                    var name = canonical(child.entryName)
                    var childName = sanitize(targetPath, maintainEntryPath ? name : pth.basename(name));
                    // The reverse operation for attr depend on method addFile()
                    var fileAttr = child.attr ? (((child.attr >>> 0) | 0) >> 16) & 0xfff : 0;
                    Utils.writeFileTo(childName, content, overwrite, fileAttr);
                });
                return true;
            }

            var content = item.getData();
            if (!content) throw new Error(Utils.Errors.CANT_EXTRACT_FILE);

            if (fs.existsSync(target) && !overwrite) {
                throw new Error(Utils.Errors.CANT_OVERRIDE);
            }
            // The reverse operation for attr depend on method addFile()
            var fileAttr = item.attr ? (((item.attr >>> 0) | 0) >> 16) & 0xfff : 0;
            Utils.writeFileTo(target, content, overwrite, fileAttr);

            return true;
        },

        /**
         * Test the archive: returns false if any entry cannot be decompressed.
         * @param pass optional password for encrypted entries
         */
        test: function (pass) {
            if (!_zip) {
                return false;
            }

            for (var entry in _zip.entries) {
                // NOTE(review): `for...in` yields keys, so `entry.isDirectory`
                // below is checked on a string and is always falsy — directories
                // are not actually skipped here; verify against upstream.
                try {
                    if (entry.isDirectory) {
                        continue;
                    }
                    var content = _zip.entries[entry].getData(pass);
                    if (!content) {
                        return false;
                    }
                } catch (err) {
                    return false;
                }
            }
            return true;
        },

        /**
         * Extracts the entire archive to the given location
         *
         * @param targetPath Target location
         * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true.
         *                  Default is FALSE
         * @param pass optional password for encrypted entries
         */
        extractAllTo: function (/**String*/targetPath, /**Boolean*/overwrite, /*String, Buffer*/pass) {
            overwrite = overwrite || false;
            if (!_zip) {
                throw new Error(Utils.Errors.NO_ZIP);
            }
            _zip.entries.forEach(function (entry) {
                var entryName = sanitize(targetPath, canonical(entry.entryName.toString()));
                if (entry.isDirectory) {
                    Utils.makeDir(entryName);
                    return;
                }
                var content = entry.getData(pass);
                if (!content) {
                    throw new Error(Utils.Errors.CANT_EXTRACT_FILE);
                }
                // The reverse operation for attr depend on method addFile()
                var fileAttr = entry.attr ? (((entry.attr >>> 0) | 0) >> 16) & 0xfff : 0;
                Utils.writeFileTo(entryName, content, overwrite, fileAttr);
                try {
                    // restore the original modification time on the extracted file
                    fs.utimesSync(entryName, entry.header.time, entry.header.time)
                } catch (err) {
                    throw new Error(Utils.Errors.CANT_EXTRACT_FILE);
                }
            })
        },

        /**
         * Asynchronous extractAllTo
         *
         * @param targetPath Target location
         * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true.
         *                  Default is FALSE
         * @param callback called with an Error on failure, undefined on success
         */
        extractAllToAsync: function (/**String*/targetPath, /**Boolean*/overwrite, /**Function*/callback) {
            if (!callback) {
                callback = function() {}
            }
            overwrite = overwrite || false;
            if (!_zip) {
                callback(new Error(Utils.Errors.NO_ZIP));
                return;
            }

            // `i` counts down as entries finish; it is zeroed to short-circuit
            // the remaining callbacks once an error has been reported.
            var entries = _zip.entries;
            var i = entries.length;
            entries.forEach(function (entry) {
                if (i <= 0) return; // Had an error already

                var entryName = pth.normalize(canonical(entry.entryName.toString()));

                if (entry.isDirectory) {
                    Utils.makeDir(sanitize(targetPath, entryName));
                    if (--i === 0)
                        callback(undefined);
                    return;
                }
                entry.getDataAsync(function (content, err) {
                    if (i <= 0) return;
                    if (err) {
                        callback(new Error(err));
                        return;
                    }
                    if (!content) {
                        i = 0;
                        callback(new Error(Utils.Errors.CANT_EXTRACT_FILE));
                        return;
                    }

                    // The reverse operation for attr depend on method addFile()
                    var fileAttr = entry.attr ? (((entry.attr >>> 0) | 0) >> 16) & 0xfff : 0;
                    Utils.writeFileToAsync(sanitize(targetPath, entryName), content, overwrite, fileAttr, function (succ) {
                        try {
                            fs.utimesSync(pth.resolve(targetPath, entryName), entry.header.time, entry.header.time);
                        } catch (err) {
                            callback(new Error('Unable to set utimes'));
                        }
                        if (i <= 0) return;
                        if (!succ) {
                            i = 0;
                            callback(new Error('Unable to write'));
                            return;
                        }
                        if (--i === 0)
                            callback(undefined);
                    });
                });
            })
        },

        /**
         * Writes the newly created zip file to disk at the specified location or if a zip was opened and no ``targetFileName`` is provided, it will overwrite the opened zip
         *
         * @param targetFileName
         * @param callback
         */
        writeZip: function (/**String*/targetFileName, /**Function*/callback) {
            if (arguments.length === 1) {
                if (typeof targetFileName === "function") {
                    callback = targetFileName;
                    targetFileName = "";
                }
            }

            // fall back to the filename the archive was opened from
            if (!targetFileName && opts.filename) {
                targetFileName = opts.filename;
            }
            if (!targetFileName) return;

            var zipData = _zip.compressToBuffer();
            if (zipData) {
                var ok = Utils.writeFileTo(targetFileName, zipData, true);
                if (typeof callback === 'function') callback(!ok ? new Error("failed") : null, "");
            }
        },

        // Promise wrapper around writing the archive to disk.
        writeZipPromise: function (/**String*/ targetFileName, /* object */ options) {
            const { overwrite, perm } = Object.assign({ overwrite: true }, options);

            return new Promise((resolve, reject) => {
                // find file name
                if (!targetFileName && opts.filename) targetFileName = opts.filename;
                if (!targetFileName) reject("ADM-ZIP: ZIP File Name Missing");

                this.toBufferPromise().then((zipData) => {
                    const ret = (done) => (done ? resolve(done) : reject("ADM-ZIP: Wasn't able to write zip file"));
                    Utils.writeFileToAsync(targetFileName, zipData, overwrite, perm, ret);
                }, reject);
            });
        },

        // Promise wrapper around the async buffer compression.
        toBufferPromise: function () {
            return new Promise((resolve, reject) => {
                _zip.toAsyncBuffer(resolve, reject);
            });
        },

        /**
         * Returns the content of the entire zip file as a Buffer object
         * (or null when called with callbacks, in which case the result is
         * delivered asynchronously via onSuccess).
         *
         * @return Buffer
         */
        toBuffer: function (/**Function=*/onSuccess, /**Function=*/onFail, /**Function=*/onItemStart, /**Function=*/onItemEnd) {
            // NOTE(review): this assignment shadows Object.prototype.valueOf
            // with the number 2; its purpose is not evident from this file —
            // looks like leftover debugging, confirm against upstream adm-zip.
            this.valueOf = 2;
            if (typeof onSuccess === "function") {
                _zip.toAsyncBuffer(onSuccess, onFail, onItemStart, onItemEnd);
                return null;
            }
            return _zip.compressToBuffer()
        }
    }
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 9032:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
var Utils = __nccwpck_require__(5182),
|
|
Constants = Utils.Constants;
|
|
|
|
/* The central directory file header */
|
|
module.exports = function () {
|
|
var _verMade = 0x14,
|
|
_version = 0x0A,
|
|
_flags = 0,
|
|
_method = 0,
|
|
_time = 0,
|
|
_crc = 0,
|
|
_compressedSize = 0,
|
|
_size = 0,
|
|
_fnameLen = 0,
|
|
_extraLen = 0,
|
|
|
|
_comLen = 0,
|
|
_diskStart = 0,
|
|
_inattr = 0,
|
|
_attr = 0,
|
|
_offset = 0;
|
|
|
|
switch(process.platform){
|
|
case 'win32':
|
|
_verMade |= 0x0A00;
|
|
default:
|
|
_verMade |= 0x0300;
|
|
}
|
|
|
|
var _dataHeader = {};
|
|
|
|
function setTime(val) {
|
|
val = new Date(val);
|
|
_time = (val.getFullYear() - 1980 & 0x7f) << 25 // b09-16 years from 1980
|
|
| (val.getMonth() + 1) << 21 // b05-08 month
|
|
| val.getDate() << 16 // b00-04 hour
|
|
|
|
// 2 bytes time
|
|
| val.getHours() << 11 // b11-15 hour
|
|
| val.getMinutes() << 5 // b05-10 minute
|
|
| val.getSeconds() >> 1; // b00-04 seconds divided by 2
|
|
}
|
|
|
|
setTime(+new Date());
|
|
|
|
return {
|
|
get made () { return _verMade; },
|
|
set made (val) { _verMade = val; },
|
|
|
|
get version () { return _version; },
|
|
set version (val) { _version = val },
|
|
|
|
get flags () { return _flags },
|
|
set flags (val) { _flags = val; },
|
|
|
|
get method () { return _method; },
|
|
set method (val) {
|
|
switch (val){
|
|
case Constants.STORED:
|
|
this.version = 10;
|
|
case Constants.DEFLATED:
|
|
default:
|
|
this.version = 20;
|
|
}
|
|
_method = val;
|
|
},
|
|
|
|
get time () { return new Date(
|
|
((_time >> 25) & 0x7f) + 1980,
|
|
((_time >> 21) & 0x0f) - 1,
|
|
(_time >> 16) & 0x1f,
|
|
(_time >> 11) & 0x1f,
|
|
(_time >> 5) & 0x3f,
|
|
(_time & 0x1f) << 1
|
|
);
|
|
},
|
|
set time (val) {
|
|
setTime(val);
|
|
},
|
|
|
|
get crc () { return _crc; },
|
|
set crc (val) { _crc = val; },
|
|
|
|
get compressedSize () { return _compressedSize; },
|
|
set compressedSize (val) { _compressedSize = val; },
|
|
|
|
get size () { return _size; },
|
|
set size (val) { _size = val; },
|
|
|
|
get fileNameLength () { return _fnameLen; },
|
|
set fileNameLength (val) { _fnameLen = val; },
|
|
|
|
get extraLength () { return _extraLen },
|
|
set extraLength (val) { _extraLen = val; },
|
|
|
|
get commentLength () { return _comLen },
|
|
set commentLength (val) { _comLen = val },
|
|
|
|
get diskNumStart () { return _diskStart },
|
|
set diskNumStart (val) { _diskStart = val },
|
|
|
|
get inAttr () { return _inattr },
|
|
set inAttr (val) { _inattr = val },
|
|
|
|
get attr () { return _attr },
|
|
set attr (val) { _attr = val },
|
|
|
|
get offset () { return _offset },
|
|
set offset (val) { _offset = val },
|
|
|
|
get encripted () { return (_flags & 1) === 1 },
|
|
|
|
get entryHeaderSize () {
|
|
return Constants.CENHDR + _fnameLen + _extraLen + _comLen;
|
|
},
|
|
|
|
get realDataOffset () {
|
|
return _offset + Constants.LOCHDR + _dataHeader.fnameLen + _dataHeader.extraLen;
|
|
},
|
|
|
|
get dataHeader () {
|
|
return _dataHeader;
|
|
},
|
|
|
|
loadDataHeaderFromBinary : function(/*Buffer*/input) {
|
|
var data = input.slice(_offset, _offset + Constants.LOCHDR);
|
|
// 30 bytes and should start with "PK\003\004"
|
|
if (data.readUInt32LE(0) !== Constants.LOCSIG) {
|
|
throw new Error(Utils.Errors.INVALID_LOC);
|
|
}
|
|
_dataHeader = {
|
|
// version needed to extract
|
|
version : data.readUInt16LE(Constants.LOCVER),
|
|
// general purpose bit flag
|
|
flags : data.readUInt16LE(Constants.LOCFLG),
|
|
// compression method
|
|
method : data.readUInt16LE(Constants.LOCHOW),
|
|
// modification time (2 bytes time, 2 bytes date)
|
|
time : data.readUInt32LE(Constants.LOCTIM),
|
|
// uncompressed file crc-32 value
|
|
crc : data.readUInt32LE(Constants.LOCCRC),
|
|
// compressed size
|
|
compressedSize : data.readUInt32LE(Constants.LOCSIZ),
|
|
// uncompressed size
|
|
size : data.readUInt32LE(Constants.LOCLEN),
|
|
// filename length
|
|
fnameLen : data.readUInt16LE(Constants.LOCNAM),
|
|
// extra field length
|
|
extraLen : data.readUInt16LE(Constants.LOCEXT)
|
|
}
|
|
},
|
|
|
|
loadFromBinary : function(/*Buffer*/data) {
|
|
// data should be 46 bytes and start with "PK 01 02"
|
|
if (data.length !== Constants.CENHDR || data.readUInt32LE(0) !== Constants.CENSIG) {
|
|
throw new Error(Utils.Errors.INVALID_CEN);
|
|
}
|
|
// version made by
|
|
_verMade = data.readUInt16LE(Constants.CENVEM);
|
|
// version needed to extract
|
|
_version = data.readUInt16LE(Constants.CENVER);
|
|
// encrypt, decrypt flags
|
|
_flags = data.readUInt16LE(Constants.CENFLG);
|
|
// compression method
|
|
_method = data.readUInt16LE(Constants.CENHOW);
|
|
// modification time (2 bytes time, 2 bytes date)
|
|
_time = data.readUInt32LE(Constants.CENTIM);
|
|
// uncompressed file crc-32 value
|
|
_crc = data.readUInt32LE(Constants.CENCRC);
|
|
// compressed size
|
|
_compressedSize = data.readUInt32LE(Constants.CENSIZ);
|
|
// uncompressed size
|
|
_size = data.readUInt32LE(Constants.CENLEN);
|
|
// filename length
|
|
_fnameLen = data.readUInt16LE(Constants.CENNAM);
|
|
// extra field length
|
|
_extraLen = data.readUInt16LE(Constants.CENEXT);
|
|
// file comment length
|
|
_comLen = data.readUInt16LE(Constants.CENCOM);
|
|
// volume number start
|
|
_diskStart = data.readUInt16LE(Constants.CENDSK);
|
|
// internal file attributes
|
|
_inattr = data.readUInt16LE(Constants.CENATT);
|
|
// external file attributes
|
|
_attr = data.readUInt32LE(Constants.CENATX);
|
|
// LOC header offset
|
|
_offset = data.readUInt32LE(Constants.CENOFF);
|
|
},
|
|
|
|
dataHeaderToBinary : function() {
|
|
// LOC header size (30 bytes)
|
|
var data = Buffer.alloc(Constants.LOCHDR);
|
|
// "PK\003\004"
|
|
data.writeUInt32LE(Constants.LOCSIG, 0);
|
|
// version needed to extract
|
|
data.writeUInt16LE(_version, Constants.LOCVER);
|
|
// general purpose bit flag
|
|
data.writeUInt16LE(_flags, Constants.LOCFLG);
|
|
// compression method
|
|
data.writeUInt16LE(_method, Constants.LOCHOW);
|
|
// modification time (2 bytes time, 2 bytes date)
|
|
data.writeUInt32LE(_time, Constants.LOCTIM);
|
|
// uncompressed file crc-32 value
|
|
data.writeUInt32LE(_crc, Constants.LOCCRC);
|
|
// compressed size
|
|
data.writeUInt32LE(_compressedSize, Constants.LOCSIZ);
|
|
// uncompressed size
|
|
data.writeUInt32LE(_size, Constants.LOCLEN);
|
|
// filename length
|
|
data.writeUInt16LE(_fnameLen, Constants.LOCNAM);
|
|
// extra field length
|
|
data.writeUInt16LE(_extraLen, Constants.LOCEXT);
|
|
return data;
|
|
},
|
|
|
|
entryHeaderToBinary : function() {
|
|
// CEN header size (46 bytes)
|
|
var data = Buffer.alloc(Constants.CENHDR + _fnameLen + _extraLen + _comLen);
|
|
// "PK\001\002"
|
|
data.writeUInt32LE(Constants.CENSIG, 0);
|
|
// version made by
|
|
data.writeUInt16LE(_verMade, Constants.CENVEM);
|
|
// version needed to extract
|
|
data.writeUInt16LE(_version, Constants.CENVER);
|
|
// encrypt, decrypt flags
|
|
data.writeUInt16LE(_flags, Constants.CENFLG);
|
|
// compression method
|
|
data.writeUInt16LE(_method, Constants.CENHOW);
|
|
// modification time (2 bytes time, 2 bytes date)
|
|
data.writeUInt32LE(_time, Constants.CENTIM);
|
|
// uncompressed file crc-32 value
|
|
data.writeUInt32LE(_crc, Constants.CENCRC);
|
|
// compressed size
|
|
data.writeUInt32LE(_compressedSize, Constants.CENSIZ);
|
|
// uncompressed size
|
|
data.writeUInt32LE(_size, Constants.CENLEN);
|
|
// filename length
|
|
data.writeUInt16LE(_fnameLen, Constants.CENNAM);
|
|
// extra field length
|
|
data.writeUInt16LE(_extraLen, Constants.CENEXT);
|
|
// file comment length
|
|
data.writeUInt16LE(_comLen, Constants.CENCOM);
|
|
// volume number start
|
|
data.writeUInt16LE(_diskStart, Constants.CENDSK);
|
|
// internal file attributes
|
|
data.writeUInt16LE(_inattr, Constants.CENATT);
|
|
// external file attributes
|
|
data.writeUInt32LE(_attr, Constants.CENATX);
|
|
// LOC header offset
|
|
data.writeUInt32LE(_offset, Constants.CENOFF);
|
|
// fill all with
|
|
data.fill(0x00, Constants.CENHDR);
|
|
return data;
|
|
},
|
|
|
|
toString : function() {
|
|
return '{\n' +
|
|
'\t"made" : ' + _verMade + ",\n" +
|
|
'\t"version" : ' + _version + ",\n" +
|
|
'\t"flags" : ' + _flags + ",\n" +
|
|
'\t"method" : ' + Utils.methodToString(_method) + ",\n" +
|
|
'\t"time" : ' + this.time + ",\n" +
|
|
'\t"crc" : 0x' + _crc.toString(16).toUpperCase() + ",\n" +
|
|
'\t"compressedSize" : ' + _compressedSize + " bytes,\n" +
|
|
'\t"size" : ' + _size + " bytes,\n" +
|
|
'\t"fileNameLength" : ' + _fnameLen + ",\n" +
|
|
'\t"extraLength" : ' + _extraLen + " bytes,\n" +
|
|
'\t"commentLength" : ' + _comLen + " bytes,\n" +
|
|
'\t"diskNumStart" : ' + _diskStart + ",\n" +
|
|
'\t"inAttr" : ' + _inattr + ",\n" +
|
|
'\t"attr" : ' + _attr + ",\n" +
|
|
'\t"offset" : ' + _offset + ",\n" +
|
|
'\t"entryHeaderSize" : ' + (Constants.CENHDR + _fnameLen + _extraLen + _comLen) + " bytes\n" +
|
|
'}';
|
|
}
|
|
}
|
|
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 4958:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
// Header module index: re-exports the two ZIP header implementations.
exports.EntryHeader = __nccwpck_require__(9032); // central directory (CEN) record
exports.MainHeader = __nccwpck_require__(4408);  // end-of-central-directory (END) record
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 4408:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
var Utils = __nccwpck_require__(5182),
|
|
Constants = Utils.Constants;
|
|
|
|
/* The entries in the end of central directory */
|
|
module.exports = function () {
|
|
var _volumeEntries = 0,
|
|
_totalEntries = 0,
|
|
_size = 0,
|
|
_offset = 0,
|
|
_commentLength = 0;
|
|
|
|
return {
|
|
get diskEntries () { return _volumeEntries },
|
|
set diskEntries (/*Number*/val) { _volumeEntries = _totalEntries = val; },
|
|
|
|
get totalEntries () { return _totalEntries },
|
|
set totalEntries (/*Number*/val) { _totalEntries = _volumeEntries = val; },
|
|
|
|
get size () { return _size },
|
|
set size (/*Number*/val) { _size = val; },
|
|
|
|
get offset () { return _offset },
|
|
set offset (/*Number*/val) { _offset = val; },
|
|
|
|
get commentLength () { return _commentLength },
|
|
set commentLength (/*Number*/val) { _commentLength = val; },
|
|
|
|
get mainHeaderSize () {
|
|
return Constants.ENDHDR + _commentLength;
|
|
},
|
|
|
|
loadFromBinary : function(/*Buffer*/data) {
|
|
// data should be 22 bytes and start with "PK 05 06"
|
|
// or be 56+ bytes and start with "PK 06 06" for Zip64
|
|
if ((data.length !== Constants.ENDHDR || data.readUInt32LE(0) !== Constants.ENDSIG) &&
|
|
(data.length < Constants.ZIP64HDR || data.readUInt32LE(0) !== Constants.ZIP64SIG)) {
|
|
|
|
throw new Error(Utils.Errors.INVALID_END);
|
|
}
|
|
|
|
if (data.readUInt32LE(0) === Constants.ENDSIG) {
|
|
// number of entries on this volume
|
|
_volumeEntries = data.readUInt16LE(Constants.ENDSUB);
|
|
// total number of entries
|
|
_totalEntries = data.readUInt16LE(Constants.ENDTOT);
|
|
// central directory size in bytes
|
|
_size = data.readUInt32LE(Constants.ENDSIZ);
|
|
// offset of first CEN header
|
|
_offset = data.readUInt32LE(Constants.ENDOFF);
|
|
// zip file comment length
|
|
_commentLength = data.readUInt16LE(Constants.ENDCOM);
|
|
} else {
|
|
// number of entries on this volume
|
|
_volumeEntries = Utils.readBigUInt64LE(data, Constants.ZIP64SUB);
|
|
// total number of entries
|
|
_totalEntries = Utils.readBigUInt64LE(data, Constants.ZIP64TOT);
|
|
// central directory size in bytes
|
|
_size = Utils.readBigUInt64LE(data, Constants.ZIP64SIZ);
|
|
// offset of first CEN header
|
|
_offset = Utils.readBigUInt64LE(data, Constants.ZIP64OFF);
|
|
|
|
_commentLength = 0;
|
|
}
|
|
|
|
},
|
|
|
|
toBinary : function() {
|
|
var b = Buffer.alloc(Constants.ENDHDR + _commentLength);
|
|
// "PK 05 06" signature
|
|
b.writeUInt32LE(Constants.ENDSIG, 0);
|
|
b.writeUInt32LE(0, 4);
|
|
// number of entries on this volume
|
|
b.writeUInt16LE(_volumeEntries, Constants.ENDSUB);
|
|
// total number of entries
|
|
b.writeUInt16LE(_totalEntries, Constants.ENDTOT);
|
|
// central directory size in bytes
|
|
b.writeUInt32LE(_size, Constants.ENDSIZ);
|
|
// offset of first CEN header
|
|
b.writeUInt32LE(_offset, Constants.ENDOFF);
|
|
// zip file comment length
|
|
b.writeUInt16LE(_commentLength, Constants.ENDCOM);
|
|
// fill comment memory with spaces so no garbage is left there
|
|
b.fill(" ", Constants.ENDHDR);
|
|
|
|
return b;
|
|
},
|
|
|
|
toString : function() {
|
|
return '{\n' +
|
|
'\t"diskEntries" : ' + _volumeEntries + ",\n" +
|
|
'\t"totalEntries" : ' + _totalEntries + ",\n" +
|
|
'\t"size" : ' + _size + " bytes,\n" +
|
|
'\t"offset" : 0x' + _offset.toString(16).toUpperCase() + ",\n" +
|
|
'\t"commentLength" : 0x' + _commentLength + "\n" +
|
|
'}';
|
|
}
|
|
}
|
|
};
|
|
|
|
/***/ }),
|
|
|
|
/***/ 7686:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
module.exports = function (/*Buffer*/inbuf) {
|
|
|
|
var zlib = __nccwpck_require__(8761);
|
|
|
|
var opts = {chunkSize: (parseInt(inbuf.length / 1024) + 1) * 1024};
|
|
|
|
return {
|
|
deflate: function () {
|
|
return zlib.deflateRawSync(inbuf, opts);
|
|
},
|
|
|
|
deflateAsync: function (/*Function*/callback) {
|
|
var tmp = zlib.createDeflateRaw(opts), parts = [], total = 0;
|
|
tmp.on('data', function (data) {
|
|
parts.push(data);
|
|
total += data.length;
|
|
});
|
|
tmp.on('end', function () {
|
|
var buf = Buffer.alloc(total), written = 0;
|
|
buf.fill(0);
|
|
for (var i = 0; i < parts.length; i++) {
|
|
var part = parts[i];
|
|
part.copy(buf, written);
|
|
written += part.length;
|
|
}
|
|
callback && callback(buf);
|
|
});
|
|
tmp.end(inbuf);
|
|
}
|
|
}
|
|
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 3928:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
// Codec module index for adm-zip.
exports.Deflater = __nccwpck_require__(7686);  // raw DEFLATE compressor
exports.Inflater = __nccwpck_require__(2153);  // raw DEFLATE decompressor
exports.ZipCrypto = __nccwpck_require__(3228); // legacy PKWARE ZipCrypto
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2153:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
module.exports = function (/*Buffer*/inbuf) {
|
|
|
|
var zlib = __nccwpck_require__(8761);
|
|
|
|
return {
|
|
inflate: function () {
|
|
return zlib.inflateRawSync(inbuf);
|
|
},
|
|
|
|
inflateAsync: function (/*Function*/callback) {
|
|
var tmp = zlib.createInflateRaw(), parts = [], total = 0;
|
|
tmp.on('data', function (data) {
|
|
parts.push(data);
|
|
total += data.length;
|
|
});
|
|
tmp.on('end', function () {
|
|
var buf = Buffer.alloc(total), written = 0;
|
|
buf.fill(0);
|
|
for (var i = 0; i < parts.length; i++) {
|
|
var part = parts[i];
|
|
part.copy(buf, written);
|
|
written += part.length;
|
|
}
|
|
callback && callback(buf);
|
|
});
|
|
tmp.end(inbuf);
|
|
}
|
|
}
|
|
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 3228:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
// node crypt, we use it for generate salt
|
|
const { randomFillSync } = __nccwpck_require__(6417);
|
|
|
|
"use strict";
|
|
|
|
// CRC32 lookup table for the standard reflected polynomial 0xEDB88320.
const crctable = new Uint32Array(256).map((t, crc) => {
    for (let j = 0; j < 8; j++) {
        crc = (crc & 1) !== 0 ? (crc >>> 1) ^ 0xedb88320 : crc >>> 1;
    }
    return crc >>> 0;
});

// C-style uInt32 multiply: keeps the LOW 32 bits (plain JS `*` loses them).
const uMul = (a, b) => Math.imul(a, b) >>> 0;

// Single-byte CRC32 update step (same math as utils.crc32).
const crc32update = (pCrc32, bval) => crctable[(pCrc32 ^ bval) & 0xff] ^ (pCrc32 >>> 8);
|
|
|
|
// Generates the 12 random bytes used as the ZipCrypto encryption header.
// Prefers crypto.randomFillSync, falling back to the Math.random variant.
const genSalt = () => {
    return "function" === typeof randomFillSync
        ? randomFillSync(Buffer.alloc(12))
        : genSalt.node();
};

// Fallback salt generator based on Math.random
// (NOT cryptographically strong — fallback only).
genSalt.node = () => {
    const salt = Buffer.alloc(12);
    for (let i = 0; i < salt.length; i++) {
        salt[i] = (Math.random() * 256) & 0xff;
    }
    return salt;
};

// general config — holds the active salt generator (swappable via _salter)
const config = {
    genSalt
};
|
|
|
|
// Initkeys implements the three-key state machine of the legacy PKWARE
// ZipCrypto scheme, seeded from the password bytes.
function Initkeys(pw) {
    const pass = Buffer.isBuffer(pw) ? pw : Buffer.from(pw);
    this.keys = new Uint32Array([0x12345678, 0x23456789, 0x34567890]);
    for (const byte of pass) {
        this.updateKeys(byte);
    }
}

// Feed one plaintext byte through the key schedule; returns the byte.
Initkeys.prototype.updateKeys = function (byteValue) {
    const keys = this.keys;
    keys[0] = crc32update(keys[0], byteValue);
    keys[1] += keys[0] & 0xff;
    keys[1] = uMul(keys[1], 134775813) + 1;
    keys[2] = crc32update(keys[2], keys[1] >>> 24);
    return byteValue;
};

// Produce the next keystream byte without consuming input.
Initkeys.prototype.next = function () {
    const k = (this.keys[2] | 2) >>> 0; // key
    return (uMul(k, k ^ 1) >> 8) & 0xff; // decode
};
|
|
|
|
// Builds a stateful decrypting function for the given password.
function make_decrypter(/*Buffer*/ pwd) {
    // stage 1: initialize the key schedule from the password
    const keys = new Initkeys(pwd);

    // The returned function decodes `data` into a fresh Buffer, folding
    // every decoded byte back into the key state.
    return function (/*Buffer*/ data) {
        const result = Buffer.alloc(data.length);
        for (let i = 0; i < data.length; i++) {
            result[i] = keys.updateKeys(data[i] ^ keys.next());
        }
        return result;
    };
}
|
|
|
|
// Builds a stateful encrypting function for the given password.
function make_encrypter(/*Buffer*/ pwd) {
    // stage 1: initialize the key schedule from the password
    const keys = new Initkeys(pwd);

    // The returned function encodes `data` into `result` starting at `pos`;
    // result/pos are parameters so callers can avoid merging buffers later.
    return function (/*Buffer*/ data, /*Buffer*/ result, /* Number */ pos = 0) {
        if (!result) result = Buffer.alloc(data.length);
        for (let i = 0; i < data.length; i++) {
            const plain = data[i];
            result[pos++] = plain ^ keys.next(); // keystream byte first...
            keys.updateKeys(plain);              // ...then update keys with the PLAIN byte
        }
        return result;
    };
}
|
|
|
|
// Decrypts a ZipCrypto-encrypted entry payload.
// The first 12 bytes of `data` are the encryption header; its last byte
// must match the high byte of the entry CRC or the password is wrong.
function decrypt(/*Buffer*/ data, /*Object*/ header, /*String, Buffer*/ pwd) {
    const SALT_LEN = 12;
    if (!data || !Buffer.isBuffer(data) || data.length < SALT_LEN) {
        return Buffer.alloc(0);
    }

    // build the stateful decrypter for this password
    const decrypter = make_decrypter(pwd);

    // decrypt the 12-byte header that precedes the file content
    const salt = decrypter(data.slice(0, SALT_LEN));

    // password verification against the CRC high byte
    if (salt[SALT_LEN - 1] !== header.crc >>> 24) {
        throw "ADM-ZIP: Wrong Password";
    }

    // decode the remaining content
    return decrypter(data.slice(SALT_LEN));
}
|
|
|
|
// Overrides the salt generator — NOT for production, but useful for
// deterministic testing of the encryption path.
function _salter(data) {
    if (Buffer.isBuffer(data) && data.length >= 12) {
        // fixed salt taken from the supplied buffer
        // (be aware: the salting buffer itself gets modified on use)
        config.genSalt = function () {
            return data.slice(0, 12);
        };
    } else if (data === "node") {
        // force the Math.random-based node fallback
        config.genSalt = genSalt.node;
    } else {
        // anything else resets to the default generator
        config.genSalt = genSalt;
    }
}
|
|
|
|
// Encrypts `data` with the legacy ZipCrypto scheme: a 12-byte salt header
// (last byte = CRC high byte for password verification) followed by the
// encrypted content.
function encrypt(/*Buffer*/ data, /*Object*/ header, /*String, Buffer*/ pwd, /*Boolean*/ oldlike = false) {
    // normalize input: null/undefined -> empty, non-Buffer -> stringified
    if (data == null) data = Buffer.alloc(0);
    else if (!Buffer.isBuffer(data)) data = Buffer.from(data.toString());

    // stateful encrypter for this password
    const encrypter = make_encrypter(pwd);

    // 12 random bytes; byte 11 mirrors the CRC high byte
    const salt = config.genSalt();
    salt[11] = (header.crc >>> 24) & 0xff;

    // pre-PKZip-2.04g implementations used a two-byte check
    if (oldlike) salt[10] = (header.crc >>> 16) & 0xff;

    // encrypted salt first, then the content, in one output buffer
    const result = Buffer.alloc(data.length + 12);
    encrypter(salt, result);
    return encrypter(data, result, 12);
}
|
|
|
|
// Public ZipCrypto surface; _salter is exposed for deterministic-salt tests.
module.exports = { decrypt, encrypt, _salter };
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 4522:
|
|
/***/ ((module) => {
|
|
|
|
// ZIP format constants: record signatures, fixed header sizes, byte offsets
// of every field within the LOC/EXT/CEN/END records, compression method ids,
// general-purpose flag bits, and extra-field header IDs.
module.exports = {
    /* The local file header */
    LOCHDR         : 30, // LOC header size
    LOCSIG         : 0x04034b50, // "PK\003\004"
    LOCVER         : 4,  // version needed to extract
    LOCFLG         : 6,  // general purpose bit flag
    LOCHOW         : 8,  // compression method
    LOCTIM         : 10, // modification time (2 bytes time, 2 bytes date)
    LOCCRC         : 14, // uncompressed file crc-32 value
    LOCSIZ         : 18, // compressed size
    LOCLEN         : 22, // uncompressed size
    LOCNAM         : 26, // filename length
    LOCEXT         : 28, // extra field length

    /* The Data descriptor */
    EXTSIG         : 0x08074b50, // "PK\007\008"
    EXTHDR         : 16, // EXT header size
    EXTCRC         : 4,  // uncompressed file crc-32 value
    EXTSIZ         : 8,  // compressed size
    EXTLEN         : 12, // uncompressed size

    /* The central directory file header */
    CENHDR         : 46, // CEN header size
    CENSIG         : 0x02014b50, // "PK\001\002"
    CENVEM         : 4,  // version made by
    CENVER         : 6,  // version needed to extract
    CENFLG         : 8,  // encrypt, decrypt flags
    CENHOW         : 10, // compression method
    CENTIM         : 12, // modification time (2 bytes time, 2 bytes date)
    CENCRC         : 16, // uncompressed file crc-32 value
    CENSIZ         : 20, // compressed size
    CENLEN         : 24, // uncompressed size
    CENNAM         : 28, // filename length
    CENEXT         : 30, // extra field length
    CENCOM         : 32, // file comment length
    CENDSK         : 34, // volume number start
    CENATT         : 36, // internal file attributes
    CENATX         : 38, // external file attributes (host system dependent)
    CENOFF         : 42, // LOC header offset

    /* The entries in the end of central directory */
    ENDHDR         : 22, // END header size
    ENDSIG         : 0x06054b50, // "PK\005\006"
    ENDSUB         : 8,  // number of entries on this disk
    ENDTOT         : 10, // total number of entries
    ENDSIZ         : 12, // central directory size in bytes
    ENDOFF         : 16, // offset of first CEN header
    ENDCOM         : 20, // zip file comment length

    END64HDR       : 20, // zip64 END header size
    END64SIG       : 0x07064b50, // zip64 Locator signature, "PK\006\007"
    END64START     : 4,  // number of the disk with the start of the zip64
    END64OFF       : 8,  // relative offset of the zip64 end of central directory
    END64NUMDISKS  : 16, // total number of disks

    ZIP64SIG       : 0x06064b50, // zip64 signature, "PK\006\006"
    ZIP64HDR       : 56, // zip64 record minimum size
    ZIP64LEAD      : 12, // leading bytes at the start of the record, not counted by the value stored in ZIP64SIZE
    ZIP64SIZE      : 4,  // zip64 size of the central directory record
    ZIP64VEM       : 12, // zip64 version made by
    ZIP64VER       : 14, // zip64 version needed to extract
    ZIP64DSK       : 16, // zip64 number of this disk
    ZIP64DSKDIR    : 20, // number of the disk with the start of the record directory
    ZIP64SUB       : 24, // number of entries on this disk
    ZIP64TOT       : 32, // total number of entries
    ZIP64SIZB      : 40, // zip64 central directory size in bytes
    ZIP64OFF       : 48, // offset of start of central directory with respect to the starting disk number
    ZIP64EXTRA     : 56, // extensible data sector

    /* Compression methods */
    STORED         : 0,  // no compression
    SHRUNK         : 1,  // shrunk
    REDUCED1       : 2,  // reduced with compression factor 1
    REDUCED2       : 3,  // reduced with compression factor 2
    REDUCED3       : 4,  // reduced with compression factor 3
    REDUCED4       : 5,  // reduced with compression factor 4
    IMPLODED       : 6,  // imploded
    // 7 reserved
    DEFLATED       : 8,  // deflated
    ENHANCED_DEFLATED: 9, // enhanced deflated
    PKWARE         : 10, // PKWare DCL imploded
    // 11 reserved
    BZIP2          : 12, // compressed using BZIP2
    // 13 reserved
    LZMA           : 14, // LZMA
    // 15-17 reserved
    IBM_TERSE      : 18, // compressed using IBM TERSE
    IBM_LZ77       : 19, // IBM LZ77 z

    /* General purpose bit flag */
    FLG_ENC        : 0,    // encrypted file
    FLG_COMP1      : 1,    // compression option
    FLG_COMP2      : 2,    // compression option
    FLG_DESC       : 4,    // data descriptor
    FLG_ENH        : 8,    // enhanced deflation
    FLG_STR        : 16,   // strong encryption
    FLG_LNG        : 1024, // language encoding
    FLG_MSK        : 4096, // mask header values

    /* Load type */
    FILE           : 2,
    BUFFER         : 1,
    NONE           : 0,

    /* 4.5 Extensible data fields */
    EF_ID          : 0,
    EF_SIZE        : 2,

    /* Header IDs */
    ID_ZIP64       : 0x0001,
    ID_AVINFO      : 0x0007,
    ID_PFS         : 0x0008,
    ID_OS2         : 0x0009,
    ID_NTFS        : 0x000a,
    ID_OPENVMS     : 0x000c,
    ID_UNIX        : 0x000d,
    ID_FORK        : 0x000e,
    ID_PATCH       : 0x000f,
    ID_X509_PKCS7  : 0x0014,
    ID_X509_CERTID_F : 0x0015,
    ID_X509_CERTID_C : 0x0016,
    ID_STRONGENC   : 0x0017,
    ID_RECORD_MGT  : 0x0018,
    ID_X509_PKCS7_RL : 0x0019,
    ID_IBM1        : 0x0065,
    ID_IBM2        : 0x0066,
    ID_POSZIP      : 0x4690,

    EF_ZIP64_OR_32 : 0xffffffff,
    EF_ZIP64_OR_16 : 0xffff,
    EF_ZIP64_SUNCOMP : 0,
    EF_ZIP64_SCOMP : 8,
    EF_ZIP64_RHO   : 16,
    EF_ZIP64_DSN   : 24
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1255:
|
|
/***/ ((module) => {
|
|
|
|
// Error-message catalog used across adm-zip.
// Values containing "%s" are format strings substituted by callers.
module.exports = {
    /* Header error messages */
    "INVALID_LOC" : "Invalid LOC header (bad signature)",
    "INVALID_CEN" : "Invalid CEN header (bad signature)",
    "INVALID_END" : "Invalid END header (bad signature)",

    /* ZipEntry error messages*/
    "NO_DATA" : "Nothing to decompress",
    "BAD_CRC" : "CRC32 checksum failed",
    "FILE_IN_THE_WAY" : "There is a file in the way: %s",
    "UNKNOWN_METHOD" : "Invalid/unsupported compression method",

    /* Inflater error messages */
    "AVAIL_DATA" : "inflate::Available inflate data did not terminate",
    "INVALID_DISTANCE" : "inflate::Invalid literal/length or distance code in fixed or dynamic block",
    "TO_MANY_CODES" : "inflate::Dynamic block code description: too many length or distance codes",
    "INVALID_REPEAT_LEN" : "inflate::Dynamic block code description: repeat more than specified lengths",
    "INVALID_REPEAT_FIRST" : "inflate::Dynamic block code description: repeat lengths with no first length",
    "INCOMPLETE_CODES" : "inflate::Dynamic block code description: code lengths codes incomplete",
    "INVALID_DYN_DISTANCE": "inflate::Dynamic block code description: invalid distance code lengths",
    "INVALID_CODES_LEN": "inflate::Dynamic block code description: invalid literal/length code lengths",
    "INVALID_STORE_BLOCK" : "inflate::Stored block length did not match one's complement",
    "INVALID_BLOCK_TYPE" : "inflate::Invalid block type (type == 3)",

    /* ADM-ZIP error messages */
    "CANT_EXTRACT_FILE" : "Could not extract the file",
    "CANT_OVERRIDE" : "Target file already exists",
    "NO_ZIP" : "No zip file was loaded",
    "NO_ENTRY" : "Entry doesn't exist",
    "DIRECTORY_CONTENT_ERROR" : "A directory cannot have content",
    "FILE_NOT_FOUND" : "File not found: %s",
    "NOT_IMPLEMENTED" : "Not implemented",
    "INVALID_FILENAME" : "Invalid filename",
    "INVALID_FORMAT" : "Invalid or unsupported zip format. No END header found"
};
|
|
|
|
/***/ }),
|
|
|
|
/***/ 8321:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
var fs = __nccwpck_require__(2895).require(),
|
|
pth = __nccwpck_require__(5622);
|
|
|
|
fs.existsSync = fs.existsSync || pth.existsSync;
|
|
|
|
module.exports = function(/*String*/path) {
|
|
|
|
var _path = path || "",
|
|
_permissions = 0,
|
|
_obj = newAttr(),
|
|
_stat = null;
|
|
|
|
function newAttr() {
|
|
return {
|
|
directory : false,
|
|
readonly : false,
|
|
hidden : false,
|
|
executable : false,
|
|
mtime : 0,
|
|
atime : 0
|
|
}
|
|
}
|
|
|
|
if (_path && fs.existsSync(_path)) {
|
|
_stat = fs.statSync(_path);
|
|
_obj.directory = _stat.isDirectory();
|
|
_obj.mtime = _stat.mtime;
|
|
_obj.atime = _stat.atime;
|
|
_obj.executable = (0o111 & _stat.mode) != 0; // file is executable who ever har right not just owner
|
|
_obj.readonly = (0o200 & _stat.mode) == 0; // readonly if owner has no write right
|
|
_obj.hidden = pth.basename(_path)[0] === ".";
|
|
} else {
|
|
console.warn("Invalid path: " + _path)
|
|
}
|
|
|
|
return {
|
|
|
|
get directory () {
|
|
return _obj.directory;
|
|
},
|
|
|
|
get readOnly () {
|
|
return _obj.readonly;
|
|
},
|
|
|
|
get hidden () {
|
|
return _obj.hidden;
|
|
},
|
|
|
|
get mtime () {
|
|
return _obj.mtime;
|
|
},
|
|
|
|
get atime () {
|
|
return _obj.atime;
|
|
},
|
|
|
|
|
|
get executable () {
|
|
return _obj.executable;
|
|
},
|
|
|
|
decodeAttributes : function(val) {
|
|
|
|
},
|
|
|
|
encodeAttributes : function (val) {
|
|
|
|
},
|
|
|
|
toString : function() {
|
|
return '{\n' +
|
|
'\t"path" : "' + _path + ",\n" +
|
|
'\t"isDirectory" : ' + _obj.directory + ",\n" +
|
|
'\t"isReadOnly" : ' + _obj.readonly + ",\n" +
|
|
'\t"isHidden" : ' + _obj.hidden + ",\n" +
|
|
'\t"isExecutable" : ' + _obj.executable + ",\n" +
|
|
'\t"mTime" : ' + _obj.mtime + "\n" +
|
|
'\t"aTime" : ' + _obj.atime + "\n" +
|
|
'}';
|
|
}
|
|
}
|
|
|
|
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2895:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
exports.require = function() {
|
|
var fs = __nccwpck_require__(5747);
|
|
if (process && process.versions && process.versions['electron']) {
|
|
try {
|
|
originalFs = __nccwpck_require__(2941);
|
|
if (Object.keys(originalFs).length > 0) {
|
|
fs = originalFs;
|
|
}
|
|
} catch (e) {}
|
|
}
|
|
return fs
|
|
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5182:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
// Utility aggregate: base utils (1291) extended with the filesystem
// shim, format constants, error catalog, and file-attribute reader.
module.exports = __nccwpck_require__(1291);
module.exports.FileSystem = __nccwpck_require__(2895);
module.exports.Constants = __nccwpck_require__(4522);
module.exports.Errors = __nccwpck_require__(1255);
module.exports.FileAttr = __nccwpck_require__(8321);
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1291:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
var fs = __nccwpck_require__(2895).require(),
|
|
pth = __nccwpck_require__(5622);
|
|
|
|
fs.existsSync = fs.existsSync || pth.existsSync;
|
|
|
|
module.exports = (function() {
|
|
|
|
var crcTable = [],
|
|
Constants = __nccwpck_require__(4522),
|
|
Errors = __nccwpck_require__(1255),
|
|
|
|
PATH_SEPARATOR = pth.sep;
|
|
|
|
|
|
    // Recursively creates `path` (mkdir -p style), creating each missing
    // ancestor in turn. Throws Errors.FILE_IN_THE_WAY when a component
    // already exists but is a regular file.
    //
    // NOTE(review): `resolvedPath` starts as the first segment AND the first
    // segment is appended again inside the loop, so a relative path like
    // "a/b" resolves to "a/a/b". Absolute ("/a/b") and drive-rooted
    // ("C:\a\b") paths are handled correctly because their first segment is
    // skipped — presumably only such paths are ever passed in; confirm
    // against callers.
    function mkdirSync(/*String*/path) {
        var resolvedPath = path.split(PATH_SEPARATOR)[0];
        path.split(PATH_SEPARATOR).forEach(function(name) {
            // skip empty segments and Windows drive specifiers ("C:")
            if (!name || name.substr(-1,1) === ":") return;
            resolvedPath += PATH_SEPARATOR + name;
            var stat;
            try {
                stat = fs.statSync(resolvedPath);
            } catch (e) {
                // segment does not exist yet — create it
                fs.mkdirSync(resolvedPath);
            }
            if (stat && stat.isFile())
                throw Errors.FILE_IN_THE_WAY.replace("%s", resolvedPath);
        });
    }
|
|
|
|
function findSync(/*String*/dir, /*RegExp*/pattern, /*Boolean*/recoursive) {
|
|
if (typeof pattern === 'boolean') {
|
|
recoursive = pattern;
|
|
pattern = undefined;
|
|
}
|
|
var files = [];
|
|
fs.readdirSync(dir).forEach(function(file) {
|
|
var path = pth.join(dir, file);
|
|
|
|
if (fs.statSync(path).isDirectory() && recoursive)
|
|
files = files.concat(findSync(path, pattern, recoursive));
|
|
|
|
if (!pattern || pattern.test(path)) {
|
|
files.push(pth.normalize(path) + (fs.statSync(path).isDirectory() ? PATH_SEPARATOR : ""));
|
|
}
|
|
|
|
});
|
|
return files;
|
|
}
|
|
|
|
function readBigUInt64LE(/*Buffer*/buffer, /*int*/index) {
|
|
var slice = Buffer.from(buffer.slice(index, index + 8));
|
|
slice.swap64();
|
|
|
|
return parseInt(`0x${ slice.toString('hex') }`);
|
|
}
|
|
|
|
    // Public API of the utils singleton.
    return {
        // Recursively creates a directory path (delegates to mkdirSync).
        makeDir : function(/*String*/path) {
            mkdirSync(path);
        },

        // CRC-32 checksum of a Buffer (strings are converted as UTF-8),
        // returned as an unsigned 32-bit integer. The 256-entry lookup table
        // is built lazily on first use and cached in `crcTable`.
        crc32 : function(buf) {
            if (typeof buf === 'string') {
                buf = Buffer.from(buf);
            }
            // 4-byte scratch buffer used to coerce signed ints to unsigned
            var b = Buffer.alloc(4);
            if (!crcTable.length) {
                for (var n = 0; n < 256; n++) {
                    var c = n;
                    for (var k = 8; --k >= 0;)
                        if ((c & 1) !== 0) { c = 0xedb88320 ^ (c >>> 1); } else { c = c >>> 1; }
                    if (c < 0) {
                        // normalise the signed 32-bit value to unsigned
                        b.writeInt32LE(c, 0);
                        c = b.readUInt32LE(0);
                    }
                    crcTable[n] = c;
                }
            }
            var crc = 0, off = 0, len = buf.length, c1 = ~crc;
            while(--len >= 0) c1 = crcTable[(c1 ^ buf[off++]) & 0xff] ^ (c1 >>> 8);
            crc = ~c1;
            // round-trip through the scratch buffer to return an unsigned value
            b.writeInt32LE(crc & 0xffffffff, 0);
            return b.readUInt32LE(0);
        },

        // Human-readable label for a zip compression method id.
        methodToString : function(/*Number*/method) {
            switch (method) {
                case Constants.STORED:
                    return 'STORED (' + method + ')';
                case Constants.DEFLATED:
                    return 'DEFLATED (' + method + ')';
                default:
                    return 'UNSUPPORTED (' + method + ')';
            }

        },

        // Synchronously writes `content` to `path`, creating parent folders
        // as needed. Returns false (without writing) when the target exists
        // and `overwrite` is falsy, or when the target is a directory;
        // true otherwise. `attr` is the final file mode (default 0666).
        writeFileTo : function(/*String*/path, /*Buffer*/content, /*Boolean*/overwrite, /*Number*/attr) {
            if (fs.existsSync(path)) {
                if (!overwrite)
                    return false; // cannot overwrite

                var stat = fs.statSync(path);
                if (stat.isDirectory()) {
                    return false;
                }
            }
            var folder = pth.dirname(path);
            if (!fs.existsSync(folder)) {
                mkdirSync(folder);
            }

            var fd;
            try {
                fd = fs.openSync(path, 'w', 438); // 0666
            } catch(e) {
                // open may fail on a read-only file: loosen the mode and retry
                fs.chmodSync(path, 438);
                fd = fs.openSync(path, 'w', 438);
            }
            if (fd) {
                try {
                    fs.writeSync(fd, content, 0, content.length, 0);
                }
                catch (e){
                    throw e;
                }
                finally {
                    // always release the descriptor, even on write failure
                    fs.closeSync(fd);
                }
            }
            fs.chmodSync(path, attr || 438);
            return true;
        },

        // Async counterpart of writeFileTo. `callback(success)` receives a
        // single boolean; `attr` may be omitted (callback shifts left).
        // NOTE(review): uses the deprecated fs.exists API — behaviour kept
        // as-is.
        writeFileToAsync : function(/*String*/path, /*Buffer*/content, /*Boolean*/overwrite, /*Number*/attr, /*Function*/callback) {
            if(typeof attr === 'function') {
                callback = attr;
                attr = undefined;
            }

            fs.exists(path, function(exists) {
                if(exists && !overwrite)
                    return callback(false);

                fs.stat(path, function(err, stat) {
                    if(exists && stat.isDirectory()) {
                        return callback(false);
                    }

                    var folder = pth.dirname(path);
                    fs.exists(folder, function(exists) {
                        if(!exists)
                            mkdirSync(folder);

                        fs.open(path, 'w', 438, function(err, fd) {
                            if(err) {
                                // likely read-only: loosen the mode and retry
                                fs.chmod(path, 438, function() {
                                    fs.open(path, 'w', 438, function(err, fd) {
                                        fs.write(fd, content, 0, content.length, 0, function() {
                                            fs.close(fd, function() {
                                                fs.chmod(path, attr || 438, function() {
                                                    callback(true);
                                                })
                                            });
                                        });
                                    });
                                })
                            } else {
                                if(fd) {
                                    fs.write(fd, content, 0, content.length, 0, function() {
                                        fs.close(fd, function() {
                                            fs.chmod(path, attr || 438, function() {
                                                callback(true);
                                            })
                                        });
                                    });
                                } else {
                                    fs.chmod(path, attr || 438, function() {
                                        callback(true);
                                    })
                                }
                            }
                        });
                    })
                })
            })
        },

        // Recursively lists every file/directory under `path`.
        findFiles : function(/*String*/path) {
            return findSync(path, true);
        },

        // Placeholder — attribute handling is not implemented.
        getAttributes : function(/*String*/path) {

        },

        // Placeholder — attribute handling is not implemented.
        setAttributes : function(/*String*/path) {

        },

        // Coerces `input` to a Buffer: Buffers pass through, strings are
        // encoded as UTF-8, and empty input yields an empty Buffer.
        toBuffer : function(input) {
            if (Buffer.isBuffer(input)) {
                return input;
            } else {
                if (input.length === 0) {
                    return Buffer.alloc(0)
                }
                return Buffer.from(input, 'utf8');
            }
        },

        readBigUInt64LE,

        Constants : Constants,
        Errors : Errors
    }
})();
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 4057:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
var Utils = __nccwpck_require__(5182),
|
|
Headers = __nccwpck_require__(4958),
|
|
Constants = Utils.Constants,
|
|
Methods = __nccwpck_require__(3928);
|
|
|
|
// Represents a single entry inside a zip archive. `input` is the raw
// archive buffer the entry was read from (or null for a brand-new entry).
module.exports = function (/*Buffer*/input) {

    var _entryHeader = new Headers.EntryHeader(), // central-directory header for this entry
        _entryName = Buffer.alloc(0),             // raw entry-name bytes
        _comment = Buffer.alloc(0),               // raw comment bytes
        _isDirectory = false,                     // derived from a trailing "/" or "\" in the name
        uncompressedData = null,                  // set via setData(); null until the entry is modified
        _extra = Buffer.alloc(0);                 // raw "extra field" bytes (may hold ZIP64 info)
|
|
|
|
function getCompressedDataFromZip() {
|
|
if (!input || !Buffer.isBuffer(input)) {
|
|
return Buffer.alloc(0);
|
|
}
|
|
_entryHeader.loadDataHeaderFromBinary(input);
|
|
return input.slice(_entryHeader.realDataOffset, _entryHeader.realDataOffset + _entryHeader.compressedSize)
|
|
}
|
|
|
|
function crc32OK(data) {
|
|
// if bit 3 (0x08) of the general-purpose flags field is set, then the CRC-32 and file sizes are not known when the header is written
|
|
if ((_entryHeader.flags & 0x8) !== 0x8) {
|
|
if (Utils.crc32(data) !== _entryHeader.dataHeader.crc) {
|
|
return false;
|
|
}
|
|
} else {
|
|
// @TODO: load and check data descriptor header
|
|
// The fields in the local header are filled with zero, and the CRC-32 and size are appended in a 12-byte structure
|
|
// (optionally preceded by a 4-byte signature) immediately after the compressed data:
|
|
}
|
|
return true;
|
|
}
|
|
|
|
    // Inflates (or copies, for STORED entries) this entry's data.
    // Dual-mode: synchronous (returns a Buffer) or asynchronous (delivers the
    // Buffer — plus an error string on failure — to `callback`).
    // May also be called as decompress(pass) for password-protected entries.
    function decompress(/*Boolean*/async, /*Function*/callback, /*String, Buffer*/pass) {
        // shorthand form: decompress(pass)
        if(typeof callback === 'undefined' && typeof async === 'string') {
            pass=async;
            async=void 0;
        }
        // directories have no content
        if (_isDirectory) {
            if (async && callback) {
                callback(Buffer.alloc(0), Utils.Errors.DIRECTORY_CONTENT_ERROR); //si added error.
            }
            return Buffer.alloc(0);
        }

        var compressedData = getCompressedDataFromZip();

        if (compressedData.length === 0) {
            // File is empty, nothing to decompress.
            if (async && callback) callback(compressedData);
            return compressedData;
        }

        // decrypt before decompressing when the entry is ZipCrypto-protected
        if (_entryHeader.encripted){
            if ('string' !== typeof pass && !Buffer.isBuffer(pass)){
                throw new Error('ADM-ZIP: Incompatible password parameter');
            }
            compressedData = Methods.ZipCrypto.decrypt(compressedData, _entryHeader, pass);
        }

        var data = Buffer.alloc(_entryHeader.size);

        switch (_entryHeader.method) {
            case Utils.Constants.STORED:
                // stored entries are a plain byte copy
                compressedData.copy(data);
                if (!crc32OK(data)) {
                    if (async && callback) callback(data, Utils.Errors.BAD_CRC);//si added error
                    // throw even in async mode, after notifying the callback
                    throw new Error(Utils.Errors.BAD_CRC);
                } else {//si added otherwise did not seem to return data.
                    if (async && callback) callback(data);
                    return data;
                }
            case Utils.Constants.DEFLATED:
                var inflater = new Methods.Inflater(compressedData);
                if (!async) {
                    var result = inflater.inflate(data);
                    result.copy(data, 0);
                    if (!crc32OK(data)) {
                        throw new Error(Utils.Errors.BAD_CRC + " " + _entryName.toString());
                    }
                    return data;
                } else {
                    inflater.inflateAsync(function(result) {
                        result.copy(data, 0);
                        if (!crc32OK(data)) {
                            if (callback) callback(data, Utils.Errors.BAD_CRC); //si added error
                        } else { //si added otherwise did not seem to return data.
                            if (callback) callback(data);
                        }
                    });
                }
                break;
            default:
                if (async && callback) callback(Buffer.alloc(0), Utils.Errors.UNKNOWN_METHOD);
                throw new Error(Utils.Errors.UNKNOWN_METHOD);
        }
    }
|
|
|
|
    // Produces this entry's compressed byte stream, deflating on demand.
    // Dual-mode like decompress(): synchronous (returns a Buffer) or
    // asynchronous (delivers the Buffer to `callback`). Also updates
    // _entryHeader.compressedSize as a side effect.
    function compress(/*Boolean*/async, /*Function*/callback) {
        if ((!uncompressedData || !uncompressedData.length) && Buffer.isBuffer(input)) {
            // no data set or the data wasn't changed to require recompression
            if (async && callback) callback(getCompressedDataFromZip());
            return getCompressedDataFromZip();
        }

        if (uncompressedData.length && !_isDirectory) {
            var compressedData;
            // Local file header
            switch (_entryHeader.method) {
                case Utils.Constants.STORED:
                    // stored: compressed size equals the raw size; copy bytes
                    _entryHeader.compressedSize = _entryHeader.size;

                    compressedData = Buffer.alloc(uncompressedData.length);
                    uncompressedData.copy(compressedData);

                    if (async && callback) callback(compressedData);
                    return compressedData;
                default:
                // NOTE: `default:` deliberately falls through so that any
                // unknown method is compressed as DEFLATED.
                case Utils.Constants.DEFLATED:

                    var deflater = new Methods.Deflater(uncompressedData);
                    if (!async) {
                        var deflated = deflater.deflate();
                        _entryHeader.compressedSize = deflated.length;
                        return deflated;
                    } else {
                        deflater.deflateAsync(function(data) {
                            compressedData = Buffer.alloc(data.length);
                            _entryHeader.compressedSize = data.length;
                            data.copy(compressedData);
                            callback && callback(compressedData);
                        });
                    }
                    deflater = null;
                    break;
            }
        } else {
            // directories and empty files compress to nothing
            if (async && callback) {
                callback(Buffer.alloc(0));
            } else {
                return Buffer.alloc(0);
            }
        }
    }
|
|
|
|
function readUInt64LE(buffer, offset) {
|
|
return (buffer.readUInt32LE(offset + 4) << 4) + buffer.readUInt32LE(offset);
|
|
}
|
|
|
|
function parseExtra(data) {
|
|
var offset = 0;
|
|
var signature, size, part;
|
|
while(offset<data.length) {
|
|
signature = data.readUInt16LE(offset);
|
|
offset += 2;
|
|
size = data.readUInt16LE(offset);
|
|
offset += 2;
|
|
part = data.slice(offset, offset+size);
|
|
offset += size;
|
|
if(Constants.ID_ZIP64 === signature) {
|
|
parseZip64ExtendedInformation(part);
|
|
}
|
|
}
|
|
}
|
|
|
|
    // Override header field values with values from the ZIP64 extra field.
    // A 32-bit header field holding the sentinel EF_ZIP64_OR_32 (or the
    // 16-bit EF_ZIP64_OR_16 for the disk number) means the real value lives
    // in this ZIP64 record.
    // NOTE(review): each length guard uses the NEXT field's offset constant
    // (e.g. `>= EF_ZIP64_SCOMP` before reading at EF_ZIP64_SUNCOMP), i.e. it
    // checks that the field being read fits entirely — verify the constants
    // line up with the header module.
    function parseZip64ExtendedInformation(data) {
        var size, compressedSize, offset, diskNumStart;

        if(data.length >= Constants.EF_ZIP64_SCOMP) {
            size = readUInt64LE(data, Constants.EF_ZIP64_SUNCOMP);
            if(_entryHeader.size === Constants.EF_ZIP64_OR_32) {
                _entryHeader.size = size;
            }
        }
        if(data.length >= Constants.EF_ZIP64_RHO) {
            compressedSize = readUInt64LE(data, Constants.EF_ZIP64_SCOMP);
            if(_entryHeader.compressedSize === Constants.EF_ZIP64_OR_32) {
                _entryHeader.compressedSize = compressedSize;
            }
        }
        if(data.length >= Constants.EF_ZIP64_DSN) {
            offset = readUInt64LE(data, Constants.EF_ZIP64_RHO);
            if(_entryHeader.offset === Constants.EF_ZIP64_OR_32) {
                _entryHeader.offset = offset;
            }
        }
        if(data.length >= Constants.EF_ZIP64_DSN+4) {
            diskNumStart = data.readUInt32LE(Constants.EF_ZIP64_DSN);
            if(_entryHeader.diskNumStart === Constants.EF_ZIP64_OR_16) {
                _entryHeader.diskNumStart = diskNumStart;
            }
        }
    }
|
|
|
|
|
|
    // Public API of a ZipEntry instance.
    return {
        // Entry name as a string / raw bytes.
        get entryName () { return _entryName.toString(); },
        get rawEntryName() { return _entryName; },
        // Setting the name also derives the directory flag from a trailing
        // "/" (47) or "\" (92) and syncs the header's name length.
        set entryName (val) {
            _entryName = Utils.toBuffer(val);
            var lastChar = _entryName[_entryName.length - 1];
            _isDirectory = (lastChar === 47) || (lastChar === 92);
            _entryHeader.fileNameLength = _entryName.length;
        },

        // Raw "extra field" bytes; setting re-parses any ZIP64 record.
        get extra () { return _extra; },
        set extra (val) {
            _extra = val;
            _entryHeader.extraLength = val.length;
            parseExtra(val);
        },

        // Per-entry comment; setter keeps the header's comment length in sync.
        get comment () { return _comment.toString(); },
        set comment (val) {
            _comment = Utils.toBuffer(val);
            _entryHeader.commentLength = _comment.length;
        },

        // Base name of the entry (last path component).
        // NOTE(review): the directory branch takes substr(n.length - 1) —
        // only the LAST character — before splitting, so directories yield
        // "" or the final separator char rather than the folder name;
        // looks unintended but is preserved as-is.
        get name () { var n = _entryName.toString(); return _isDirectory ? n.substr(n.length - 1).split("/").pop() : n.split("/").pop(); },
        get isDirectory () { return _isDirectory },

        // Compressed byte stream (sync / async); see compress().
        getCompressedData : function() {
            return compress(false, null)
        },

        getCompressedDataAsync : function(/*Function*/callback) {
            compress(true, callback)
        },

        // Replaces the entry's content. Non-empty files are marked DEFLATED
        // and flagged as changed; folders and blank files should be stored.
        setData : function(value) {
            uncompressedData = Utils.toBuffer(value);
            if (!_isDirectory && uncompressedData.length) {
                _entryHeader.size = uncompressedData.length;
                _entryHeader.method = Utils.Constants.DEFLATED;
                _entryHeader.crc = Utils.crc32(value);
                _entryHeader.changed = true;
            } else { // folders and blank files should be stored
                _entryHeader.method = Utils.Constants.STORED;
            }
        },

        // Uncompressed content: cached data when modified, otherwise
        // decompressed from the source archive (optionally with a password).
        getData : function(pass) {
            if (_entryHeader.changed) {
                return uncompressedData;
            } else {
                return decompress(false, null, pass);
            }
        },

        getDataAsync : function(/*Function*/callback, pass) {
            if (_entryHeader.changed) {
                callback(uncompressedData);
            } else {
                decompress(true, callback, pass);
            }
        },

        // External file attributes, proxied to the header.
        set attr(attr) { _entryHeader.attr = attr; },
        get attr() { return _entryHeader.attr; },

        // Central-directory header: setter parses raw bytes, getter exposes
        // the live header object.
        set header(/*Buffer*/data) {
            _entryHeader.loadFromBinary(data);
        },

        get header() {
            return _entryHeader;
        },

        // Serialises the central-directory record for this entry:
        // fixed header, then name, extra field and comment.
        packHeader : function() {
            // 1. create header (buffer)
            var header = _entryHeader.entryHeaderToBinary();
            var addpos = Utils.Constants.CENHDR;
            // 2. add file name
            _entryName.copy(header, addpos);
            addpos += _entryName.length;
            // 3. add extra data
            if (_entryHeader.extraLength) {
                _extra.copy(header, addpos);
                addpos += _entryHeader.extraLength;
            }
            // 4. add file comment
            if (_entryHeader.commentLength) {
                _comment.copy(header, addpos);
            }
            return header;
        },

        // Debug-oriented JSON-ish dump of the entry.
        toString : function() {
            return '{\n' +
                '\t"entryName" : "' + _entryName.toString() + "\",\n" +
                '\t"name" : "' + (_isDirectory ? _entryName.toString().replace(/\/$/, '').split("/").pop() : _entryName.toString().split("/").pop()) + "\",\n" +
                '\t"comment" : "' + _comment.toString() + "\",\n" +
                '\t"isDirectory" : ' + _isDirectory + ",\n" +
                '\t"header" : ' + _entryHeader.toString().replace(/\t/mg, "\t\t").replace(/}/mg, "\t}") + ",\n" +
                '\t"compressedData" : <' + (input && input.length + " bytes buffer" || "null") + ">\n" +
                '\t"data" : <' + (uncompressedData && uncompressedData.length + " bytes buffer" || "null") + ">\n" +
                '}';
        }
    }
|
|
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 7744:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
const ZipEntry = __nccwpck_require__(4057);
|
|
const Headers = __nccwpck_require__(4958);
|
|
const Utils = __nccwpck_require__(5182);
|
|
|
|
// In-memory zip archive. `inBuffer` is the raw archive (or null to start a
// new, empty archive); `options.readEntries` forces eager entry parsing.
module.exports = function (/*Buffer|null*/inBuffer, /** object */options) {

    var entryList = [],                     // ZipEntry objects in central-directory order
        entryTable = {},                    // entryName -> ZipEntry lookup
        _comment = Buffer.alloc(0),         // archive-level comment bytes
        mainHeader = new Headers.MainHeader(), // END-of-central-directory record
        loadedEntries = false;              // entries are parsed lazily unless opts.readEntries

    // assign options
    const opts = Object.assign(Object.create(null), options);

    if (inBuffer){
        // is a memory buffer: locate and parse the END record immediately
        readMainHeader(opts.readEntries);
    } else {
        // none. is a new file — nothing to parse
        loadedEntries = true;
    }
|
|
|
|
function iterateEntries(callback) {
|
|
const totalEntries = mainHeader.diskEntries; // total number of entries
|
|
let index = mainHeader.offset; // offset of first CEN header
|
|
|
|
for (let i = 0; i < totalEntries; i++) {
|
|
let tmp = index;
|
|
const entry = new ZipEntry(inBuffer);
|
|
|
|
entry.header = inBuffer.slice(tmp, tmp += Utils.Constants.CENHDR);
|
|
entry.entryName = inBuffer.slice(tmp, tmp += entry.header.fileNameLength);
|
|
|
|
index += entry.header.entryHeaderSize;
|
|
|
|
callback(entry);
|
|
}
|
|
}
|
|
|
|
    // Fully parses the central directory into entryList/entryTable and marks
    // the archive as loaded. Each CEN record contributes its header, name,
    // optional extra field and optional comment.
    function readEntries() {
        loadedEntries = true;
        entryTable = {};
        entryList = new Array(mainHeader.diskEntries); // total number of entries
        var index = mainHeader.offset; // offset of first CEN header
        for (var i = 0; i < entryList.length; i++) {

            var tmp = index,
                entry = new ZipEntry(inBuffer);
            entry.header = inBuffer.slice(tmp, tmp += Utils.Constants.CENHDR);

            entry.entryName = inBuffer.slice(tmp, tmp += entry.header.fileNameLength);

            if (entry.header.extraLength) {
                entry.extra = inBuffer.slice(tmp, tmp += entry.header.extraLength);
            }

            if (entry.header.commentLength)
                entry.comment = inBuffer.slice(tmp, tmp + entry.header.commentLength);

            // advance by the record's total size (header + variable parts)
            index += entry.header.entryHeaderSize;

            entryList[i] = entry;
            entryTable[entry.entryName] = entry;
        }
    }
|
|
|
|
    // Locates and parses the END-of-central-directory record by scanning
    // backwards from the end of the buffer (the END record may be followed
    // by up to 0xFFFF bytes of archive comment). Also detects ZIP64 END
    // records. Throws Errors.INVALID_FORMAT when no END signature is found.
    function readMainHeader(/*Boolean*/ readNow) {
        var i = inBuffer.length - Utils.Constants.ENDHDR, // END header size
            max = Math.max(0, i - 0xFFFF), // 0xFFFF is the max zip file comment length
            n = max,
            endStart = inBuffer.length,
            endOffset = -1, // Start offset of the END header
            commentEnd = 0;

        for (i; i >= n; i--) {
            if (inBuffer[i] !== 0x50) continue; // quick check that the byte is 'P'
            if (inBuffer.readUInt32LE(i) === Utils.Constants.ENDSIG) { // "PK\005\006"
                endOffset = i;
                commentEnd = i;
                endStart = i + Utils.Constants.ENDHDR;
                // We already found a regular signature, let's look just a bit further to check if there's any zip64 signature
                n = i - Utils.Constants.END64HDR;
                continue;
            }

            if (inBuffer.readUInt32LE(i) === Utils.Constants.END64SIG) {
                // Found a zip64 signature, let's continue reading the whole zip64 record
                n = max;
                continue;
            }

            if (inBuffer.readUInt32LE(i) == Utils.Constants.ZIP64SIG) {
                // Found the zip64 record, let's determine it's size
                endOffset = i;
                endStart = i + Utils.readBigUInt64LE(inBuffer, i + Utils.Constants.ZIP64SIZE) + Utils.Constants.ZIP64LEAD;
                break;
            }
        }

        // -1 means no END signature was found anywhere in the scan window
        if (!~endOffset)
            throw new Error(Utils.Errors.INVALID_FORMAT);

        mainHeader.loadFromBinary(inBuffer.slice(endOffset, endStart));
        if (mainHeader.commentLength) {
            // the archive comment trails the END record
            _comment = inBuffer.slice(commentEnd + Utils.Constants.ENDHDR);
        }
        if (readNow) readEntries();
    }
|
|
|
|
    return {
        /**
         * Returns an array of ZipEntry objects existent in the current opened archive
         * @return Array
         */
        get entries() {
            if (!loadedEntries) {
                readEntries(); // lazy parse on first access
            }
            return entryList;
        },
|
|
|
|
        /**
         * Archive comment
         * @return {String}
         */
        get comment() {
            return _comment.toString();
        },
        set comment(val) {
            _comment = Utils.toBuffer(val);
            // keep the END-record's comment length in sync
            mainHeader.commentLength = _comment.length;
        },
|
|
|
|
getEntryCount: function() {
|
|
if (!loadedEntries) {
|
|
return mainHeader.diskEntries;
|
|
}
|
|
|
|
return entryList.length;
|
|
},
|
|
|
|
forEach: function(callback) {
|
|
if (!loadedEntries) {
|
|
iterateEntries(callback);
|
|
return;
|
|
}
|
|
|
|
entryList.forEach(callback);
|
|
},
|
|
|
|
/**
|
|
* Returns a reference to the entry with the given name or null if entry is inexistent
|
|
*
|
|
* @param entryName
|
|
* @return ZipEntry
|
|
*/
|
|
getEntry: function (/*String*/entryName) {
|
|
if (!loadedEntries) {
|
|
readEntries();
|
|
}
|
|
return entryTable[entryName] || null;
|
|
},
|
|
|
|
        /**
         * Adds the given entry to the entry list
         *
         * @param entry
         */
        setEntry: function (/*ZipEntry*/entry) {
            if (!loadedEntries) {
                readEntries();
            }
            entryList.push(entry);
            entryTable[entry.entryName] = entry;
            // keep the END-record's entry count in sync
            mainHeader.totalEntries = entryList.length;
        },
|
|
|
|
/**
|
|
* Removes the entry with the given name from the entry list.
|
|
*
|
|
* If the entry is a directory, then all nested files and directories will be removed
|
|
* @param entryName
|
|
*/
|
|
deleteEntry: function (/*String*/entryName) {
|
|
if (!loadedEntries) {
|
|
readEntries();
|
|
}
|
|
var entry = entryTable[entryName];
|
|
if (entry && entry.isDirectory) {
|
|
var _self = this;
|
|
this.getEntryChildren(entry).forEach(function (child) {
|
|
if (child.entryName !== entryName) {
|
|
_self.deleteEntry(child.entryName)
|
|
}
|
|
})
|
|
}
|
|
entryList.splice(entryList.indexOf(entry), 1);
|
|
delete(entryTable[entryName]);
|
|
mainHeader.totalEntries = entryList.length;
|
|
},
|
|
|
|
/**
|
|
* Iterates and returns all nested files and directories of the given entry
|
|
*
|
|
* @param entry
|
|
* @return Array
|
|
*/
|
|
getEntryChildren: function (/*ZipEntry*/entry) {
|
|
if (!loadedEntries) {
|
|
readEntries();
|
|
}
|
|
if (entry.isDirectory) {
|
|
var list = [],
|
|
name = entry.entryName,
|
|
len = name.length;
|
|
|
|
entryList.forEach(function (zipEntry) {
|
|
if (zipEntry.entryName.substr(0, len) === name) {
|
|
list.push(zipEntry);
|
|
}
|
|
});
|
|
return list;
|
|
}
|
|
return []
|
|
},
|
|
|
|
        /**
         * Returns the zip file serialised as a single Buffer:
         * [local headers + data blocks][central directory][END record + comment]
         *
         * @return Buffer
         */
        compressToBuffer: function () {
            if (!loadedEntries) {
                readEntries();
            }
            // entries are written in case-insensitive name order
            if (entryList.length > 1) {
                entryList.sort(function (a, b) {
                    var nameA = a.entryName.toLowerCase();
                    var nameB = b.entryName.toLowerCase();
                    if (nameA < nameB) {
                        return -1
                    }
                    if (nameA > nameB) {
                        return 1
                    }
                    return 0;
                });
            }

            var totalSize = 0,
                dataBlock = [],      // local header + name/extra + compressed bytes, per entry
                entryHeaders = [],   // central-directory records, per entry
                dindex = 0;          // running offset within the data section

            mainHeader.size = 0;
            mainHeader.offset = 0;

            entryList.forEach(function (entry) {
                // compress data and set local and entry header accordingly. Reason why is called first
                var compressedData = entry.getCompressedData();
                // data header
                entry.header.offset = dindex;
                var dataHeader = entry.header.dataHeaderToBinary();
                var entryNameLen = entry.rawEntryName.length;
                var extra = entry.extra.toString();
                var postHeader = Buffer.alloc(entryNameLen + extra.length);
                entry.rawEntryName.copy(postHeader, 0);
                postHeader.fill(extra, entryNameLen);

                var dataLength = dataHeader.length + postHeader.length + compressedData.length;

                dindex += dataLength;

                dataBlock.push(dataHeader);
                dataBlock.push(postHeader);
                dataBlock.push(compressedData);

                var entryHeader = entry.packHeader();
                entryHeaders.push(entryHeader);
                mainHeader.size += entryHeader.length;
                totalSize += (dataLength + entryHeader.length);
            });

            totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length
            // point to end of data and beginning of central directory first record
            mainHeader.offset = dindex;

            dindex = 0;
            var outBuffer = Buffer.alloc(totalSize);
            dataBlock.forEach(function (content) {
                content.copy(outBuffer, dindex); // write data blocks
                dindex += content.length;
            });
            entryHeaders.forEach(function (content) {
                content.copy(outBuffer, dindex); // write central directory entries
                dindex += content.length;
            });

            var mh = mainHeader.toBinary();
            if (_comment) {
                Buffer.from(_comment).copy(mh, Utils.Constants.ENDHDR); // add zip file comment
            }

            mh.copy(outBuffer, dindex); // write main header

            return outBuffer;
        },
|
|
|
|
toAsyncBuffer: function (/*Function*/onSuccess, /*Function*/onFail, /*Function*/onItemStart, /*Function*/onItemEnd) {
|
|
if (!loadedEntries) {
|
|
readEntries();
|
|
}
|
|
if (entryList.length > 1) {
|
|
entryList.sort(function (a, b) {
|
|
var nameA = a.entryName.toLowerCase();
|
|
var nameB = b.entryName.toLowerCase();
|
|
if (nameA > nameB) {
|
|
return -1
|
|
}
|
|
if (nameA < nameB) {
|
|
return 1
|
|
}
|
|
return 0;
|
|
});
|
|
}
|
|
|
|
var totalSize = 0,
|
|
dataBlock = [],
|
|
entryHeaders = [],
|
|
dindex = 0;
|
|
|
|
mainHeader.size = 0;
|
|
mainHeader.offset = 0;
|
|
|
|
var compress = function (entryList) {
|
|
var self = arguments.callee;
|
|
if (entryList.length) {
|
|
var entry = entryList.pop();
|
|
var name = entry.entryName + entry.extra.toString();
|
|
if (onItemStart) onItemStart(name);
|
|
entry.getCompressedDataAsync(function (compressedData) {
|
|
if (onItemEnd) onItemEnd(name);
|
|
|
|
entry.header.offset = dindex;
|
|
// data header
|
|
var dataHeader = entry.header.dataHeaderToBinary();
|
|
var postHeader;
|
|
try {
|
|
postHeader = Buffer.alloc(name.length, name); // using alloc will work on node 5.x+
|
|
} catch(e){
|
|
postHeader = new Buffer(name); // use deprecated method if alloc fails...
|
|
}
|
|
var dataLength = dataHeader.length + postHeader.length + compressedData.length;
|
|
|
|
dindex += dataLength;
|
|
|
|
dataBlock.push(dataHeader);
|
|
dataBlock.push(postHeader);
|
|
dataBlock.push(compressedData);
|
|
|
|
var entryHeader = entry.packHeader();
|
|
entryHeaders.push(entryHeader);
|
|
mainHeader.size += entryHeader.length;
|
|
totalSize += (dataLength + entryHeader.length);
|
|
|
|
if (entryList.length) {
|
|
self(entryList);
|
|
} else {
|
|
|
|
|
|
totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length
|
|
// point to end of data and beginning of central directory first record
|
|
mainHeader.offset = dindex;
|
|
|
|
dindex = 0;
|
|
var outBuffer = Buffer.alloc(totalSize);
|
|
dataBlock.forEach(function (content) {
|
|
content.copy(outBuffer, dindex); // write data blocks
|
|
dindex += content.length;
|
|
});
|
|
entryHeaders.forEach(function (content) {
|
|
content.copy(outBuffer, dindex); // write central directory entries
|
|
dindex += content.length;
|
|
});
|
|
|
|
var mh = mainHeader.toBinary();
|
|
if (_comment) {
|
|
_comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment
|
|
}
|
|
|
|
mh.copy(outBuffer, dindex); // write main header
|
|
|
|
onSuccess(outBuffer);
|
|
}
|
|
});
|
|
}
|
|
};
|
|
|
|
compress(entryList);
|
|
}
|
|
}
|
|
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 3682:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
var register = __nccwpck_require__(4670)
|
|
var addHook = __nccwpck_require__(5549)
|
|
var removeHook = __nccwpck_require__(6819)
|
|
|
|
// bind with array of arguments: https://stackoverflow.com/a/21792913
// `bindable(fn, thisArg)` yields a function that, applied to an argument
// array, returns `fn` pre-bound to those arguments.
var bind = Function.bind
var bindable = bind.bind(bind)
|
|
|
|
// Decorates `hook` with the public API: remove() plus the
// before/error/after/wrap registration helpers, each pre-bound to `state`
// (and additionally to `name` for the singular variant).
function bindApi (hook, state, name) {
  var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])
  hook.api = { remove: removeHookRef }
  hook.remove = removeHookRef

  ;['before', 'error', 'after', 'wrap'].forEach(function (kind) {
    var args = name ? [state, kind, name] : [state, kind]
    hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)
  })
}
|
|
|
|
// Builds a single-channel hook: every registration targets the fixed
// internal name 'h', so callers never pass a hook name themselves.
function HookSingular () {
  var fixedName = 'h'
  var state = { registry: {} }

  var singularHook = register.bind(null, state, fixedName)
  bindApi(singularHook, state, fixedName)

  return singularHook
}
|
|
|
|
// Builds a named-channel hook collection: callers address hooks by name.
function HookCollection () {
  var state = { registry: {} }

  var collection = register.bind(null, state)
  bindApi(collection, state)

  return collection
}
|
|
|
|
// Legacy entry point, kept as an alias of HookCollection. A deprecation
// warning is printed once per process.
var collectionHookDeprecationMessageDisplayed = false

function Hook () {
  if (collectionHookDeprecationMessageDisplayed === false) {
    collectionHookDeprecationMessageDisplayed = true
    console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4')
  }
  return HookCollection()
}
|
|
|
|
// Pre-bound constructors so they can be invoked with or without `new`.
Hook.Singular = HookSingular.bind()
Hook.Collection = HookCollection.bind()

module.exports = Hook
// expose constructors as a named property for TypeScript
module.exports.Hook = Hook
module.exports.Singular = Hook.Singular
module.exports.Collection = Hook.Collection
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5549:
|
|
/***/ ((module) => {
|
|
|
|
// The module's sole export.
module.exports = addHook;
|
|
|
|
// Registers `hook` under `name` in state.registry, wrapping it according to
// `kind` ("before" | "after" | "error"); "wrap" hooks are stored as-is.
// Each registry record keeps both the wrapped hook and the original so it
// can later be removed by identity.
function addHook(state, kind, name, hook) {
  var orig = hook;
  if (!state.registry[name]) {
    state.registry[name] = [];
  }

  var wrapped = orig;

  if (kind === "before") {
    // run the hook first, then the wrapped method with the same options
    wrapped = function (method, options) {
      return Promise.resolve()
        .then(orig.bind(null, options))
        .then(method.bind(null, options));
    };
  } else if (kind === "after") {
    // run the method, feed its result to the hook, then resolve with the
    // method's (possibly hook-mutated) result
    wrapped = function (method, options) {
      var result;
      return Promise.resolve()
        .then(method.bind(null, options))
        .then(function (methodResult) {
          result = methodResult;
          return orig(result, options);
        })
        .then(function () {
          return result;
        });
    };
  } else if (kind === "error") {
    // only invoked when the method rejects
    wrapped = function (method, options) {
      return Promise.resolve()
        .then(method.bind(null, options))
        .catch(function (error) {
          return orig(error, options);
        });
    };
  }

  state.registry[name].push({
    hook: wrapped,
    orig: orig,
  });
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 4670:
|
|
/***/ ((module) => {
|
|
|
|
// The module's sole export.
module.exports = register;
|
|
|
|
// Executes `method(options)` through every hook registered under `name`,
// returning a Promise of the final result. `name` may be an array of names,
// in which case the hook chains are nested left-to-right.
function register(state, name, method, options) {
  if (typeof method !== "function") {
    throw new Error("method for before hook must be a function");
  }

  if (!options) {
    options = {};
  }

  if (Array.isArray(name)) {
    // nest: build the chain from the innermost (last) name outwards
    return name.reverse().reduce(function (callback, name) {
      return register.bind(null, state, name, callback, options);
    }, method)();
  }

  return Promise.resolve().then(function () {
    var hooks = state.registry[name];

    // nothing registered: call the method directly
    if (!hooks) {
      return method(options);
    }

    // fold each registered hook around the method, then invoke the chain
    return hooks.reduce(function (method, registered) {
      return registered.hook.bind(null, method, options);
    }, method)();
  });
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 6819:
|
|
/***/ ((module) => {
|
|
|
|
// The module's sole export.
module.exports = removeHook;
|
|
|
|
// Unregisters `method` — matched by identity against the original,
// unwrapped hook — from the list registered under `name`. No-op when the
// name or the method is not registered.
function removeHook(state, name, method) {
  var hooks = state.registry[name];
  if (!hooks) {
    return;
  }

  var index = hooks
    .map(function (registered) {
      return registered.orig;
    })
    .indexOf(method);

  if (index === -1) {
    return;
  }

  hooks.splice(index, 1);
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1174:
|
|
/***/ (function(module) {
|
|
|
|
/**
|
|
* This file contains the Bottleneck library (MIT), compiled to ES2017, and without Clustering support.
|
|
* https://github.com/SGrondin/bottleneck
|
|
*/
|
|
(function (global, factory) {
|
|
true ? module.exports = factory() :
|
|
0;
|
|
}(this, (function () { 'use strict';
|
|
|
|
var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
|
|
|
|
function getCjsExportFromNamespace (n) {
|
|
return n && n['default'] || n;
|
|
}
|
|
|
|
var load = function(received, defaults, onto = {}) {
|
|
var k, ref, v;
|
|
for (k in defaults) {
|
|
v = defaults[k];
|
|
onto[k] = (ref = received[k]) != null ? ref : v;
|
|
}
|
|
return onto;
|
|
};
|
|
|
|
var overwrite = function(received, defaults, onto = {}) {
|
|
var k, v;
|
|
for (k in received) {
|
|
v = received[k];
|
|
if (defaults[k] !== void 0) {
|
|
onto[k] = v;
|
|
}
|
|
}
|
|
return onto;
|
|
};
|
|
|
|
  // Namespace bundling the two option-merging helpers above.
  var parser = {
    load: load,
    overwrite: overwrite
  };
|
|
|
|
var DLList;

// Minimal doubly-linked list used as a FIFO job queue. `incr`/`decr` are
// optional callbacks fired on every push/shift so an owner (Queues) can
// maintain an aggregate length across several lists.
DLList = class DLList {
  constructor(incr, decr) {
    this.incr = incr;
    this.decr = decr;
    this._first = null;
    this._last = null;
    this.length = 0;
  }

  // Append `value` at the tail; always returns undefined.
  push(value) {
    this.length++;
    if (typeof this.incr === "function") {
      this.incr();
    }
    const node = { value, prev: this._last, next: null };
    if (this._last == null) {
      this._first = this._last = node;
    } else {
      this._last.next = node;
      this._last = node;
    }
    return void 0;
  }

  // Remove and return the head value; undefined when the list is empty.
  shift() {
    if (this._first == null) {
      return;
    }
    this.length--;
    if (typeof this.decr === "function") {
      this.decr();
    }
    const { value } = this._first;
    this._first = this._first.next;
    if (this._first == null) {
      this._last = null;
    } else {
      this._first.prev = null;
    }
    return value;
  }

  // Peek at the head value without removing it.
  first() {
    if (this._first != null) {
      return this._first.value;
    }
  }

  // Snapshot of all values, head to tail.
  getArray() {
    const values = [];
    for (let node = this._first; node != null; node = node.next) {
      values.push(node.value);
    }
    return values;
  }

  // Drain the list head-first, invoking `cb` with each shifted value.
  // Stops early if a stored value is null/undefined (matches shift()'s
  // empty-list sentinel); always returns undefined.
  forEachShift(cb) {
    let value = this.shift();
    while (value != null) {
      cb(value);
      value = this.shift();
    }
    return void 0;
  }

  // Diagnostic dump: each node's value plus its neighbours' values.
  debug() {
    const dump = [];
    for (let node = this._first; node != null; node = node.next) {
      dump.push({
        value: node.value,
        prev: node.prev != null ? node.prev.value : void 0,
        next: node.next != null ? node.next.value : void 0
      });
    }
    return dump;
  }

};

var DLList_1 = DLList;
|
|
|
|
var Events;

// Minimal promise-aware event emitter. It grafts on()/once()/removeAllListeners()
// onto a host object (`instance`) while keeping listener bookkeeping private.
Events = class Events {
  constructor(instance) {
    this.instance = instance;
    this._events = {};
    // Refuse to double-instrument an object that already looks like an emitter.
    if ((this.instance.on != null) || (this.instance.once != null) || (this.instance.removeAllListeners != null)) {
      throw new Error("An Emitter already exists for this object");
    }
    this.instance.on = (name, cb) => {
      return this._addListener(name, "many", cb);
    };
    this.instance.once = (name, cb) => {
      return this._addListener(name, "once", cb);
    };
    this.instance.removeAllListeners = (name = null) => {
      if (name != null) {
        return delete this._events[name];
      } else {
        return this._events = {};
      }
    };
  }

  // Record a listener; `status` is "many" (persistent) or "once" (single-shot:
  // flipped to "none" after it fires and pruned on the next trigger).
  _addListener(name, status, cb) {
    var base;
    if ((base = this._events)[name] == null) {
      base[name] = [];
    }
    this._events[name].push({cb, status});
    return this.instance;
  }

  // Number of listeners currently registered for `name`.
  listenerCount(name) {
    if (this._events[name] != null) {
      return this._events[name].length;
    } else {
      return 0;
    }
  }

  // Invoke all listeners for `name` with `args`. Every non-"debug" trigger
  // first emits a "debug" event. Listener return values (sync or thenable)
  // are awaited and the first non-null result is returned — this is how the
  // "failed" event communicates a retry delay back to the Job. Errors thrown
  // by listeners (or by the emitter itself) are funnelled into the "error"
  // event and never propagate to the caller.
  async trigger(name, ...args) {
    var e, promises;
    try {
      if (name !== "debug") {
        this.trigger("debug", `Event triggered: ${name}`, args);
      }
      if (this._events[name] == null) {
        return;
      }
      // Prune single-shot listeners that already fired.
      this._events[name] = this._events[name].filter(function(listener) {
        return listener.status !== "none";
      });
      promises = this._events[name].map(async(listener) => {
        var e, returned;
        if (listener.status === "none") {
          return;
        }
        // Mark "once" listeners as consumed before invoking them.
        if (listener.status === "once") {
          listener.status = "none";
        }
        try {
          returned = typeof listener.cb === "function" ? listener.cb(...args) : void 0;
          if (typeof (returned != null ? returned.then : void 0) === "function") {
            return (await returned);
          } else {
            return returned;
          }
        } catch (error) {
          e = error;
          {
            this.trigger("error", e);
          }
          return null;
        }
      });
      return ((await Promise.all(promises))).find(function(x) {
        return x != null;
      });
    } catch (error) {
      e = error;
      {
        this.trigger("error", e);
      }
      return null;
    }
  }

};

var Events_1 = Events;
|
|
|
|
var DLList$1, Events$1, Queues;

DLList$1 = DLList_1;

Events$1 = Events_1;

// Priority buckets: one DLList per priority level (0 = highest). The shared
// counter emits "leftzero" when total queued work leaves zero and "zero"
// when it drains back to empty, via the attached Events emitter.
Queues = class Queues {
  constructor(num_priorities) {
    this.Events = new Events$1(this);
    this._length = 0;
    this._lists = [];
    // CoffeeScript-style inclusive range 1..num_priorities (iterates backwards
    // when num_priorities < 1, matching the original compiled output).
    for (let j = 1; 1 <= num_priorities ? j <= num_priorities : j >= num_priorities; 1 <= num_priorities ? ++j : --j) {
      this._lists.push(new DLList$1(() => this.incr(), () => this.decr()));
    }
  }

  // Called by every per-priority list on push.
  incr() {
    if (this._length++ === 0) {
      return this.Events.trigger("leftzero");
    }
  }

  // Called by every per-priority list on shift.
  decr() {
    if (--this._length === 0) {
      return this.Events.trigger("zero");
    }
  }

  // Enqueue a job into its priority bucket.
  push(job) {
    return this._lists[job.options.priority].push(job);
  }

  // Count for one priority, or the grand total when priority is omitted.
  queued(priority) {
    return priority != null ? this._lists[priority].length : this._length;
  }

  // Drain every bucket, highest priority first, through `fn`.
  shiftAll(fn) {
    return this._lists.forEach((list) => list.forEachShift(fn));
  }

  // First non-empty list in `arr` (defaults to all buckets); an empty array
  // stands in for "nothing queued" so callers can still call .shift() on it.
  getFirst(arr = this._lists) {
    for (const list of arr) {
      if (list.length > 0) {
        return list;
      }
    }
    return [];
  }

  // Remove one job from the lowest-priority non-empty bucket at or below
  // `priority` (used by the LEAK/OVERFLOW_PRIORITY strategies).
  shiftLastFrom(priority) {
    return this.getFirst(this._lists.slice(priority).reverse()).shift();
  }

};

var Queues_1 = Queues;
|
|
|
|
var BottleneckError;

// Marker error type so callers can distinguish Bottleneck-raised failures
// (drops, timeouts, invalid usage) from errors thrown by scheduled jobs.
BottleneckError = class BottleneckError extends Error {};

var BottleneckError_1 = BottleneckError;
|
|
|
|
var BottleneckError$1, DEFAULT_PRIORITY, Job, NUM_PRIORITIES, parser$1;

// Jobs may use integer priorities 0..9 (0 runs first); 5 is applied to
// unspecified or non-integer priorities.
NUM_PRIORITIES = 10;

DEFAULT_PRIORITY = 5;

parser$1 = parser;

BottleneckError$1 = BottleneckError_1;
|
|
|
|
// A single scheduled task and its lifecycle state machine. A job advances
// RECEIVED -> QUEUED -> RUNNING -> EXECUTING (-> DONE when tracked); the
// shared `_states` registry enforces legal transitions via _assertStatus().
var Job;

Job = class Job {
  // task: user function; args: forwarded arguments; options/jobDefaults are
  // merged via parser$1.load; rejectOnDrop controls whether dropping the job
  // rejects its promise; Events/_states/Promise are supplied by the limiter.
  constructor(task, args, options, jobDefaults, rejectOnDrop, Events, _states, Promise) {
    this.task = task;
    this.args = args;
    this.rejectOnDrop = rejectOnDrop;
    this.Events = Events;
    this._states = _states;
    this.Promise = Promise;
    this.options = parser$1.load(options, jobDefaults);
    this.options.priority = this._sanitizePriority(this.options.priority);
    // Suffix auto-generated ids so anonymous jobs get unique keys in _states.
    if (this.options.id === jobDefaults.id) {
      this.options.id = `${this.options.id}-${this._randomIndex()}`;
    }
    this.promise = new this.Promise((_resolve, _reject) => {
      this._resolve = _resolve;
      this._reject = _reject;
    });
    this.retryCount = 0;
  }

  // Clamp to an integer in [0, NUM_PRIORITIES); non-integers (detected with
  // the double-bitwise-NOT trick) fall back to DEFAULT_PRIORITY.
  _sanitizePriority(priority) {
    var sProperty;
    sProperty = ~~priority !== priority ? DEFAULT_PRIORITY : priority;
    if (sProperty < 0) {
      return 0;
    } else if (sProperty > NUM_PRIORITIES - 1) {
      return NUM_PRIORITIES - 1;
    } else {
      return sProperty;
    }
  }

  _randomIndex() {
    return Math.random().toString(36).slice(2);
  }

  // Remove the job from the registry; reject its promise when rejectOnDrop is
  // set and emit "dropped". Returns false when the job was already settled.
  doDrop({error, message = "This job has been dropped by Bottleneck"} = {}) {
    if (this._states.remove(this.options.id)) {
      if (this.rejectOnDrop) {
        this._reject(error != null ? error : new BottleneckError$1(message));
      }
      this.Events.trigger("dropped", {args: this.args, options: this.options, task: this.task, promise: this.promise});
      return true;
    } else {
      return false;
    }
  }

  // Throw unless the job is at `expected`; "DONE" also accepts a forgotten
  // job (status null) since DONE tracking is optional.
  _assertStatus(expected) {
    var status;
    status = this._states.jobStatus(this.options.id);
    if (!(status === expected || (expected === "DONE" && status === null))) {
      throw new BottleneckError$1(`Invalid job status ${status}, expected ${expected}. Please open an issue at https://github.com/SGrondin/bottleneck/issues`);
    }
  }

  doReceive() {
    this._states.start(this.options.id);
    return this.Events.trigger("received", {args: this.args, options: this.options});
  }

  doQueue(reachedHWM, blocked) {
    this._assertStatus("RECEIVED");
    this._states.next(this.options.id);
    return this.Events.trigger("queued", {args: this.args, options: this.options, reachedHWM, blocked});
  }

  doRun() {
    // Retries re-enter at EXECUTING; only the first run leaves QUEUED.
    if (this.retryCount === 0) {
      this._assertStatus("QUEUED");
      this._states.next(this.options.id);
    } else {
      this._assertStatus("EXECUTING");
    }
    return this.Events.trigger("scheduled", {args: this.args, options: this.options});
  }

  // Run the task (through the chained limiter when one is set). When this
  // call is the one that clears the scheduled slot (clearGlobalState()
  // returns true), finalize the job and resolve its promise.
  async doExecute(chained, clearGlobalState, run, free) {
    var error, eventInfo, passed;
    if (this.retryCount === 0) {
      this._assertStatus("RUNNING");
      this._states.next(this.options.id);
    } else {
      this._assertStatus("EXECUTING");
    }
    eventInfo = {args: this.args, options: this.options, retryCount: this.retryCount};
    this.Events.trigger("executing", eventInfo);
    try {
      passed = (await (chained != null ? chained.schedule(this.options, this.task, ...this.args) : this.task(...this.args)));
      if (clearGlobalState()) {
        this.doDone(eventInfo);
        await free(this.options, eventInfo);
        this._assertStatus("DONE");
        return this._resolve(passed);
      }
    } catch (error1) {
      error = error1;
      return this._onFailure(error, eventInfo, clearGlobalState, run, free);
    }
  }

  // Fail the job with a timeout error once `options.expiration` elapses.
  doExpire(clearGlobalState, run, free) {
    var error, eventInfo;
    // BUG FIX: the compiled output called jobStatus(this.options.id === "RUNNING"),
    // passing a boolean instead of the job id, so an expiring job was never
    // advanced from RUNNING to EXECUTING and the assertion below could throw.
    // Look the status up by id and compare it to "RUNNING".
    if (this._states.jobStatus(this.options.id) === "RUNNING") {
      this._states.next(this.options.id);
    }
    this._assertStatus("EXECUTING");
    eventInfo = {args: this.args, options: this.options, retryCount: this.retryCount};
    error = new BottleneckError$1(`This job timed out after ${this.options.expiration} ms.`);
    return this._onFailure(error, eventInfo, clearGlobalState, run, free);
  }

  // Shared failure path: a "failed" listener may return a retry delay in ms;
  // otherwise the job is finalized and its promise rejected.
  async _onFailure(error, eventInfo, clearGlobalState, run, free) {
    var retry, retryAfter;
    if (clearGlobalState()) {
      retry = (await this.Events.trigger("failed", error, eventInfo));
      if (retry != null) {
        retryAfter = ~~retry;
        this.Events.trigger("retry", `Retrying ${this.options.id} after ${retryAfter} ms`, eventInfo);
        this.retryCount++;
        return run(retryAfter);
      } else {
        this.doDone(eventInfo);
        await free(this.options, eventInfo);
        this._assertStatus("DONE");
        return this._reject(error);
      }
    }
  }

  doDone(eventInfo) {
    this._assertStatus("EXECUTING");
    this._states.next(this.options.id);
    return this.Events.trigger("done", eventInfo);
  }

};

var Job_1 = Job;
|
|
|
|
var BottleneckError$2, LocalDatastore, parser$2;

parser$2 = parser;

BottleneckError$2 = BottleneckError_1;

// In-process datastore used when `datastore: "local"`. It mirrors the async
// __method__ interface of the Redis datastores (every call first yields to
// the event loop via yieldLoop) while keeping all counters in plain fields.
LocalDatastore = class LocalDatastore {
  constructor(instance, storeOptions, storeInstanceOptions) {
    this.instance = instance;
    this.storeOptions = storeOptions;
    this.clientId = this.instance._randomIndex();
    // Copies Promise, timeout, heartbeatInterval, ... onto `this`.
    parser$2.load(storeInstanceOptions, storeInstanceOptions, this);
    this._nextRequest = this._lastReservoirRefresh = this._lastReservoirIncrease = Date.now();
    this._running = 0;
    this._done = 0;
    this._unblockTime = 0;
    this.ready = this.Promise.resolve();
    this.clients = {};
    this._startHeartbeat();
  }

  // Start (or stop) the interval that refreshes/increases the reservoir.
  // The timer is unref'd so it never keeps the process alive on its own.
  _startHeartbeat() {
    var base;
    if ((this.heartbeat == null) && (((this.storeOptions.reservoirRefreshInterval != null) && (this.storeOptions.reservoirRefreshAmount != null)) || ((this.storeOptions.reservoirIncreaseInterval != null) && (this.storeOptions.reservoirIncreaseAmount != null)))) {
      return typeof (base = (this.heartbeat = setInterval(() => {
        var amount, incr, maximum, now, reservoir;
        now = Date.now();
        // Periodic full refresh of the reservoir.
        if ((this.storeOptions.reservoirRefreshInterval != null) && now >= this._lastReservoirRefresh + this.storeOptions.reservoirRefreshInterval) {
          this._lastReservoirRefresh = now;
          this.storeOptions.reservoir = this.storeOptions.reservoirRefreshAmount;
          this.instance._drainAll(this.computeCapacity());
        }
        // Periodic bounded increment of the reservoir.
        if ((this.storeOptions.reservoirIncreaseInterval != null) && now >= this._lastReservoirIncrease + this.storeOptions.reservoirIncreaseInterval) {
          ({
            reservoirIncreaseAmount: amount,
            reservoirIncreaseMaximum: maximum,
            reservoir
          } = this.storeOptions);
          this._lastReservoirIncrease = now;
          incr = maximum != null ? Math.min(amount, maximum - reservoir) : amount;
          if (incr > 0) {
            this.storeOptions.reservoir += incr;
            return this.instance._drainAll(this.computeCapacity());
          }
        }
      }, this.heartbeatInterval))).unref === "function" ? base.unref() : void 0;
    } else {
      return clearInterval(this.heartbeat);
    }
  }

  // Local "pub/sub": a published message loops straight back as a "message"
  // event on the owning limiter.
  async __publish__(message) {
    await this.yieldLoop();
    return this.instance.Events.trigger("message", message.toString());
  }

  // Nothing to tear down locally beyond the heartbeat timer.
  async __disconnect__(flush) {
    await this.yieldLoop();
    clearInterval(this.heartbeat);
    return this.Promise.resolve();
  }

  // Resolve on a later tick (after `t` ms) to emulate a datastore round-trip.
  yieldLoop(t = 0) {
    return new this.Promise(function(resolve, reject) {
      return setTimeout(resolve, t);
    });
  }

  // Penalty applied by the BLOCK strategy; defaults to 15*minTime or 5000 ms.
  computePenalty() {
    var ref;
    return (ref = this.storeOptions.penalty) != null ? ref : (15 * this.storeOptions.minTime) || 5000;
  }

  async __updateSettings__(options) {
    await this.yieldLoop();
    parser$2.overwrite(options, options, this.storeOptions);
    // Settings changes may enable/disable the heartbeat or free capacity.
    this._startHeartbeat();
    this.instance._drainAll(this.computeCapacity());
    return true;
  }

  async __running__() {
    await this.yieldLoop();
    return this._running;
  }

  async __queued__() {
    await this.yieldLoop();
    return this.instance.queued();
  }

  async __done__() {
    await this.yieldLoop();
    return this._done;
  }

  // Used by Group auto-cleanup: has this limiter been idle past its timeout?
  async __groupCheck__(time) {
    await this.yieldLoop();
    return (this._nextRequest + this.timeout) < time;
  }

  // Remaining capacity from maxConcurrent and/or reservoir; null = unlimited.
  computeCapacity() {
    var maxConcurrent, reservoir;
    ({maxConcurrent, reservoir} = this.storeOptions);
    if ((maxConcurrent != null) && (reservoir != null)) {
      return Math.min(maxConcurrent - this._running, reservoir);
    } else if (maxConcurrent != null) {
      return maxConcurrent - this._running;
    } else if (reservoir != null) {
      return reservoir;
    } else {
      return null;
    }
  }

  conditionsCheck(weight) {
    var capacity;
    capacity = this.computeCapacity();
    return (capacity == null) || weight <= capacity;
  }

  async __incrementReservoir__(incr) {
    var reservoir;
    await this.yieldLoop();
    reservoir = this.storeOptions.reservoir += incr;
    this.instance._drainAll(this.computeCapacity());
    return reservoir;
  }

  async __currentReservoir__() {
    await this.yieldLoop();
    return this.storeOptions.reservoir;
  }

  isBlocked(now) {
    return this._unblockTime >= now;
  }

  // Could a job of `weight` start right now (capacity + minTime spacing)?
  check(weight, now) {
    return this.conditionsCheck(weight) && (this._nextRequest - now) <= 0;
  }

  async __check__(weight) {
    var now;
    await this.yieldLoop();
    now = Date.now();
    return this.check(weight, now);
  }

  // Try to reserve capacity for a job; on success, `wait` is how long the job
  // must still sleep to honour minTime spacing before executing.
  async __register__(index, weight, expiration) {
    var now, wait;
    await this.yieldLoop();
    now = Date.now();
    if (this.conditionsCheck(weight)) {
      this._running += weight;
      if (this.storeOptions.reservoir != null) {
        this.storeOptions.reservoir -= weight;
      }
      wait = Math.max(this._nextRequest - now, 0);
      this._nextRequest = now + wait + this.storeOptions.minTime;
      return {
        success: true,
        wait,
        reservoir: this.storeOptions.reservoir
      };
    } else {
      return {
        success: false
      };
    }
  }

  // Strategy 3 is Bottleneck.strategy.BLOCK.
  strategyIsBlock() {
    return this.storeOptions.strategy === 3;
  }

  // Admission control for a new submission: enforces maxConcurrent weight,
  // detects the high-water mark, and applies the BLOCK strategy penalty
  // (dropping all queued jobs) when triggered.
  async __submit__(queueLength, weight) {
    var blocked, now, reachedHWM;
    await this.yieldLoop();
    if ((this.storeOptions.maxConcurrent != null) && weight > this.storeOptions.maxConcurrent) {
      throw new BottleneckError$2(`Impossible to add a job having a weight of ${weight} to a limiter having a maxConcurrent setting of ${this.storeOptions.maxConcurrent}`);
    }
    now = Date.now();
    reachedHWM = (this.storeOptions.highWater != null) && queueLength === this.storeOptions.highWater && !this.check(weight, now);
    blocked = this.strategyIsBlock() && (reachedHWM || this.isBlocked(now));
    if (blocked) {
      this._unblockTime = now + this.computePenalty();
      this._nextRequest = this._unblockTime + this.storeOptions.minTime;
      this.instance._dropAllQueued();
    }
    return {
      reachedHWM,
      blocked,
      strategy: this.storeOptions.strategy
    };
  }

  // Return a finished job's weight to the pool and trigger a drain.
  async __free__(index, weight) {
    await this.yieldLoop();
    this._running -= weight;
    this._done += weight;
    this.instance._drainAll(this.computeCapacity());
    return {
      running: this._running
    };
  }

};

var LocalDatastore_1 = LocalDatastore;
|
|
|
|
var BottleneckError$3, States;

// Error class used by States.statusJobs when given an unknown status name.
BottleneckError$3 = BottleneckError_1;
|
|
|
|
// Tracks each job's position in the limiter's status pipeline. `status` is an
// ordered list of status names; `_jobs` maps job id -> index into that list;
// `counts[i]` holds how many jobs currently sit at status i.
var States;

States = class States {
  constructor(status1) {
    this.status = status1;
    this._jobs = {};
    this.counts = this.status.map(() => 0);
  }

  // Advance job `id` one step; once it walks off the end it is forgotten.
  next(id) {
    const current = this._jobs[id];
    if (current == null) {
      return;
    }
    const following = current + 1;
    if (following < this.status.length) {
      this.counts[current]--;
      this.counts[following]++;
      return this._jobs[id]++;
    }
    this.counts[current]--;
    return delete this._jobs[id];
  }

  // Register a new job at the first status.
  start(id) {
    const initial = 0;
    this._jobs[id] = initial;
    return this.counts[initial]++;
  }

  // Forget a job wherever it is; returns whether it was known.
  remove(id) {
    const current = this._jobs[id];
    if (current != null) {
      this.counts[current]--;
      delete this._jobs[id];
    }
    return current != null;
  }

  // Status name for a job, or null when the job is unknown.
  jobStatus(id) {
    const name = this.status[this._jobs[id]];
    return name != null ? name : null;
  }

  // Ids of all jobs at `status`; all known ids when status is null/omitted.
  statusJobs(status) {
    if (status == null) {
      return Object.keys(this._jobs);
    }
    const pos = this.status.indexOf(status);
    if (pos < 0) {
      throw new BottleneckError$3(`status must be one of ${this.status.join(', ')}`);
    }
    const matching = [];
    for (const id in this._jobs) {
      if (this._jobs[id] === pos) {
        matching.push(id);
      }
    }
    return matching;
  }

  // Map of status name -> number of jobs currently holding it.
  statusCounts() {
    return this.counts.reduce((acc, v, i) => {
      acc[this.status[i]] = v;
      return acc;
    }, {});
  }

};

var States_1 = States;
|
|
|
|
var DLList$2, Sync;

DLList$2 = DLList_1;

// A tiny FIFO mutex: schedule() enqueues tasks and _tryToRun executes them
// strictly one at a time, settling each caller's promise with its own task's
// outcome. Used by Bottleneck for the submit and register critical sections.
Sync = class Sync {
  constructor(name, Promise) {
    this.schedule = this.schedule.bind(this);
    this.name = name;
    this.Promise = Promise;
    this._running = 0;
    this._queue = new DLList$2();
  }

  isEmpty() {
    return this._queue.length === 0;
  }

  // Run the next queued task if nothing is running. The task's outcome is
  // captured as a thunk (`cb`) so the recursive _tryToRun() below can start
  // the next task before the current caller's promise settles.
  async _tryToRun() {
    var args, cb, error, reject, resolve, returned, task;
    if ((this._running < 1) && this._queue.length > 0) {
      this._running++;
      ({task, args, resolve, reject} = this._queue.shift());
      cb = (await (async function() {
        try {
          returned = (await task(...args));
          return function() {
            return resolve(returned);
          };
        } catch (error1) {
          error = error1;
          return function() {
            return reject(error);
          };
        }
      })());
      this._running--;
      this._tryToRun();
      return cb();
    }
  }

  // Enqueue `task(...args)`; returns a promise for its eventual result.
  schedule(task, ...args) {
    var promise, reject, resolve;
    resolve = reject = null;
    promise = new this.Promise(function(_resolve, _reject) {
      resolve = _resolve;
      return reject = _reject;
    });
    this._queue.push({task, args, resolve, reject});
    this._tryToRun();
    return promise;
  }

};

var Sync_1 = Sync;
|
|
|
|
// Bundled Bottleneck package version, exposed both as a CommonJS-style object
// and as a frozen ES-namespace shape (what rollup emits for a JSON import).
var version = "2.19.5";

var version$1 = {
  version: version
};

var version$2 = /*#__PURE__*/Object.freeze({
  version: version,
  default: version$1
});
|
|
|
|
// Stubs for the Redis-backed components (RedisConnection, IORedisConnection,
// Scripts) that are stripped from this "light" build: invoking any of them
// just prints an explanatory message instead of providing clustering support.
var require$$2 = () => console.log('You must import the full version of Bottleneck in order to use this feature.');

var require$$3 = () => console.log('You must import the full version of Bottleneck in order to use this feature.');

var require$$4 = () => console.log('You must import the full version of Bottleneck in order to use this feature.');
|
|
|
|
var Events$2, Group, IORedisConnection$1, RedisConnection$1, Scripts$1, parser$3;

parser$3 = parser;

Events$2 = Events_1;

// In the light build these resolve to the warning-only stubs defined above.
RedisConnection$1 = require$$2;

IORedisConnection$1 = require$$3;

Scripts$1 = require$$4;

// A keyed collection of limiters sharing the same options (and, for Redis
// datastores, the same connection). Limiters are created lazily per key and
// garbage-collected by _startAutoCleanup once idle for `timeout` ms.
Group = (function() {
  class Group {
    constructor(limiterOptions = {}) {
      this.deleteKey = this.deleteKey.bind(this);
      this.limiterOptions = limiterOptions;
      parser$3.load(this.limiterOptions, this.defaults, this);
      this.Events = new Events$2(this);
      this.instances = {};
      // NOTE(review): Bottleneck_1 is assigned later in this module; this
      // works because Groups are only constructed after full evaluation.
      this.Bottleneck = Bottleneck_1;
      this._startAutoCleanup();
      this.sharedConnection = this.connection != null;
      if (this.connection == null) {
        if (this.limiterOptions.datastore === "redis") {
          this.connection = new RedisConnection$1(Object.assign({}, this.limiterOptions, {Events: this.Events}));
        } else if (this.limiterOptions.datastore === "ioredis") {
          this.connection = new IORedisConnection$1(Object.assign({}, this.limiterOptions, {Events: this.Events}));
        }
      }
    }

    // Fetch (or lazily create) the limiter for `key`; emits "created" when a
    // new limiter is instantiated.
    key(key = "") {
      var ref;
      return (ref = this.instances[key]) != null ? ref : (() => {
        var limiter;
        limiter = this.instances[key] = new this.Bottleneck(Object.assign(this.limiterOptions, {
          id: `${this.id}-${key}`,
          timeout: this.timeout,
          connection: this.connection
        }));
        this.Events.trigger("created", limiter, key);
        return limiter;
      })();
    }

    // Delete the limiter for `key` locally and, when a connection exists,
    // its datastore keys as well. Returns whether anything was deleted.
    async deleteKey(key = "") {
      var deleted, instance;
      instance = this.instances[key];
      if (this.connection) {
        deleted = (await this.connection.__runCommand__(['del', ...Scripts$1.allKeys(`${this.id}-${key}`)]));
      }
      if (instance != null) {
        delete this.instances[key];
        await instance.disconnect();
      }
      return (instance != null) || deleted > 0;
    }

    // [{key, limiter}] snapshot of the locally known instances.
    limiters() {
      var k, ref, results, v;
      ref = this.instances;
      results = [];
      for (k in ref) {
        v = ref[k];
        results.push({
          key: k,
          limiter: v
        });
      }
      return results;
    }

    keys() {
      return Object.keys(this.instances);
    }

    // Like keys(), but SCANs Redis for every key across the whole cluster.
    async clusterKeys() {
      var cursor, end, found, i, k, keys, len, next, start;
      if (this.connection == null) {
        return this.Promise.resolve(this.keys());
      }
      keys = [];
      cursor = null;
      // Strip the "b_<id>-" prefix and "_settings" suffix from matched keys.
      start = `b_${this.id}-`.length;
      end = "_settings".length;
      while (cursor !== 0) {
        [next, found] = (await this.connection.__runCommand__(["scan", cursor != null ? cursor : 0, "match", `b_${this.id}-*_settings`, "count", 10000]));
        cursor = ~~next;
        for (i = 0, len = found.length; i < len; i++) {
          k = found[i];
          keys.push(k.slice(start, -end));
        }
      }
      return keys;
    }

    // Every timeout/2 ms, delete limiters whose stores report them idle.
    // The interval is unref'd so it never keeps the process alive by itself.
    _startAutoCleanup() {
      var base;
      clearInterval(this.interval);
      return typeof (base = (this.interval = setInterval(async() => {
        var e, k, ref, results, time, v;
        time = Date.now();
        ref = this.instances;
        results = [];
        for (k in ref) {
          v = ref[k];
          try {
            if ((await v._store.__groupCheck__(time))) {
              results.push(this.deleteKey(k));
            } else {
              results.push(void 0);
            }
          } catch (error) {
            e = error;
            results.push(v.Events.trigger("error", e));
          }
        }
        return results;
      }, this.timeout / 2))).unref === "function" ? base.unref() : void 0;
    }

    updateSettings(options = {}) {
      parser$3.overwrite(options, this.defaults, this);
      parser$3.overwrite(options, options, this.limiterOptions);
      // A new timeout requires rescheduling the cleanup interval.
      if (options.timeout != null) {
        return this._startAutoCleanup();
      }
    }

    // Only close the connection when this Group owns it (not shared in).
    disconnect(flush = true) {
      var ref;
      if (!this.sharedConnection) {
        return (ref = this.connection) != null ? ref.disconnect(flush) : void 0;
      }
    }

  }

  Group.prototype.defaults = {
    timeout: 1000 * 60 * 5,
    connection: null,
    Promise: Promise,
    id: "group-key"
  };

  return Group;

}).call(commonjsGlobal);

var Group_1 = Group;
|
|
|
|
var Batcher, Events$3, parser$4;

parser$4 = parser;

Events$3 = Events_1;

// Collects added items and emits them as a single "batch" event once either
// `maxSize` items have accumulated or `maxTime` ms have elapsed since the
// first item of the current batch. add() resolves when its batch is flushed.
Batcher = (function() {
  class Batcher {
    constructor(options = {}) {
      this.options = options;
      parser$4.load(this.options, this.defaults, this);
      this.Events = new Events$3(this);
      this._arr = [];
      this._resetPromise();
      this._lastFlush = Date.now();
    }

    // Fresh promise that the next flush resolves (`rej` intentionally unused:
    // batches never fail).
    _resetPromise() {
      return this._promise = new this.Promise((res, rej) => {
        return this._resolve = res;
      });
    }

    // Emit the pending batch and reset the accumulator and timer.
    _flush() {
      clearTimeout(this._timeout);
      this._lastFlush = Date.now();
      this._resolve();
      this.Events.trigger("batch", this._arr);
      this._arr = [];
      return this._resetPromise();
    }

    // Queue one item; returns a promise resolved when its batch is emitted.
    add(data) {
      var ret;
      this._arr.push(data);
      ret = this._promise;
      if (this._arr.length === this.maxSize) {
        this._flush();
      } else if ((this.maxTime != null) && this._arr.length === 1) {
        // The first item of a batch starts the time-based flush timer.
        this._timeout = setTimeout(() => {
          return this._flush();
        }, this.maxTime);
      }
      return ret;
    }

  }

  Batcher.prototype.defaults = {
    maxTime: null,
    maxSize: null,
    Promise: Promise
  };

  return Batcher;

}).call(commonjsGlobal);

var Batcher_1 = Batcher;
|
|
|
|
// Light-build stub for RedisDatastore, plus the frozen version namespace
// unwrapped back to its CommonJS shape for use by Bottleneck below.
var require$$4$1 = () => console.log('You must import the full version of Bottleneck in order to use this feature.');

var require$$8 = getCjsExportFromNamespace(version$2);
|
|
|
|
// Module-level aliases wiring the Bottleneck class to the components above.
// `splice` is the CoffeeScript-compiled helper for array destructuring.
var Bottleneck, DEFAULT_PRIORITY$1, Events$4, Job$1, LocalDatastore$1, NUM_PRIORITIES$1, Queues$1, RedisDatastore$1, States$1, Sync$1, parser$5,
  splice = [].splice;

// Same priority scheme as in Job: ten levels (0..9), 5 is the default.
NUM_PRIORITIES$1 = 10;

DEFAULT_PRIORITY$1 = 5;

parser$5 = parser;

Queues$1 = Queues_1;

Job$1 = Job_1;

LocalDatastore$1 = LocalDatastore_1;

// Warning-only stub in the light build.
RedisDatastore$1 = require$$4$1;

Events$4 = Events_1;

States$1 = States_1;

Sync$1 = Sync_1;
|
|
|
|
Bottleneck = (function() {
|
|
class Bottleneck {
|
|
// Wire up a limiter: validate the options object, build the priority queues,
// the job-state registry (DONE tracked only when trackDoneStatus), the
// submit/register locks, and the datastore selected by `datastore` /
// `connection`. The queues' "leftzero"/"zero" events keep the store's
// heartbeat timer ref'd only while work is pending, so an idle process
// can exit.
constructor(options = {}, ...invalid) {
  var storeInstanceOptions, storeOptions;
  this._addToQueue = this._addToQueue.bind(this);
  this._validateOptions(options, invalid);
  parser$5.load(options, this.instanceDefaults, this);
  this._queues = new Queues$1(NUM_PRIORITIES$1);
  this._scheduled = {};
  this._states = new States$1(["RECEIVED", "QUEUED", "RUNNING", "EXECUTING"].concat(this.trackDoneStatus ? ["DONE"] : []));
  this._limiter = null;
  this.Events = new Events$4(this);
  this._submitLock = new Sync$1("submit", this.Promise);
  this._registerLock = new Sync$1("register", this.Promise);
  storeOptions = parser$5.load(options, this.storeDefaults, {});
  // Select the datastore backend (Redis variants are stubs in this build).
  this._store = (function() {
    if (this.datastore === "redis" || this.datastore === "ioredis" || (this.connection != null)) {
      storeInstanceOptions = parser$5.load(options, this.redisStoreDefaults, {});
      return new RedisDatastore$1(this, storeOptions, storeInstanceOptions);
    } else if (this.datastore === "local") {
      storeInstanceOptions = parser$5.load(options, this.localStoreDefaults, {});
      return new LocalDatastore$1(this, storeOptions, storeInstanceOptions);
    } else {
      throw new Bottleneck.prototype.BottleneckError(`Invalid datastore type: ${this.datastore}`);
    }
  }).call(this);
  this._queues.on("leftzero", () => {
    var ref;
    return (ref = this._store.heartbeat) != null ? typeof ref.ref === "function" ? ref.ref() : void 0 : void 0;
  });
  this._queues.on("zero", () => {
    var ref;
    return (ref = this._store.heartbeat) != null ? typeof ref.unref === "function" ? ref.unref() : void 0 : void 0;
  });
}
|
|
|
|
_validateOptions(options, invalid) {
|
|
if (!((options != null) && typeof options === "object" && invalid.length === 0)) {
|
|
throw new Bottleneck.prototype.BottleneckError("Bottleneck v2 takes a single object argument. Refer to https://github.com/SGrondin/bottleneck#upgrading-to-v2 if you're upgrading from Bottleneck v1.");
|
|
}
|
|
}
|
|
|
|
// ---- Thin accessors delegating to the datastore, queues and states. ----

// Resolves once the underlying datastore is ready to accept commands.
ready() {
  return this._store.ready;
}

// Datastore client handles (empty object for the local datastore).
clients() {
  return this._store.clients;
}

// Pub/sub channel names for this limiter (used by the Redis datastores).
channel() {
  return `b_${this.id}`;
}

channel_client() {
  return `b_${this.id}_${this._store.clientId}`;
}

publish(message) {
  return this._store.__publish__(message);
}

disconnect(flush = true) {
  return this._store.__disconnect__(flush);
}

// Chain another limiter: this limiter's jobs must also pass through it.
chain(_limiter) {
  this._limiter = _limiter;
  return this;
}

// Locally queued job count (optionally for a single priority level).
queued(priority) {
  return this._queues.queued(priority);
}

// Queued count as reported by the (possibly clustered) datastore.
clusterQueued() {
  return this._store.__queued__();
}

// True when nothing is queued locally and no submission is in flight.
empty() {
  return this.queued() === 0 && this._submitLock.isEmpty();
}

running() {
  return this._store.__running__();
}

done() {
  return this._store.__done__();
}

jobStatus(id) {
  return this._states.jobStatus(id);
}

jobs(status) {
  return this._states.statusJobs(status);
}

counts() {
  return this._states.statusCounts();
}

_randomIndex() {
  return Math.random().toString(36).slice(2);
}

// Ask the store whether a job of `weight` could run right now.
check(weight = 1) {
  return this._store.__check__(weight);
}
|
|
|
|
_clearGlobalState(index) {
|
|
if (this._scheduled[index] != null) {
|
|
clearTimeout(this._scheduled[index].expiration);
|
|
delete this._scheduled[index];
|
|
return true;
|
|
} else {
|
|
return false;
|
|
}
|
|
}
|
|
|
|
// Release a finished job's weight back to the store and emit "idle" when the
// limiter has no running or queued work left. Store errors are reported on
// the "error" event instead of rejecting.
async _free(index, job, options, eventInfo) {
  var e, running;
  try {
    ({running} = (await this._store.__free__(index, options.weight)));
    this.Events.trigger("debug", `Freed ${options.id}`, eventInfo);
    if (running === 0 && this.empty()) {
      return this.Events.trigger("idle");
    }
  } catch (error1) {
    e = error1;
    return this.Events.trigger("error", e);
  }
}
|
|
|
|
// Schedule the actual execution of a drained job after `wait` ms, plus an
// optional expiration timer. The slot is recorded in _scheduled under its
// random `index` so _clearGlobalState can cancel and settle it exactly once.
_run(index, job, wait) {
  var clearGlobalState, free, run;
  job.doRun();
  clearGlobalState = this._clearGlobalState.bind(this, index);
  // `run` re-enters this method for retries; `free` releases store capacity.
  run = this._run.bind(this, index, job);
  free = this._free.bind(this, index, job);
  return this._scheduled[index] = {
    timeout: setTimeout(() => {
      return job.doExecute(this._limiter, clearGlobalState, run, free);
    }, wait),
    expiration: job.options.expiration != null ? setTimeout(function() {
      return job.doExpire(clearGlobalState, run, free);
    }, wait + job.options.expiration) : void 0,
    job: job
  };
}
|
|
|
|
// Attempt to start the highest-priority queued job. Resolves with the job's
// weight when it was started, or null when the queue is empty, `capacity` is
// insufficient, or the store refused the registration. Runs under the
// register lock so local queue state and store counters stay consistent.
_drainOne(capacity) {
  return this._registerLock.schedule(() => {
    var args, index, next, options, queue;
    if (this.queued() === 0) {
      return this.Promise.resolve(null);
    }
    queue = this._queues.getFirst();
    // Peek (don't shift yet) — the job only leaves the queue on success.
    ({options, args} = next = queue.first());
    if ((capacity != null) && options.weight > capacity) {
      return this.Promise.resolve(null);
    }
    this.Events.trigger("debug", `Draining ${options.id}`, {args, options});
    index = this._randomIndex();
    return this._store.__register__(index, options.weight, options.expiration).then(({success, wait, reservoir}) => {
      var empty;
      this.Events.trigger("debug", `Drained ${options.id}`, {success, args, options});
      if (success) {
        queue.shift();
        empty = this.empty();
        if (empty) {
          this.Events.trigger("empty");
        }
        if (reservoir === 0) {
          this.Events.trigger("depleted", empty);
        }
        this._run(index, next, wait);
        return this.Promise.resolve(options.weight);
      } else {
        return this.Promise.resolve(null);
      }
    });
  });
}
|
|
|
|
// Keep draining jobs one at a time while capacity remains, accumulating the
// total weight started; errors surface on the "error" event, not as a
// rejection of the returned promise.
_drainAll(capacity, total = 0) {
  return this._drainOne(capacity).then((drained) => {
    var newCapacity;
    if (drained != null) {
      newCapacity = capacity != null ? capacity - drained : capacity;
      return this._drainAll(newCapacity, total + drained);
    } else {
      return this.Promise.resolve(total);
    }
  }).catch((e) => {
    return this.Events.trigger("error", e);
  });
}
|
|
|
|
_dropAllQueued(message) {
|
|
return this._queues.shiftAll(function(job) {
|
|
return job.doDrop({message});
|
|
});
|
|
}
|
|
|
|
// Gracefully shut the limiter down. With dropWaitingJobs (the default) every
// queued and not-yet-executing job is dropped and the returned promise
// resolves once all executing jobs have settled; otherwise a lowest-priority,
// zero-weight sentinel job is scheduled behind the existing work. Afterwards
// new submissions are rejected and a second stop() call rejects immediately.
stop(options = {}) {
  var done, waitForExecuting;
  options = parser$5.load(options, this.stopDefaults);
  // Resolves once exactly `at` jobs remain in the tracked (non-DONE) states.
  waitForExecuting = (at) => {
    var finished;
    finished = () => {
      var counts;
      counts = this._states.counts;
      return (counts[0] + counts[1] + counts[2] + counts[3]) === at;
    };
    return new this.Promise((resolve, reject) => {
      if (finished()) {
        return resolve();
      } else {
        return this.on("done", () => {
          if (finished()) {
            this.removeAllListeners("done");
            return resolve();
          }
        });
      }
    });
  };
  // Comma-sequence: rebind _run and _drainOne so nothing new starts, then
  // drop every scheduled-but-not-executing job under both locks.
  done = options.dropWaitingJobs ? (this._run = function(index, next) {
    return next.doDrop({
      message: options.dropErrorMessage
    });
  }, this._drainOne = () => {
    return this.Promise.resolve(null);
  }, this._registerLock.schedule(() => {
    return this._submitLock.schedule(() => {
      var k, ref, v;
      ref = this._scheduled;
      for (k in ref) {
        v = ref[k];
        // RUNNING jobs have timers pending but no user code executing yet.
        if (this.jobStatus(v.job.options.id) === "RUNNING") {
          clearTimeout(v.timeout);
          clearTimeout(v.expiration);
          v.job.doDrop({
            message: options.dropErrorMessage
          });
        }
      }
      this._dropAllQueued(options.dropErrorMessage);
      return waitForExecuting(0);
    });
  })) : this.schedule({
    priority: NUM_PRIORITIES$1 - 1,
    weight: 0
  }, () => {
    // The sentinel itself counts as the one remaining tracked job.
    return waitForExecuting(1);
  });
  // Reject any job submitted from now on.
  this._receive = function(job) {
    return job._reject(new Bottleneck.prototype.BottleneckError(options.enqueueErrorMessage));
  };
  this.stop = () => {
    return this.Promise.reject(new Bottleneck.prototype.BottleneckError("stop() has already been called"));
  };
  return done;
}
|
|
|
|
  // Submit `job` to the datastore and, depending on the store's verdict,
  // drop it (blocked / overflow strategy) or push it onto the local queues
  // and kick off a drain. Returns the store's `reachedHWM` flag (or
  // true/false for the blocked/error short-circuits). Runs under
  // _submitLock via _receive.
  async _addToQueue(job) {
    var args, blocked, error, options, reachedHWM, shifted, strategy;
    ({args, options} = job);
    try {
      ({reachedHWM, blocked, strategy} = (await this._store.__submit__(this.queued(), options.weight)));
    } catch (error1) {
      error = error1;
      this.Events.trigger("debug", `Could not queue ${options.id}`, {args, options, error});
      job.doDrop({error});
      return false;
    }
    if (blocked) {
      // BLOCK strategy tripped: drop unconditionally.
      job.doDrop();
      return true;
    } else if (reachedHWM) {
      // High-water mark reached: pick a victim according to the strategy.
      // LEAK drops the lowest-priority queued job; OVERFLOW_PRIORITY only
      // drops one of strictly lower priority; OVERFLOW drops this job itself.
      shifted = strategy === Bottleneck.prototype.strategy.LEAK ? this._queues.shiftLastFrom(options.priority) : strategy === Bottleneck.prototype.strategy.OVERFLOW_PRIORITY ? this._queues.shiftLastFrom(options.priority + 1) : strategy === Bottleneck.prototype.strategy.OVERFLOW ? job : void 0;
      if (shifted != null) {
        shifted.doDrop();
      }
      if ((shifted == null) || strategy === Bottleneck.prototype.strategy.OVERFLOW) {
        // No room was made (or this very job was the victim): don't queue.
        if (shifted == null) {
          job.doDrop();
        }
        return reachedHWM;
      }
    }
    job.doQueue(reachedHWM, blocked);
    this._queues.push(job);
    await this._drainAll();
    return reachedHWM;
  }
|
|
|
|
_receive(job) {
|
|
if (this._states.jobStatus(job.options.id) != null) {
|
|
job._reject(new Bottleneck.prototype.BottleneckError(`A job with the same id already exists (id=${job.options.id})`));
|
|
return false;
|
|
} else {
|
|
job.doReceive();
|
|
return this._submitLock.schedule(this._addToQueue, job);
|
|
}
|
|
}
|
|
|
|
  // Callback-style submission: submit([options,] fn, ...args, cb).
  // The trailing node-style callback is spliced OFF `args` (splice mutates),
  // fn is wrapped into a promise-returning task, and cb is invoked with the
  // callback arguments on settle. Returns _receive's result.
  submit(...args) {
    var cb, fn, job, options, ref, ref1, task;
    if (typeof args[0] === "function") {
      // No options object given: first arg is fn, last is the callback.
      ref = args, [fn, ...args] = ref, [cb] = splice.call(args, -1);
      options = parser$5.load({}, this.jobDefaults);
    } else {
      ref1 = args, [options, fn, ...args] = ref1, [cb] = splice.call(args, -1);
      options = parser$5.load(options, this.jobDefaults);
    }
    // Adapt the callback API: resolve/reject with the raw callback args,
    // rejecting when the first (error) argument is non-null.
    task = (...args) => {
      return new this.Promise(function(resolve, reject) {
        return fn(...args, function(...args) {
          return (args[0] != null ? reject : resolve)(args);
        });
      });
    };
    job = new Job$1(task, args, options, this.jobDefaults, this.rejectOnDrop, this.Events, this._states, this.Promise);
    // Fan the settled promise back out to the node-style callback, spreading
    // arrays (callback args) and passing other rejection reasons as-is.
    job.promise.then(function(args) {
      return typeof cb === "function" ? cb(...args) : void 0;
    }).catch(function(args) {
      if (Array.isArray(args)) {
        return typeof cb === "function" ? cb(...args) : void 0;
      } else {
        return typeof cb === "function" ? cb(args) : void 0;
      }
    });
    return this._receive(job);
  }
|
|
|
|
schedule(...args) {
|
|
var job, options, task;
|
|
if (typeof args[0] === "function") {
|
|
[task, ...args] = args;
|
|
options = {};
|
|
} else {
|
|
[options, task, ...args] = args;
|
|
}
|
|
job = new Job$1(task, args, options, this.jobDefaults, this.rejectOnDrop, this.Events, this._states, this.Promise);
|
|
this._receive(job);
|
|
return job.promise;
|
|
}
|
|
|
|
wrap(fn) {
|
|
var schedule, wrapped;
|
|
schedule = this.schedule.bind(this);
|
|
wrapped = function(...args) {
|
|
return schedule(fn.bind(this), ...args);
|
|
};
|
|
wrapped.withOptions = function(options, ...args) {
|
|
return schedule(options, fn, ...args);
|
|
};
|
|
return wrapped;
|
|
}
|
|
|
|
  // Update limiter settings at runtime: store-level keys are forwarded to
  // the datastore, instance-level keys are applied to `this`. Resolves with
  // `this` for chaining.
  async updateSettings(options = {}) {
    await this._store.__updateSettings__(parser$5.overwrite(options, this.storeDefaults));
    parser$5.overwrite(options, this.instanceDefaults, this);
    return this;
  }
|
|
|
|
  // Resolve with the datastore's current reservoir value.
  currentReservoir() {
    return this._store.__currentReservoir__();
  }
|
|
|
|
  // Add `incr` to the reservoir in the datastore; resolves with the new value.
  incrementReservoir(incr = 0) {
    return this._store.__incrementReservoir__(incr);
  }
|
|
|
|
}
|
|
// Static/class-level wiring and the default option tables. Everything is
// mirrored onto the prototype so both Bottleneck.X and limiter.X work.

Bottleneck.default = Bottleneck;

Bottleneck.Events = Events$4;

Bottleneck.version = Bottleneck.prototype.version = require$$8.version;

// Overflow strategies applied when highWater is reached.
Bottleneck.strategy = Bottleneck.prototype.strategy = {
  LEAK: 1,
  OVERFLOW: 2,
  OVERFLOW_PRIORITY: 4,
  BLOCK: 3
};

Bottleneck.BottleneckError = Bottleneck.prototype.BottleneckError = BottleneckError_1;

Bottleneck.Group = Bottleneck.prototype.Group = Group_1;

Bottleneck.RedisConnection = Bottleneck.prototype.RedisConnection = require$$2;

Bottleneck.IORedisConnection = Bottleneck.prototype.IORedisConnection = require$$3;

Bottleneck.Batcher = Bottleneck.prototype.Batcher = Batcher_1;

// Per-job option defaults (expiration null = jobs never time out).
Bottleneck.prototype.jobDefaults = {
  priority: DEFAULT_PRIORITY$1,
  weight: 1,
  expiration: null,
  id: "<no-id>"
};

// Datastore-level defaults, shared across clustered limiters.
Bottleneck.prototype.storeDefaults = {
  maxConcurrent: null,
  minTime: 0,
  highWater: null,
  strategy: Bottleneck.prototype.strategy.LEAK,
  penalty: null,
  reservoir: null,
  reservoirRefreshInterval: null,
  reservoirRefreshAmount: null,
  reservoirIncreaseInterval: null,
  reservoirIncreaseAmount: null,
  reservoirIncreaseMaximum: null
};

// Defaults for the in-process (local) datastore.
Bottleneck.prototype.localStoreDefaults = {
  Promise: Promise,
  timeout: null,
  heartbeatInterval: 250
};

// Defaults for the Redis-backed datastore.
Bottleneck.prototype.redisStoreDefaults = {
  Promise: Promise,
  timeout: null,
  heartbeatInterval: 5000,
  clientTimeout: 10000,
  Redis: null,
  clientOptions: {},
  clusterNodes: null,
  clearDatastore: false,
  connection: null
};

// Per-instance defaults.
Bottleneck.prototype.instanceDefaults = {
  datastore: "local",
  connection: null,
  id: "<no-id>",
  rejectOnDrop: true,
  trackDoneStatus: false,
  Promise: Promise
};

// Defaults for stop() — see the stopDefaults usage in stop().
Bottleneck.prototype.stopDefaults = {
  enqueueErrorMessage: "This limiter has been stopped and cannot accept new jobs.",
  dropWaitingJobs: true,
  dropErrorMessage: "This limiter has been stopped."
};
|
|
|
|
return Bottleneck;
|
|
|
|
}).call(commonjsGlobal);
|
|
|
|
var Bottleneck_1 = Bottleneck;
|
|
|
|
var lib = Bottleneck_1;
|
|
|
|
return lib;
|
|
|
|
})));
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 8932:
|
|
/***/ ((__unused_webpack_module, exports) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
|
|
// Error subclass used to flag deprecated API usage.
class Deprecation extends Error {
  constructor(message) {
    super(message);

    /* istanbul ignore next */
    // Trim internal frames from the stack on V8, where captureStackTrace exists.
    if (typeof Error.captureStackTrace === 'function') {
      Error.captureStackTrace(this, this.constructor);
    }

    this.name = 'Deprecation';
  }
}
|
|
|
|
exports.Deprecation = Deprecation;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5060:
|
|
/***/ ((module) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
/**
|
|
* filesize
|
|
*
|
|
* @copyright 2020 Jason Mulligan <jason.mulligan@avoidwork.com>
|
|
* @license BSD-3-Clause
|
|
* @version 6.1.0
|
|
*/
|
|
// filesize 6.1.0 (vendored): converts a byte (or bit) count into a
// human-readable string/array/object. Installed as a CommonJS export here;
// the UMD else-branch was stripped by the bundler.
(function (global) {
  // Matches a bare bit/byte symbol; used to drop the unit in unix mode.
  var b = /^(b|B)$/,
      // Unit symbols per standard: iec = base-2 (KiB...), jedec = KB/Kb.
      symbol = {
        iec: {
          bits: ["b", "Kib", "Mib", "Gib", "Tib", "Pib", "Eib", "Zib", "Yib"],
          bytes: ["B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"]
        },
        jedec: {
          bits: ["b", "Kb", "Mb", "Gb", "Tb", "Pb", "Eb", "Zb", "Yb"],
          bytes: ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]
        }
      },
      // Spelled-out prefixes indexed by exponent, for descriptor.fullform.
      fullform = {
        iec: ["", "kibi", "mebi", "gibi", "tebi", "pebi", "exbi", "zebi", "yobi"],
        jedec: ["", "kilo", "mega", "giga", "tera", "peta", "exa", "zetta", "yotta"]
      };
  /**
   * filesize
   *
   * @method filesize
   * @param {Mixed} arg String, Int or Float to transform
   * @param {Object} descriptor [Optional] Flags
   * @return {String} Readable file size String
   * @throws {TypeError} When arg is not numeric
   */
  function filesize(arg) {
    var descriptor = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
    var result = [],
        val = 0,
        e = void 0,
        base = void 0,
        bits = void 0,
        ceil = void 0,
        full = void 0,
        fullforms = void 0,
        locale = void 0,
        localeOptions = void 0,
        neg = void 0,
        num = void 0,
        output = void 0,
        round = void 0,
        unix = void 0,
        separator = void 0,
        spacer = void 0,
        standard = void 0,
        symbols = void 0;

    if (isNaN(arg)) {
      throw new TypeError("Invalid number");
    }

    // Normalize descriptor flags to their defaults.
    bits = descriptor.bits === true;
    unix = descriptor.unix === true;
    base = descriptor.base || 2;
    round = descriptor.round !== void 0 ? descriptor.round : unix ? 1 : 2;
    locale = descriptor.locale !== void 0 ? descriptor.locale : "";
    localeOptions = descriptor.localeOptions || {};
    separator = descriptor.separator !== void 0 ? descriptor.separator : "";
    spacer = descriptor.spacer !== void 0 ? descriptor.spacer : unix ? "" : " ";
    symbols = descriptor.symbols || {};
    // NOTE: base-2 defaults to jedec symbols (KB meaning 1024) by design.
    standard = base === 2 ? descriptor.standard || "jedec" : "jedec";
    output = descriptor.output || "string";
    full = descriptor.fullform === true;
    fullforms = descriptor.fullforms instanceof Array ? descriptor.fullforms : [];
    e = descriptor.exponent !== void 0 ? descriptor.exponent : -1;
    num = Number(arg);
    neg = num < 0;
    ceil = base > 2 ? 1000 : 1024; // Flipping a negative number to determine the size

    if (neg) {
      num = -num;
    } // Determining the exponent


    if (e === -1 || isNaN(e)) {
      e = Math.floor(Math.log(num) / Math.log(ceil));

      if (e < 0) {
        e = 0;
      }
    } // Exceeding supported length, time to reduce & multiply


    if (e > 8) {
      e = 8;
    }

    if (output === "exponent") {
      return e;
    } // Zero is now a special case because bytes divide by 1


    if (num === 0) {
      result[0] = 0;
      result[1] = unix ? "" : symbol[standard][bits ? "bits" : "bytes"][e];
    } else {
      val = num / (base === 2 ? Math.pow(2, e * 10) : Math.pow(1000, e));

      if (bits) {
        val = val * 8;

        // Bumping to the next unit may be needed after the x8 conversion.
        if (val >= ceil && e < 8) {
          val = val / ceil;
          e++;
        }
      }

      result[0] = Number(val.toFixed(e > 0 ? round : 0));

      // Rounding can land exactly on the unit boundary (e.g. 1024 KB -> 1 MB).
      if (result[0] === ceil && e < 8 && descriptor.exponent === void 0) {
        result[0] = 1;
        e++;
      }

      result[1] = base === 10 && e === 1 ? bits ? "kb" : "kB" : symbol[standard][bits ? "bits" : "bytes"][e];

      if (unix) {
        result[1] = standard === "jedec" ? result[1].charAt(0) : e > 0 ? result[1].replace(/B$/, "") : result[1];

        if (b.test(result[1])) {
          result[0] = Math.floor(result[0]);
          result[1] = "";
        }
      }
    } // Decorating a 'diff'


    if (neg) {
      result[0] = -result[0];
    } // Applying custom symbol


    result[1] = symbols[result[1]] || result[1];

    if (locale === true) {
      result[0] = result[0].toLocaleString();
    } else if (locale.length > 0) {
      result[0] = result[0].toLocaleString(locale, localeOptions);
    } else if (separator.length > 0) {
      result[0] = result[0].toString().replace(".", separator);
    } // Returning Array, Object, or String (default)


    if (output === "array") {
      return result;
    }

    if (full) {
      result[1] = fullforms[e] ? fullforms[e] : fullform[standard][e] + (bits ? "bit" : "byte") + (result[0] === 1 ? "" : "s");
    }

    if (output === "object") {
      return {
        value: result[0],
        symbol: result[1],
        exponent: e
      };
    }

    return result.join(spacer);
  } // Partial application for functional programming


  filesize.partial = function (opt) {
    return function (arg) {
      return filesize(arg, opt);
    };
  }; // CommonJS, AMD, script tag


  if (true) {
    module.exports = filesize;
  } else {}
})(typeof window !== "undefined" ? window : global);
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 3287:
|
|
/***/ ((__unused_webpack_module, exports) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
|
|
/*!
|
|
* is-plain-object <https://github.com/jonschlinkert/is-plain-object>
|
|
*
|
|
* Copyright (c) 2014-2017, Jon Schlinkert.
|
|
* Released under the MIT License.
|
|
*/
|
|
|
|
// True only when the value's internal class tag is exactly "Object"
// (rules out arrays, null, functions, Dates, etc.).
function isObject(o) {
  const tag = Object.prototype.toString.call(o);
  return tag === '[object Object]';
}
|
|
|
|
// A "plain" object is one created by `{}`/`new Object()`/`Object.create(null)`:
// object class tag, and either no constructor or an unmodified Object-like
// prototype carrying `isPrototypeOf`.
function isPlainObject(o) {
  const hasObjectTag = (v) => Object.prototype.toString.call(v) === '[object Object]';

  if (!hasObjectTag(o)) return false;

  // Objects with no constructor (e.g. Object.create(null)) are plain.
  const ctor = o.constructor;
  if (ctor === undefined) return true;

  // A constructor whose prototype is not object-like was tampered with.
  const proto = ctor.prototype;
  if (!hasObjectTag(proto)) return false;

  // Plain prototypes expose the Object-specific `isPrototypeOf` method.
  return proto.hasOwnProperty('isPrototypeOf');
}
|
|
|
|
exports.isPlainObject = isPlainObject;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 467:
|
|
/***/ ((module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
|
|
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
|
|
|
|
var Stream = _interopDefault(__nccwpck_require__(2413));
|
|
var http = _interopDefault(__nccwpck_require__(8605));
|
|
var Url = _interopDefault(__nccwpck_require__(8835));
|
|
var https = _interopDefault(__nccwpck_require__(7211));
|
|
var zlib = _interopDefault(__nccwpck_require__(8761));
|
|
|
|
// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js
|
|
|
|
// fix for "Readable" isn't a named export issue
|
|
const Readable = Stream.Readable;
|
|
|
|
const BUFFER = Symbol('buffer');
|
|
const TYPE = Symbol('type');
|
|
|
|
// Minimal W3C Blob backed by one Node Buffer: [BUFFER] holds the bytes,
// [TYPE] the lowercased MIME type (module-level symbols).
class Blob {
  constructor() {
    this[TYPE] = '';

    const blobParts = arguments[0];
    const options = arguments[1];

    const buffers = [];
    let size = 0;

    if (blobParts) {
      const a = blobParts;
      const length = Number(a.length);
      for (let i = 0; i < length; i++) {
        const element = a[i];
        let buffer;
        // Coerce each part to a Buffer; anything unrecognized is stringified.
        if (element instanceof Buffer) {
          buffer = element;
        } else if (ArrayBuffer.isView(element)) {
          buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);
        } else if (element instanceof ArrayBuffer) {
          buffer = Buffer.from(element);
        } else if (element instanceof Blob) {
          buffer = element[BUFFER];
        } else {
          buffer = Buffer.from(typeof element === 'string' ? element : String(element));
        }
        size += buffer.length;
        buffers.push(buffer);
      }
    }

    this[BUFFER] = Buffer.concat(buffers);

    // Only printable-ASCII types are kept, lowercased (per spec).
    let type = options && options.type !== undefined && String(options.type).toLowerCase();
    if (type && !/[^\u0020-\u007E]/.test(type)) {
      this[TYPE] = type;
    }
  }
  // Total byte length of the blob.
  get size() {
    return this[BUFFER].length;
  }
  // MIME type, or '' when none/invalid was supplied.
  get type() {
    return this[TYPE];
  }
  // Resolve with the contents decoded as UTF-8 text.
  text() {
    return Promise.resolve(this[BUFFER].toString());
  }
  // Resolve with a copy of the contents as an ArrayBuffer.
  arrayBuffer() {
    const buf = this[BUFFER];
    const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
    return Promise.resolve(ab);
  }
  // Return a Readable stream that emits the whole buffer once.
  stream() {
    const readable = new Readable();
    readable._read = function () {};
    readable.push(this[BUFFER]);
    readable.push(null);
    return readable;
  }
  toString() {
    return '[object Blob]';
  }
  // Blob.slice(start, end, contentType) with negative-index semantics.
  slice() {
    const size = this.size;

    const start = arguments[0];
    const end = arguments[1];
    let relativeStart, relativeEnd;
    if (start === undefined) {
      relativeStart = 0;
    } else if (start < 0) {
      relativeStart = Math.max(size + start, 0);
    } else {
      relativeStart = Math.min(start, size);
    }
    if (end === undefined) {
      relativeEnd = size;
    } else if (end < 0) {
      relativeEnd = Math.max(size + end, 0);
    } else {
      relativeEnd = Math.min(end, size);
    }
    const span = Math.max(relativeEnd - relativeStart, 0);

    const buffer = this[BUFFER];
    const slicedBuffer = buffer.slice(relativeStart, relativeStart + span);
    // Build an empty Blob and splice the sliced buffer in to avoid copying.
    const blob = new Blob([], { type: arguments[2] });
    blob[BUFFER] = slicedBuffer;
    return blob;
  }
}
|
|
|
|
// Mirror browser Blob semantics: public members are enumerable, and the
// toStringTag brands instances as 'Blob'.
Object.defineProperties(Blob.prototype, {
  size: { enumerable: true },
  type: { enumerable: true },
  slice: { enumerable: true }
});

Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
  value: 'Blob',
  writable: false,
  enumerable: false,
  configurable: true
});
|
|
|
|
/**
|
|
* fetch-error.js
|
|
*
|
|
* FetchError interface for operational errors
|
|
*/
|
|
|
|
/**
|
|
* Create FetchError instance
|
|
*
|
|
* @param String message Error message for human
|
|
* @param String type Error type for machine
|
|
* @param String systemError For Node.js system error
|
|
* @return FetchError
|
|
*/
|
|
/**
 * Create FetchError instance (operational errors from node-fetch).
 *
 * @param String message Error message for human
 * @param String type Error type for machine (e.g. 'system', 'max-size')
 * @param Object systemError For Node.js system error
 * @return FetchError
 */
function FetchError(message, type, systemError) {
  Error.call(this, message);

  this.message = message;
  this.type = type;

  // when err.type is `system`, err.code contains system error code
  if (systemError) {
    this.code = this.errno = systemError.code;
  }

  // hide custom error implementation details from end-users
  Error.captureStackTrace(this, this.constructor);
}

// Prototype-chain wiring: FetchError instanceof Error holds.
FetchError.prototype = Object.create(Error.prototype);
FetchError.prototype.constructor = FetchError;
FetchError.prototype.name = 'FetchError';
|
|
|
|
let convert;
|
|
try {
|
|
convert = __nccwpck_require__(2877).convert;
|
|
} catch (e) {}
|
|
|
|
const INTERNALS = Symbol('Body internals');
|
|
|
|
// fix an issue where "PassThrough" isn't a named export for node <10
|
|
const PassThrough = Stream.PassThrough;
|
|
|
|
/**
|
|
* Body mixin
|
|
*
|
|
* Ref: https://fetch.spec.whatwg.org/#body
|
|
*
|
|
* @param Stream body Readable stream
|
|
* @param Object opts Response options
|
|
* @return Void
|
|
*/
|
|
/**
 * Body mixin constructor: normalizes the input into null, a Buffer, or a
 * Stream and stashes it (plus the consumed/error state) under INTERNALS.
 *
 * @param Stream|Buffer|string|... body Input body
 * @param Object opts { size, timeout }
 * @return Void
 */
function Body(body) {
  var _this = this;

  var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
      _ref$size = _ref.size;

  let size = _ref$size === undefined ? 0 : _ref$size;
  var _ref$timeout = _ref.timeout;
  let timeout = _ref$timeout === undefined ? 0 : _ref$timeout;

  if (body == null) {
    // body is undefined or null
    body = null;
  } else if (isURLSearchParams(body)) {
    // body is a URLSearchParams
    body = Buffer.from(body.toString());
    // NOTE: the bare `;` branches below deliberately leave Blobs, Buffers
    // and Streams untouched (Babel-compiled empty statements).
  } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
    // body is ArrayBuffer
    body = Buffer.from(body);
  } else if (ArrayBuffer.isView(body)) {
    // body is ArrayBufferView
    body = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
  } else if (body instanceof Stream) ; else {
    // none of the above
    // coerce to string then buffer
    body = Buffer.from(String(body));
  }
  this[INTERNALS] = {
    body,
    disturbed: false,
    error: null
  };
  this.size = size;
  this.timeout = timeout;

  if (body instanceof Stream) {
    // Record stream errors so consumeBody can reject with them later.
    body.on('error', function (err) {
      const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);
      _this[INTERNALS].error = error;
    });
  }
}
|
|
|
|
// Consumption API shared by Request and Response (copied in via Body.mixIn).
// Every decoder funnels through consumeBody(), which enforces single use.
Body.prototype = {
  get body() {
    return this[INTERNALS].body;
  },

  get bodyUsed() {
    return this[INTERNALS].disturbed;
  },

  /**
   * Decode response as ArrayBuffer
   *
   * @return Promise
   */
  arrayBuffer() {
    return consumeBody.call(this).then(function (buf) {
      return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
    });
  },

  /**
   * Return raw response as Blob
   *
   * @return Promise
   */
  blob() {
    let ct = this.headers && this.headers.get('content-type') || '';
    return consumeBody.call(this).then(function (buf) {
      return Object.assign(
      // Prevent copying
      new Blob([], {
        type: ct.toLowerCase()
      }), {
        [BUFFER]: buf
      });
    });
  },

  /**
   * Decode response as json
   *
   * @return Promise
   */
  json() {
    var _this2 = this;

    return consumeBody.call(this).then(function (buffer) {
      try {
        return JSON.parse(buffer.toString());
      } catch (err) {
        return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));
      }
    });
  },

  /**
   * Decode response as text
   *
   * @return Promise
   */
  text() {
    return consumeBody.call(this).then(function (buffer) {
      return buffer.toString();
    });
  },

  /**
   * Decode response as buffer (non-spec api)
   *
   * @return Promise
   */
  buffer() {
    return consumeBody.call(this);
  },

  /**
   * Decode response as text, while automatically detecting the encoding and
   * trying to decode to UTF-8 (non-spec api)
   *
   * @return Promise
   */
  textConverted() {
    var _this3 = this;

    return consumeBody.call(this).then(function (buffer) {
      return convertBody(buffer, _this3.headers);
    });
  }
};
|
|
|
|
// In browsers, all properties are enumerable.
|
|
// In browsers, all properties are enumerable.
Object.defineProperties(Body.prototype, {
  body: { enumerable: true },
  bodyUsed: { enumerable: true },
  arrayBuffer: { enumerable: true },
  blob: { enumerable: true },
  json: { enumerable: true },
  text: { enumerable: true }
});

// Copy Body's methods/getters onto a target prototype (Request/Response),
// using property descriptors so getters survive the copy. Existing names
// on the target win.
Body.mixIn = function (proto) {
  for (const name of Object.getOwnPropertyNames(Body.prototype)) {
    // istanbul ignore else: future proof
    if (!(name in proto)) {
      const desc = Object.getOwnPropertyDescriptor(Body.prototype, name);
      Object.defineProperty(proto, name, desc);
    }
  }
};
|
|
|
|
/**
|
|
* Consume and convert an entire Body to a Buffer.
|
|
*
|
|
* Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body
|
|
*
|
|
* @return Promise
|
|
*/
|
|
/**
 * Consume and convert an entire Body to a Buffer. Marks the body as
 * disturbed; a second call rejects. Enforces the `size` limit and the
 * `timeout` while reading a stream body.
 *
 * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body
 *
 * @return Promise<Buffer>
 */
function consumeBody() {
  var _this4 = this;

  if (this[INTERNALS].disturbed) {
    return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));
  }

  this[INTERNALS].disturbed = true;

  // A stream error recorded by the Body constructor surfaces here.
  if (this[INTERNALS].error) {
    return Body.Promise.reject(this[INTERNALS].error);
  }

  let body = this.body;

  // body is null
  if (body === null) {
    return Body.Promise.resolve(Buffer.alloc(0));
  }

  // body is blob
  if (isBlob(body)) {
    body = body.stream();
  }

  // body is buffer
  if (Buffer.isBuffer(body)) {
    return Body.Promise.resolve(body);
  }

  // istanbul ignore if: should never happen
  if (!(body instanceof Stream)) {
    return Body.Promise.resolve(Buffer.alloc(0));
  }

  // body is stream
  // get ready to actually consume the body
  let accum = [];
  let accumBytes = 0;
  let abort = false;

  return new Body.Promise(function (resolve, reject) {
    let resTimeout;

    // allow timeout on slow response body
    if (_this4.timeout) {
      resTimeout = setTimeout(function () {
        abort = true;
        reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));
      }, _this4.timeout);
    }

    // handle stream errors
    body.on('error', function (err) {
      if (err.name === 'AbortError') {
        // if the request was aborted, reject with this Error
        abort = true;
        reject(err);
      } else {
        // other errors, such as incorrect content-encoding
        reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));
      }
    });

    body.on('data', function (chunk) {
      // `abort` makes later chunks no-ops after a rejection.
      if (abort || chunk === null) {
        return;
      }

      if (_this4.size && accumBytes + chunk.length > _this4.size) {
        abort = true;
        reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));
        return;
      }

      accumBytes += chunk.length;
      accum.push(chunk);
    });

    body.on('end', function () {
      if (abort) {
        return;
      }

      clearTimeout(resTimeout);

      try {
        resolve(Buffer.concat(accum, accumBytes));
      } catch (err) {
        // handle streams that have accumulated too much data (issue #414)
        reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));
      }
    });
  });
}
|
|
|
|
/**
|
|
* Detect buffer encoding and convert to target encoding
|
|
* ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
|
|
*
|
|
* @param Buffer buffer Incoming buffer
|
|
* @param String encoding Target encoding
|
|
* @return String
|
|
*/
|
|
/**
 * Detect buffer encoding (content-type header, then HTML5/HTML4/XML meta
 * sniffing on the first 1024 bytes) and convert to UTF-8 text. Requires the
 * optional `encoding` package for the actual conversion.
 * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
 *
 * @param Buffer buffer Incoming buffer
 * @param Headers headers Response headers
 * @return String
 * @throws Error when the `encoding` package is not installed
 */
function convertBody(buffer, headers) {
  if (typeof convert !== 'function') {
    throw new Error('The package `encoding` must be installed to use the textConverted() function');
  }

  const ct = headers.get('content-type');
  let charset = 'utf-8';
  let res, str;

  // header
  if (ct) {
    res = /charset=([^;]*)/i.exec(ct);
  }

  // no charset in content type, peek at response body for at most 1024 bytes
  str = buffer.slice(0, 1024).toString();

  // html5
  if (!res && str) {
    res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str);
  }

  // html4 (both attribute orders); the charset is then re-extracted from
  // the matched content attribute below.
  if (!res && str) {
    res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str);
    if (!res) {
      res = /<meta[\s]+?content=(['"])(.+?)\1[\s]+?http-equiv=(['"])content-type\3/i.exec(str);
      if (res) {
        res.pop(); // drop last quote
      }
    }

    if (res) {
      res = /charset=(.*)/i.exec(res.pop());
    }
  }

  // xml
  if (!res && str) {
    res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str);
  }

  // found charset
  if (res) {
    charset = res.pop();

    // prevent decode issues when sites use incorrect encoding
    // ref: https://hsivonen.fi/encoding-menu/
    if (charset === 'gb2312' || charset === 'gbk') {
      charset = 'gb18030';
    }
  }

  // turn raw buffers into a single utf-8 buffer
  return convert(buffer, 'UTF-8', charset).toString();
}
|
|
|
|
/**
|
|
* Detect a URLSearchParams object
|
|
* ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143
|
|
*
|
|
* @param Object obj Object to detect by type or brand
|
|
* @return String
|
|
*/
|
|
/**
 * Detect a URLSearchParams object.
 * ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143
 *
 * @param Object obj Object to detect by type or brand
 * @return Boolean
 */
function isURLSearchParams(obj) {
  // Duck-typing as a necessary condition.
  const requiredMethods = ['append', 'delete', 'get', 'getAll', 'has', 'set'];
  if (typeof obj !== 'object' || requiredMethods.some((m) => typeof obj[m] !== 'function')) {
    return false;
  }

  // Brand-checking and more duck-typing as optional condition.
  return obj.constructor.name === 'URLSearchParams' || Object.prototype.toString.call(obj) === '[object URLSearchParams]' || typeof obj.sort === 'function';
}
|
|
|
|
/**
|
|
* Check if `obj` is a W3C `Blob` object (which `File` inherits from)
|
|
* @param {*} obj
|
|
* @return {boolean}
|
|
*/
|
|
/**
 * Check if `obj` is a W3C `Blob` object (which `File` inherits from):
 * structural duck-typing plus a constructor-name / toStringTag brand check.
 * @param {*} obj
 * @return {boolean}
 */
function isBlob(obj) {
  const BLOB_BRAND = /^(Blob|File)$/;
  if (typeof obj !== 'object') return false;
  if (typeof obj.arrayBuffer !== 'function') return false;
  if (typeof obj.type !== 'string') return false;
  if (typeof obj.stream !== 'function') return false;
  if (typeof obj.constructor !== 'function') return false;
  if (typeof obj.constructor.name !== 'string') return false;
  return BLOB_BRAND.test(obj.constructor.name) && BLOB_BRAND.test(obj[Symbol.toStringTag]);
}
|
|
|
|
/**
|
|
* Clone body given Res/Req instance
|
|
*
|
|
* @param Mixed instance Response or Request instance
|
|
* @return Mixed
|
|
*/
|
|
/**
 * Clone body given Res/Req instance. Stream bodies (except form-data) are
 * teed through two PassThroughs: one replaces the instance's body, the
 * other is returned for the clone.
 *
 * @param Mixed instance Response or Request instance
 * @return Mixed the (possibly teed) body for the clone
 * @throws Error when the body was already consumed
 */
function clone(instance) {
  let p1, p2;
  let body = instance.body;

  // don't allow cloning a used body
  if (instance.bodyUsed) {
    throw new Error('cannot clone body after it is used');
  }

  // check that body is a stream and not form-data object
  // note: we can't clone the form-data object without having it as a dependency
  if (body instanceof Stream && typeof body.getBoundary !== 'function') {
    // tee instance body
    p1 = new PassThrough();
    p2 = new PassThrough();
    body.pipe(p1);
    body.pipe(p2);
    // set instance body to teed body and return the other teed body
    instance[INTERNALS].body = p1;
    body = p2;
  }

  return body;
}
|
|
|
|
/**
|
|
* Performs the operation "extract a `Content-Type` value from |object|" as
|
|
* specified in the specification:
|
|
* https://fetch.spec.whatwg.org/#concept-bodyinit-extract
|
|
*
|
|
* This function assumes that instance.body is present.
|
|
*
|
|
* @param Mixed instance Any options.body input
|
|
*/
|
|
// "Extract a `Content-Type` value" per
// https://fetch.spec.whatwg.org/#concept-bodyinit-extract — returns the
// implied Content-Type for a body input, or null when none can be inferred.
function extractContentType(body) {
  if (body === null) {
    // body is null
    return null;
  } else if (typeof body === 'string') {
    // body is string
    return 'text/plain;charset=UTF-8';
  } else if (isURLSearchParams(body)) {
    // body is a URLSearchParams
    return 'application/x-www-form-urlencoded;charset=UTF-8';
  } else if (isBlob(body)) {
    // body is blob
    return body.type || null;
  } else if (Buffer.isBuffer(body)) {
    // body is buffer
    return null;
  } else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
    // body is ArrayBuffer
    return null;
  } else if (ArrayBuffer.isView(body)) {
    // body is ArrayBufferView
    return null;
  } else if (typeof body.getBoundary === 'function') {
    // detect form data input from form-data module
    return `multipart/form-data;boundary=${body.getBoundary()}`;
  } else if (body instanceof Stream) {
    // body is stream
    // can't really do much about this
    return null;
  } else {
    // Body constructor defaults other things to string
    return 'text/plain;charset=UTF-8';
  }
}
|
|
|
|
/**
|
|
* The Fetch Standard treats this as if "total bytes" is a property on the body.
|
|
* For us, we have to explicitly get it with a function.
|
|
*
|
|
* ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes
|
|
*
|
|
* @param Body instance Instance of Body
|
|
* @return Number? Number of bytes, or null if not possible
|
|
*/
|
|
/**
 * The Fetch Standard treats this as if "total bytes" is a property on the body.
 * For us, we have to explicitly get it with a function.
 *
 * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes
 *
 * @param Body instance Instance of Body
 * @return Number? Number of bytes, or null if not possible
 */
function getTotalBytes(instance) {
  const body = instance.body;

  if (body === null) {
    // body is null
    return 0;
  } else if (isBlob(body)) {
    return body.size;
  } else if (Buffer.isBuffer(body)) {
    // body is buffer
    return body.length;
  } else if (body && typeof body.getLengthSync === 'function') {
    // detect form data input from form-data module
    // NOTE(review): form-data 1.x is "known" when no async length
    // retrievers remain; 2.x exposes hasKnownLength() — loose `==` kept as
    // in upstream node-fetch.
    if (body._lengthRetrievers && body._lengthRetrievers.length == 0 || // 1.x
    body.hasKnownLength && body.hasKnownLength()) {
      // 2.x
      return body.getLengthSync();
    }
    return null;
  } else {
    // body is stream
    return null;
  }
}
|
|
|
|
/**
 * Write a Body to a Node.js WritableStream (e.g. http.Request) object.
 *
 * @param Body instance Instance of Body
 * @return Void
 */
function writeToStream(dest, instance) {
  const payload = instance.body;

  // Nothing to send: just close the destination.
  if (payload === null) {
    dest.end();
    return;
  }

  // Blob: pipe its underlying stream.
  if (isBlob(payload)) {
    payload.stream().pipe(dest);
    return;
  }

  // Buffer: write in one shot, then close.
  if (Buffer.isBuffer(payload)) {
    dest.write(payload);
    dest.end();
    return;
  }

  // Fall through: treat the body as a readable stream.
  payload.pipe(dest);
}
|
|
|
|
// expose Promise — the implementation Body uses; fetch() later overwrites
// this with fetch.Promise, so consumers can substitute their own.
Body.Promise = global.Promise;
|
|
|
|
/**
 * headers.js
 *
 * Headers class offers convenient helpers
 */

const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/;
const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/;

/**
 * Throw unless `name` is a non-empty, legal HTTP header field-name token.
 *
 * @param String name Candidate header name (coerced to string)
 * @return Void
 */
function validateName(name) {
  name = `${name}`;
  if (name === '' || invalidTokenRegex.test(name)) {
    throw new TypeError(`${name} is not a legal HTTP header name`);
  }
}

/**
 * Throw if `value` contains characters not permitted in an HTTP header value.
 *
 * @param String value Candidate header value (coerced to string)
 * @return Void
 */
function validateValue(value) {
  value = `${value}`;
  if (invalidHeaderCharRegex.test(value)) {
    throw new TypeError(`${value} is not a legal HTTP header value`);
  }
}
|
|
|
|
/**
 * Find the key in the map object given a header name, comparing
 * case-insensitively.
 *
 * Returns undefined if not found.
 *
 * @param String name Header name
 * @return String|Undefined The stored key in its original case
 */
function find(map, name) {
  const wanted = name.toLowerCase();
  return Object.keys(map).find(key => key.toLowerCase() === wanted);
}
|
|
|
|
const MAP = Symbol('map');

class Headers {
  /**
   * Headers class
   *
   * Backing store lives under the MAP symbol as a null-prototype object
   * mapping header names (original case preserved) to arrays of values.
   *
   * @param Object headers Response headers
   * @return Void
   */
  constructor() {
    let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined;

    this[MAP] = Object.create(null);

    // Copy-construct from another Headers instance.
    if (init instanceof Headers) {
      const rawHeaders = init.raw();
      const headerNames = Object.keys(rawHeaders);

      for (const headerName of headerNames) {
        for (const value of rawHeaders[headerName]) {
          this.append(headerName, value);
        }
      }

      return;
    }

    // We don't worry about converting prop to ByteString here as append()
    // will handle it.
    if (init == null) ; else if (typeof init === 'object') {
      const method = init[Symbol.iterator];
      if (method != null) {
        if (typeof method !== 'function') {
          throw new TypeError('Header pairs must be iterable');
        }

        // sequence<sequence<ByteString>>
        // Note: per spec we have to first exhaust the lists then process them
        const pairs = [];
        for (const pair of init) {
          if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
            throw new TypeError('Each header pair must be iterable');
          }
          pairs.push(Array.from(pair));
        }

        for (const pair of pairs) {
          if (pair.length !== 2) {
            throw new TypeError('Each header pair must be a name/value tuple');
          }
          this.append(pair[0], pair[1]);
        }
      } else {
        // record<ByteString, ByteString>
        for (const key of Object.keys(init)) {
          const value = init[key];
          this.append(key, value);
        }
      }
    } else {
      throw new TypeError('Provided initializer must be an object');
    }
  }

  /**
   * Return combined header value given name
   *
   * Multiple stored values for the same name are joined with ', '.
   *
   * @param String name Header name
   * @return Mixed Joined value string, or null when the header is absent
   */
  get(name) {
    name = `${name}`;
    validateName(name);
    const key = find(this[MAP], name);
    if (key === undefined) {
      return null;
    }

    return this[MAP][key].join(', ');
  }

  /**
   * Iterate over all headers
   *
   * Note: the pair list is re-read after every callback invocation, so
   * mutations made by the callback are observed by the iteration.
   *
   * @param Function callback Executed for each item with parameters (value, name, thisArg)
   * @param Boolean thisArg `this` context for callback function
   * @return Void
   */
  forEach(callback) {
    let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;

    let pairs = getHeaders(this);
    let i = 0;
    while (i < pairs.length) {
      var _pairs$i = pairs[i];
      const name = _pairs$i[0],
            value = _pairs$i[1];

      callback.call(thisArg, value, name, this);
      pairs = getHeaders(this);
      i++;
    }
  }

  /**
   * Overwrite header values given name
   *
   * Reuses the existing key's casing when the header already exists.
   *
   * @param String name Header name
   * @param String value Header value
   * @return Void
   */
  set(name, value) {
    name = `${name}`;
    value = `${value}`;
    validateName(name);
    validateValue(value);
    const key = find(this[MAP], name);
    this[MAP][key !== undefined ? key : name] = [value];
  }

  /**
   * Append a value onto existing header
   *
   * @param String name Header name
   * @param String value Header value
   * @return Void
   */
  append(name, value) {
    name = `${name}`;
    value = `${value}`;
    validateName(name);
    validateValue(value);
    const key = find(this[MAP], name);
    if (key !== undefined) {
      this[MAP][key].push(value);
    } else {
      this[MAP][name] = [value];
    }
  }

  /**
   * Check for header name existence
   *
   * @param String name Header name
   * @return Boolean
   */
  has(name) {
    name = `${name}`;
    validateName(name);
    return find(this[MAP], name) !== undefined;
  }

  /**
   * Delete all header values given name
   *
   * @param String name Header name
   * @return Void
   */
  delete(name) {
    name = `${name}`;
    validateName(name);
    const key = find(this[MAP], name);
    if (key !== undefined) {
      delete this[MAP][key];
    }
  }

  /**
   * Return raw headers (non-spec api)
   *
   * Exposes the internal name -> [values] store directly.
   *
   * @return Object
   */
  raw() {
    return this[MAP];
  }

  /**
   * Get an iterator on keys.
   *
   * @return Iterator
   */
  keys() {
    return createHeadersIterator(this, 'key');
  }

  /**
   * Get an iterator on values.
   *
   * @return Iterator
   */
  values() {
    return createHeadersIterator(this, 'value');
  }

  /**
   * Get an iterator on entries.
   *
   * This is the default iterator of the Headers object.
   *
   * @return Iterator
   */
  [Symbol.iterator]() {
    return createHeadersIterator(this, 'key+value');
  }
}
|
|
// entries() is the same iterator as the default one
Headers.prototype.entries = Headers.prototype[Symbol.iterator];

// brand the class for Object.prototype.toString
Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
  value: 'Headers',
  writable: false,
  enumerable: false,
  configurable: true
});

// expose the public API methods as enumerable prototype properties
Object.defineProperties(Headers.prototype, {
  get: { enumerable: true },
  forEach: { enumerable: true },
  set: { enumerable: true },
  append: { enumerable: true },
  has: { enumerable: true },
  delete: { enumerable: true },
  keys: { enumerable: true },
  values: { enumerable: true },
  entries: { enumerable: true }
});
|
|
|
|
/**
 * Produce a sorted snapshot of a Headers object for iteration.
 *
 * @param Headers headers Headers instance to snapshot
 * @param String kind 'key', 'value' or 'key+value' (default)
 * @return Array Lowercased keys, joined values, or [key, value] pairs
 */
function getHeaders(headers) {
  let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';

  const sortedKeys = Object.keys(headers[MAP]).sort();

  if (kind === 'key') {
    return sortedKeys.map(function (k) {
      return k.toLowerCase();
    });
  }

  if (kind === 'value') {
    return sortedKeys.map(function (k) {
      return headers[MAP][k].join(', ');
    });
  }

  return sortedKeys.map(function (k) {
    return [k.toLowerCase(), headers[MAP][k].join(', ')];
  });
}
|
|
|
|
const INTERNAL = Symbol('internal');

/**
 * Build an iterator over a Headers object.
 *
 * @param Headers target Headers instance being iterated
 * @param String kind 'key', 'value' or 'key+value'
 * @return Object Iterator whose prototype is HeadersIteratorPrototype
 */
function createHeadersIterator(target, kind) {
  const it = Object.create(HeadersIteratorPrototype);
  it[INTERNAL] = { target, kind, index: 0 };
  return it;
}
|
|
|
|
// Shared prototype for all Headers iterators. Its own prototype is
// %IteratorPrototype% (obtained via an array iterator) so instances get
// Symbol.iterator behaviour for free.
const HeadersIteratorPrototype = Object.setPrototypeOf({
  next() {
    // istanbul ignore if
    if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
      throw new TypeError('Value of `this` is not a HeadersIterator');
    }

    var _INTERNAL = this[INTERNAL];
    const target = _INTERNAL.target,
          kind = _INTERNAL.kind,
          index = _INTERNAL.index;

    // Re-snapshot the headers on every call so mutation during iteration
    // is observed.
    const values = getHeaders(target, kind);
    const len = values.length;
    if (index >= len) {
      return {
        value: undefined,
        done: true
      };
    }

    this[INTERNAL].index = index + 1;

    return {
      value: values[index],
      done: false
    };
  }
}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));

// brand the iterator for Object.prototype.toString
Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
  value: 'HeadersIterator',
  writable: false,
  enumerable: false,
  configurable: true
});
|
|
|
|
/**
 * Export the Headers object in a form that Node.js can consume.
 *
 * @param Headers headers
 * @return Object Null-prototype name -> value(s) object
 */
function exportNodeCompatibleHeaders(headers) {
  const nodeHeaders = Object.assign({ __proto__: null }, headers[MAP]);

  // http.request() only supports string as Host header. This hack makes
  // specifying custom Host header possible.
  const hostKey = find(headers[MAP], 'Host');
  if (hostKey !== undefined) {
    nodeHeaders[hostKey] = nodeHeaders[hostKey][0];
  }

  return nodeHeaders;
}
|
|
|
|
/**
 * Create a Headers object from an object of headers, ignoring those that do
 * not conform to HTTP grammar productions.
 *
 * @param Object obj Object of headers (values may be strings or arrays)
 * @return Headers
 */
function createHeadersLenient(obj) {
  const headers = new Headers();

  for (const name of Object.keys(obj)) {
    // Drop names that are not valid header tokens instead of throwing.
    if (invalidTokenRegex.test(name)) {
      continue;
    }

    const raw = obj[name];
    if (Array.isArray(raw)) {
      for (const val of raw) {
        // Drop individual values containing illegal characters.
        if (invalidHeaderCharRegex.test(val)) {
          continue;
        }

        const bucket = headers[MAP][name];
        if (bucket === undefined) {
          headers[MAP][name] = [val];
        } else {
          bucket.push(val);
        }
      }
    } else if (!invalidHeaderCharRegex.test(raw)) {
      headers[MAP][name] = [raw];
    }
  }

  return headers;
}
|
|
|
|
const INTERNALS$1 = Symbol('Response internals');

// fix an issue where "STATUS_CODES" aren't a named export for node <10
const STATUS_CODES = http.STATUS_CODES;

/**
 * Response class
 *
 * @param Stream body Readable stream
 * @param Object opts Response options (url, status, statusText, headers, counter)
 * @return Void
 */
class Response {
  constructor() {
    let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
    let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};

    Body.call(this, body, opts);

    const status = opts.status || 200;
    const headers = new Headers(opts.headers);

    // Derive a Content-Type from the body when the caller did not set one.
    if (body != null && !headers.has('Content-Type')) {
      const contentType = extractContentType(body);
      if (contentType) {
        headers.append('Content-Type', contentType);
      }
    }

    this[INTERNALS$1] = {
      url: opts.url,
      status,
      statusText: opts.statusText || STATUS_CODES[status],
      headers,
      counter: opts.counter
    };
  }

  get url() {
    return this[INTERNALS$1].url || '';
  }

  get status() {
    return this[INTERNALS$1].status;
  }

  /**
   * Convenience property representing if the request ended normally
   * (2xx status).
   */
  get ok() {
    return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;
  }

  // true once at least one redirect has been followed
  get redirected() {
    return this[INTERNALS$1].counter > 0;
  }

  get statusText() {
    return this[INTERNALS$1].statusText;
  }

  get headers() {
    return this[INTERNALS$1].headers;
  }

  /**
   * Clone this response
   *
   * @return Response
   */
  clone() {
    return new Response(clone(this), {
      url: this.url,
      status: this.status,
      statusText: this.statusText,
      headers: this.headers,
      ok: this.ok,
      redirected: this.redirected
    });
  }
}
|
|
|
|
// mix Body's shared methods into Response.prototype (Body.mixIn is defined
// earlier in this module)
Body.mixIn(Response.prototype);

// expose the public accessors as enumerable prototype properties
Object.defineProperties(Response.prototype, {
  url: { enumerable: true },
  status: { enumerable: true },
  ok: { enumerable: true },
  redirected: { enumerable: true },
  statusText: { enumerable: true },
  headers: { enumerable: true },
  clone: { enumerable: true }
});

// brand the class for Object.prototype.toString
Object.defineProperty(Response.prototype, Symbol.toStringTag, {
  value: 'Response',
  writable: false,
  enumerable: false,
  configurable: true
});
|
|
|
|
const INTERNALS$2 = Symbol('Request internals');

// fix an issue where "format", "parse" aren't a named export for node <10
const parse_url = Url.parse;
const format_url = Url.format;

// whether Readable#destroy exists; gates AbortSignal support for streamed
// request bodies in getNodeRequestOptions()
const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;
|
|
|
|
/**
 * Check if a value is an instance of Request.
 *
 * @param Mixed input
 * @return Boolean
 */
function isRequest(input) {
  // Guard against null explicitly: `typeof null === 'object'`, so without
  // the check `input[INTERNALS$2]` would throw a TypeError on null input
  // instead of returning false.
  return input !== null && typeof input === 'object' && typeof input[INTERNALS$2] === 'object';
}
|
|
|
|
/**
 * Duck-type check for AbortSignal: any object whose direct prototype's
 * constructor is named 'AbortSignal' counts (works across realms/polyfills).
 *
 * @param Mixed signal
 * @return Boolean
 */
function isAbortSignal(signal) {
  if (!signal || typeof signal !== 'object') {
    return false;
  }
  const proto = Object.getPrototypeOf(signal);
  return Boolean(proto && proto.constructor.name === 'AbortSignal');
}
|
|
|
|
/**
 * Request class
 *
 * @param Mixed input Url or Request instance
 * @param Object init Custom options
 * @return Void
 */
class Request {
  constructor(input) {
    let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};

    let parsedURL;

    // normalize input
    if (!isRequest(input)) {
      if (input && input.href) {
        // in order to support Node.js' Url objects; though WHATWG's URL objects
        // will fall into this branch also (since their `toString()` will return
        // `href` property anyway)
        parsedURL = parse_url(input.href);
      } else {
        // coerce input to a string before attempting to parse
        parsedURL = parse_url(`${input}`);
      }
      input = {};
    } else {
      parsedURL = parse_url(input.url);
    }

    let method = init.method || input.method || 'GET';
    method = method.toUpperCase();

    if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {
      throw new TypeError('Request with GET/HEAD method cannot have body');
    }

    // clone() the source Request's body so both copies remain readable
    let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;

    Body.call(this, inputBody, {
      timeout: init.timeout || input.timeout || 0,
      size: init.size || input.size || 0
    });

    const headers = new Headers(init.headers || input.headers || {});

    // Derive a Content-Type from the body when the caller did not set one.
    if (inputBody != null && !headers.has('Content-Type')) {
      const contentType = extractContentType(inputBody);
      if (contentType) {
        headers.append('Content-Type', contentType);
      }
    }

    // init.signal wins over the source request's signal, even when null
    let signal = isRequest(input) ? input.signal : null;
    if ('signal' in init) signal = init.signal;

    if (signal != null && !isAbortSignal(signal)) {
      throw new TypeError('Expected signal to be an instanceof AbortSignal');
    }

    this[INTERNALS$2] = {
      method,
      redirect: init.redirect || input.redirect || 'follow',
      headers,
      parsedURL,
      signal
    };

    // node-fetch-only options
    this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;
    this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true;
    this.counter = init.counter || input.counter || 0;
    this.agent = init.agent || input.agent;
  }

  get method() {
    return this[INTERNALS$2].method;
  }

  get url() {
    return format_url(this[INTERNALS$2].parsedURL);
  }

  get headers() {
    return this[INTERNALS$2].headers;
  }

  get redirect() {
    return this[INTERNALS$2].redirect;
  }

  get signal() {
    return this[INTERNALS$2].signal;
  }

  /**
   * Clone this request
   *
   * @return Request
   */
  clone() {
    return new Request(this);
  }
}
|
|
|
|
// mix Body's shared methods into Request.prototype (Body.mixIn is defined
// earlier in this module)
Body.mixIn(Request.prototype);

// brand the class for Object.prototype.toString
Object.defineProperty(Request.prototype, Symbol.toStringTag, {
  value: 'Request',
  writable: false,
  enumerable: false,
  configurable: true
});

// expose the public accessors as enumerable prototype properties
Object.defineProperties(Request.prototype, {
  method: { enumerable: true },
  url: { enumerable: true },
  headers: { enumerable: true },
  redirect: { enumerable: true },
  clone: { enumerable: true },
  signal: { enumerable: true }
});
|
|
|
|
/**
 * Convert a Request to Node.js http request options.
 *
 * @param Request A Request instance
 * @return Object The options object to be passed to http.request
 */
function getNodeRequestOptions(request) {
  const parsedURL = request[INTERNALS$2].parsedURL;
  // copy so the header fix-ups below do not mutate the caller's Request
  const headers = new Headers(request[INTERNALS$2].headers);

  // fetch step 1.3
  if (!headers.has('Accept')) {
    headers.set('Accept', '*/*');
  }

  // Basic fetch
  if (!parsedURL.protocol || !parsedURL.hostname) {
    throw new TypeError('Only absolute URLs are supported');
  }

  if (!/^https?:$/.test(parsedURL.protocol)) {
    throw new TypeError('Only HTTP(S) protocols are supported');
  }

  if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {
    throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');
  }

  // HTTP-network-or-cache fetch steps 2.4-2.7
  let contentLengthValue = null;
  if (request.body == null && /^(POST|PUT)$/i.test(request.method)) {
    contentLengthValue = '0';
  }
  if (request.body != null) {
    const totalBytes = getTotalBytes(request);
    if (typeof totalBytes === 'number') {
      contentLengthValue = String(totalBytes);
    }
  }
  if (contentLengthValue) {
    headers.set('Content-Length', contentLengthValue);
  }

  // HTTP-network-or-cache fetch step 2.11
  if (!headers.has('User-Agent')) {
    headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');
  }

  // HTTP-network-or-cache fetch step 2.15
  if (request.compress && !headers.has('Accept-Encoding')) {
    headers.set('Accept-Encoding', 'gzip,deflate');
  }

  // the agent option may be a factory function keyed on the parsed URL
  let agent = request.agent;
  if (typeof agent === 'function') {
    agent = agent(parsedURL);
  }

  if (!headers.has('Connection') && !agent) {
    headers.set('Connection', 'close');
  }

  // HTTP-network fetch step 4.2
  // chunked encoding is handled by Node.js

  return Object.assign({}, parsedURL, {
    method: request.method,
    headers: exportNodeCompatibleHeaders(headers),
    agent
  });
}
|
|
|
|
/**
 * abort-error.js
 *
 * AbortError interface for cancelled requests
 */

/**
 * Create AbortError instance
 *
 * @param String message Error message for human
 * @return AbortError
 */
function AbortError(message) {
  Error.call(this, message);

  const kind = 'aborted';
  this.type = kind;
  this.message = message;

  // hide custom error implementation details from end-users
  Error.captureStackTrace(this, this.constructor);
}

// Classic prototypal inheritance from Error.
AbortError.prototype = Object.create(Error.prototype);
AbortError.prototype.constructor = AbortError;
AbortError.prototype.name = 'AbortError';
|
|
|
|
// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
const PassThrough$1 = Stream.PassThrough;
const resolve_url = Url.resolve;
|
|
|
|
/**
 * Fetch function
 *
 * @param Mixed url Absolute url or Request instance
 * @param Object opts Fetch options
 * @return Promise
 */
function fetch(url, opts) {

  // allow custom promise
  if (!fetch.Promise) {
    throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
  }

  Body.Promise = fetch.Promise;

  // wrap http.request into fetch
  return new fetch.Promise(function (resolve, reject) {
    // build request object
    const request = new Request(url, opts);
    const options = getNodeRequestOptions(request);

    const send = (options.protocol === 'https:' ? https : http).request;
    const signal = request.signal;

    let response = null;

    // Reject the pending promise and tear down any live body streams.
    const abort = function abort() {
      let error = new AbortError('The user aborted a request.');
      reject(error);
      if (request.body && request.body instanceof Stream.Readable) {
        request.body.destroy(error);
      }
      if (!response || !response.body) return;
      response.body.emit('error', error);
    };

    // already aborted before we even started
    if (signal && signal.aborted) {
      abort();
      return;
    }

    const abortAndFinalize = function abortAndFinalize() {
      abort();
      finalize();
    };

    // send request
    const req = send(options);
    let reqTimeout;

    if (signal) {
      signal.addEventListener('abort', abortAndFinalize);
    }

    // Abort the in-flight request and clean up the timer and abort listener.
    function finalize() {
      req.abort();
      if (signal) signal.removeEventListener('abort', abortAndFinalize);
      clearTimeout(reqTimeout);
    }

    // start the timeout clock only once a socket is assigned
    if (request.timeout) {
      req.once('socket', function (socket) {
        reqTimeout = setTimeout(function () {
          reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
          finalize();
        }, request.timeout);
      });
    }

    req.on('error', function (err) {
      reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
      finalize();
    });

    req.on('response', function (res) {
      clearTimeout(reqTimeout);

      const headers = createHeadersLenient(res.headers);

      // HTTP fetch step 5
      if (fetch.isRedirect(res.statusCode)) {
        // HTTP fetch step 5.2
        const location = headers.get('Location');

        // HTTP fetch step 5.3
        const locationURL = location === null ? null : resolve_url(request.url, location);

        // HTTP fetch step 5.5
        switch (request.redirect) {
          case 'error':
            reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));
            finalize();
            return;
          case 'manual':
            // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
            if (locationURL !== null) {
              // handle corrupted header
              try {
                headers.set('Location', locationURL);
              } catch (err) {
                // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request
                reject(err);
              }
            }
            break;
          case 'follow':
            // HTTP-redirect fetch step 2
            if (locationURL === null) {
              break;
            }

            // HTTP-redirect fetch step 5
            if (request.counter >= request.follow) {
              reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
              finalize();
              return;
            }

            // HTTP-redirect fetch step 6 (counter increment)
            // Create a new Request object.
            const requestOpts = {
              headers: new Headers(request.headers),
              follow: request.follow,
              counter: request.counter + 1,
              agent: request.agent,
              compress: request.compress,
              method: request.method,
              body: request.body,
              signal: request.signal,
              timeout: request.timeout,
              size: request.size
            };

            // HTTP-redirect fetch step 9
            if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
              reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
              finalize();
              return;
            }

            // HTTP-redirect fetch step 11
            if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
              requestOpts.method = 'GET';
              requestOpts.body = undefined;
              requestOpts.headers.delete('content-length');
            }

            // HTTP-redirect fetch step 15: recurse into fetch for the new URL
            resolve(fetch(new Request(locationURL, requestOpts)));
            finalize();
            return;
        }
      }

      // prepare response
      res.once('end', function () {
        if (signal) signal.removeEventListener('abort', abortAndFinalize);
      });
      let body = res.pipe(new PassThrough$1());

      const response_options = {
        url: request.url,
        status: res.statusCode,
        statusText: res.statusMessage,
        headers: headers,
        size: request.size,
        timeout: request.timeout,
        counter: request.counter
      };

      // HTTP-network fetch step 12.1.1.3
      const codings = headers.get('Content-Encoding');

      // HTTP-network fetch step 12.1.1.4: handle content codings

      // in following scenarios we ignore compression support
      // 1. compression support is disabled
      // 2. HEAD request
      // 3. no Content-Encoding header
      // 4. no content response (204)
      // 5. content not modified response (304)
      if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
        response = new Response(body, response_options);
        resolve(response);
        return;
      }

      // For Node v6+
      // Be less strict when decoding compressed responses, since sometimes
      // servers send slightly invalid responses that are still accepted
      // by common browsers.
      // Always using Z_SYNC_FLUSH is what cURL does.
      const zlibOptions = {
        flush: zlib.Z_SYNC_FLUSH,
        finishFlush: zlib.Z_SYNC_FLUSH
      };

      // for gzip
      if (codings == 'gzip' || codings == 'x-gzip') {
        body = body.pipe(zlib.createGunzip(zlibOptions));
        response = new Response(body, response_options);
        resolve(response);
        return;
      }

      // for deflate
      if (codings == 'deflate' || codings == 'x-deflate') {
        // handle the infamous raw deflate response from old servers
        // a hack for old IIS and Apache servers
        const raw = res.pipe(new PassThrough$1());
        raw.once('data', function (chunk) {
          // see http://stackoverflow.com/questions/37519828
          if ((chunk[0] & 0x0F) === 0x08) {
            body = body.pipe(zlib.createInflate());
          } else {
            body = body.pipe(zlib.createInflateRaw());
          }
          response = new Response(body, response_options);
          resolve(response);
        });
        return;
      }

      // for br
      if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
        body = body.pipe(zlib.createBrotliDecompress());
        response = new Response(body, response_options);
        resolve(response);
        return;
      }

      // otherwise, use response as-is
      response = new Response(body, response_options);
      resolve(response);
    });

    // stream the request body (if any) into the socket
    writeToStream(req, request);
  });
}
|
|
/**
 * Redirect code matching
 *
 * @param Number code Status code
 * @return Boolean
 */
fetch.isRedirect = function (code) {
  return [301, 302, 303, 307, 308].indexOf(code) !== -1;
};
|
|
|
|
// expose Promise
fetch.Promise = global.Promise;

// CommonJS + transpiled-ESM compatible export shape: the function itself is
// the module, with named exports and a self-referencing `default` attached.
module.exports = exports = fetch;
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.default = exports;
exports.Headers = Headers;
exports.Request = Request;
exports.Response = Response;
exports.FetchError = FetchError;
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 1223:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
var wrappy = __nccwpck_require__(2940)
module.exports = wrappy(once)
module.exports.strict = wrappy(onceStrict)

// Opt-in prototype patching: calling once.proto() defines `.once()` and
// `.onceStrict()` helpers on Function.prototype. The patcher is itself
// wrapped in once() so it runs at most one time.
once.proto = once(function () {
  Object.defineProperty(Function.prototype, 'once', {
    value: function () {
      return once(this)
    },
    configurable: true
  })

  Object.defineProperty(Function.prototype, 'onceStrict', {
    value: function () {
      return onceStrict(this)
    },
    configurable: true
  })
})
|
|
|
|
// Wrap `fn` so the underlying function runs at most once; every later call
// replays the first call's return value. State is exposed on the wrapper
// as `.called` and `.value`.
function once (fn) {
  var wrapper = function () {
    if (wrapper.called) {
      return wrapper.value
    }
    wrapper.called = true
    wrapper.value = fn.apply(this, arguments)
    return wrapper.value
  }
  wrapper.called = false
  return wrapper
}
|
|
|
|
// Like once(), but a second invocation throws (message derived from the
// wrapped function's name) instead of replaying the cached value.
function onceStrict (fn) {
  var wrapper = function () {
    if (wrapper.called) {
      throw new Error(wrapper.onceError)
    }
    wrapper.called = true
    wrapper.value = fn.apply(this, arguments)
    return wrapper.value
  }
  var name = fn.name || 'Function wrapped with `once`'
  wrapper.onceError = name + " shouldn't be called more than once"
  wrapper.called = false
  return wrapper
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 4294:
|
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|
|
|
// Entry point for the tunnel package: re-export the implementation module.
module.exports = __nccwpck_require__(4219);
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 4219:
|
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
|
|
"use strict";
|
|
|
|
|
|
var net = __nccwpck_require__(1631);
|
|
var tls = __nccwpck_require__(4016);
|
|
var http = __nccwpck_require__(8605);
|
|
var https = __nccwpck_require__(7211);
|
|
var events = __nccwpck_require__(8614);
|
|
var assert = __nccwpck_require__(2357);
|
|
var util = __nccwpck_require__(1669);
|
|
|
|
|
|
exports.httpOverHttp = httpOverHttp;
|
|
exports.httpsOverHttp = httpsOverHttp;
|
|
exports.httpOverHttps = httpOverHttps;
|
|
exports.httpsOverHttps = httpsOverHttps;
|
|
|
|
|
|
/**
 * Agent for plain HTTP requests tunneled through an HTTP proxy.
 *
 * @param Object options Agent options (proxy settings, maxSockets, ...)
 * @return TunnelingAgent
 */
function httpOverHttp(options) {
  return Object.assign(new TunnelingAgent(options), {
    request: http.request
  });
}

/**
 * Agent for HTTPS requests tunneled through an HTTP proxy; the origin
 * socket is secured via createSecureSocket after CONNECT succeeds.
 */
function httpsOverHttp(options) {
  return Object.assign(new TunnelingAgent(options), {
    request: http.request,
    createSocket: createSecureSocket,
    defaultPort: 443
  });
}

/**
 * Agent for plain HTTP requests tunneled through an HTTPS proxy.
 */
function httpOverHttps(options) {
  return Object.assign(new TunnelingAgent(options), {
    request: https.request
  });
}

/**
 * Agent for HTTPS requests tunneled through an HTTPS proxy.
 */
function httpsOverHttps(options) {
  return Object.assign(new TunnelingAgent(options), {
    request: https.request,
    createSocket: createSecureSocket,
    defaultPort: 443
  });
}
|
|
|
|
|
|
/**
 * Agent that tunnels requests through a proxy via CONNECT.
 *
 * @param Object options Agent options; `options.proxy` holds the proxy
 *                       connection settings, `options.maxSockets` caps
 *                       concurrent sockets.
 */
function TunnelingAgent(options) {
  var self = this;
  self.options = options || {};
  self.proxyOptions = self.options.proxy || {};
  self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;
  self.requests = []; // requests queued while at the maxSockets limit
  self.sockets = [];  // live sockets (or placeholders while connecting)

  // When a socket frees up, hand it to a queued request for the same
  // host:port; otherwise close it and drop it from bookkeeping.
  self.on('free', function onFree(socket, host, port, localAddress) {
    var options = toOptions(host, port, localAddress);
    for (var i = 0, len = self.requests.length; i < len; ++i) {
      var pending = self.requests[i];
      if (pending.host === options.host && pending.port === options.port) {
        // Detect the request to connect same origin server,
        // reuse the connection.
        self.requests.splice(i, 1);
        pending.request.onSocket(socket);
        return;
      }
    }
    socket.destroy();
    self.removeSocket(socket);
  });
}
util.inherits(TunnelingAgent, events.EventEmitter);
|
|
|
|
// Called by Node's http layer for each request routed through this agent.
// Queues the request when at the maxSockets limit, otherwise opens a new
// tunneled socket and attaches lifecycle listeners to it.
TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {
  var self = this;
  var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));

  if (self.sockets.length >= this.maxSockets) {
    // We are over limit so we'll add it to the queue.
    self.requests.push(options);
    return;
  }

  // If we are under maxSockets create a new one.
  self.createSocket(options, function(socket) {
    socket.on('free', onFree);
    socket.on('close', onCloseOrRemove);
    socket.on('agentRemove', onCloseOrRemove);
    req.onSocket(socket);

    // Re-dispatch a freed socket through the agent's 'free' handler.
    function onFree() {
      self.emit('free', socket, options);
    }

    // Drop the socket from bookkeeping and detach all of our listeners.
    function onCloseOrRemove(err) {
      self.removeSocket(socket);
      socket.removeListener('free', onFree);
      socket.removeListener('close', onCloseOrRemove);
      socket.removeListener('agentRemove', onCloseOrRemove);
    }
  });
};
|
|
|
|
// Open a CONNECT tunnel through the proxy for `options.request`, then call
// `cb(socket)` with the raw tunneled socket. A placeholder object reserves
// the slot in self.sockets until the tunnel is established (or fails).
TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
  var self = this;
  var placeholder = {};
  self.sockets.push(placeholder);

  var connectOptions = mergeOptions({}, self.proxyOptions, {
    method: 'CONNECT',
    path: options.host + ':' + options.port,
    agent: false,
    headers: {
      host: options.host + ':' + options.port
    }
  });
  if (options.localAddress) {
    connectOptions.localAddress = options.localAddress;
  }
  if (connectOptions.proxyAuth) {
    connectOptions.headers = connectOptions.headers || {};
    // Buffer.from replaces the deprecated (and security-flagged)
    // `new Buffer(string)` constructor; behavior is identical for strings.
    connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
        Buffer.from(connectOptions.proxyAuth).toString('base64');
  }

  debug('making CONNECT request');
  var connectReq = self.request(connectOptions);
  connectReq.useChunkedEncodingByDefault = false; // for v0.6
  connectReq.once('response', onResponse); // for v0.6
  connectReq.once('upgrade', onUpgrade);   // for v0.6
  connectReq.once('connect', onConnect);   // for v0.7 or later
  connectReq.once('error', onError);
  connectReq.end();

  function onResponse(res) {
    // Very hacky. This is necessary to avoid http-parser leaks.
    res.upgrade = true;
  }

  function onUpgrade(res, socket, head) {
    // Hacky.
    process.nextTick(function() {
      onConnect(res, socket, head);
    });
  }

  function onConnect(res, socket, head) {
    connectReq.removeAllListeners();
    socket.removeAllListeners();

    // Anything but 200 means the proxy refused the tunnel.
    if (res.statusCode !== 200) {
      debug('tunneling socket could not be established, statusCode=%d',
            res.statusCode);
      socket.destroy();
      var error = new Error('tunneling socket could not be established, ' +
                            'statusCode=' + res.statusCode);
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    // A CONNECT response must not carry a body.
    if (head.length > 0) {
      debug('got illegal response body from proxy');
      socket.destroy();
      var error = new Error('got illegal response body from proxy');
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    debug('tunneling connection has established');
    // Swap the placeholder for the live socket.
    self.sockets[self.sockets.indexOf(placeholder)] = socket;
    return cb(socket);
  }

  function onError(cause) {
    connectReq.removeAllListeners();

    debug('tunneling socket could not be established, cause=%s\n',
          cause.message, cause.stack);
    var error = new Error('tunneling socket could not be established, ' +
                          'cause=' + cause.message);
    error.code = 'ECONNRESET';
    options.request.emit('error', error);
    self.removeSocket(placeholder);
  }
};
|
|
|
|
/**
 * Removes `socket` from the pool; if a request was queued waiting for
 * capacity, immediately provisions a replacement socket for it so the freed
 * slot is not lost.
 */
TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
  var index = this.sockets.indexOf(socket);
  if (index === -1) {
    return;
  }
  this.sockets.splice(index, 1);

  var pending = this.requests.shift();
  if (!pending) {
    return;
  }
  // A socket closed while requests were queued: open a replacement to take
  // over the vacated slot in the pool.
  this.createSocket(pending, function(replacement) {
    pending.request.onSocket(replacement);
  });
};
|
|
|
|
// HTTPS-over-proxy variant of createSocket: establishes the plain CONNECT
// tunnel first, then upgrades the tunneled socket to TLS before handing it
// to the caller. Invoked with `this` bound to a TunnelingAgent (installed as
// the agent's createSocket outside this chunk — presumably by the
// httpsOverHttp/httpsOverHttps factories; confirm against the full module).
function createSecureSocket(options, cb) {
  var self = this;
  TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {
    // SNI/verification name: prefer the request's Host header with any
    // ":port" suffix stripped, falling back to the target host.
    var hostHeader = options.request.getHeader('host');
    var tlsOptions = mergeOptions({}, self.options, {
      socket: socket,
      servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host
    });

    // 0 is dummy port for v0.6 — the real transport is the wrapped socket.
    var secureSocket = tls.connect(0, tlsOptions);
    // Replace the plaintext socket with its TLS wrapper in the pool so
    // removeSocket bookkeeping matches what callers hold.
    self.sockets[self.sockets.indexOf(socket)] = secureSocket;
    cb(secureSocket);
  });
}
|
|
|
|
|
|
/**
 * Normalizes addRequest's legacy positional call form (host, port,
 * localAddress) into an options object; newer callers already pass an
 * options object as the first argument, which is returned untouched.
 */
function toOptions(host, port, localAddress) {
  if (typeof host !== 'string') {
    return host; // for v0.11 or later: already an options object
  }
  // since v0.10: positional arguments
  return {
    host: host,
    port: port,
    localAddress: localAddress
  };
}
|
|
|
|
/**
 * Copies own enumerable properties of each override argument onto `target`,
 * left to right (later arguments win). Non-object arguments and properties
 * whose value is `undefined` are ignored.
 *
 * @param {Object} target - object mutated in place
 * @returns {Object} the same `target` reference
 */
function mergeOptions(target) {
  for (var i = 1, len = arguments.length; i < len; ++i) {
    var overrides = arguments[i];
    // Guard against null explicitly: `typeof null === 'object'`, and the
    // unguarded version would throw in Object.keys(null).
    if (overrides !== null && typeof overrides === 'object') {
      var keys = Object.keys(overrides);
      for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {
        var k = keys[j];
        if (overrides[k] !== undefined) {
          target[k] = overrides[k];
        }
      }
    }
  }
  return target;
}
|
|
|
|
|
|
// Debug logging: active only when NODE_DEBUG mentions the word "tunnel";
// otherwise a no-op so hot paths pay nothing.
var debug;
if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
  debug = function() {
    var args = Array.prototype.slice.call(arguments);
    // Tag output so tunnel lines are identifiable in stderr; keep a string
    // first argument intact for util.format-style placeholders.
    args = typeof args[0] === 'string'
      ? ['TUNNEL: ' + args[0]].concat(args.slice(1))
      : ['TUNNEL:'].concat(args);
    console.error.apply(console, args);
  };
} else {
  debug = function() {};
}
exports.debug = debug; // for test
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 5030:
|
|
/***/ ((__unused_webpack_module, exports) => {
|
|
|
|
"use strict";

// Mark this module's exports as an ES-module namespace so transpiled
// importers use correct default/named interop.
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
|
|
/**
 * Builds a User-Agent string for the current runtime: the browser's
 * navigator.userAgent when present, otherwise a Node.js descriptor built
 * from process metadata, otherwise a fixed placeholder.
 */
function getUserAgent() {
  const inBrowser = typeof navigator === "object" && "userAgent" in navigator;
  if (inBrowser) {
    return navigator.userAgent;
  }

  const inNode = typeof process === "object" && "version" in process;
  if (inNode) {
    const version = process.version.substr(1); // strip the leading "v"
    return `Node.js/${version} (${process.platform}; ${process.arch})`;
  }

  return "<environment undetectable>";
}
|
|
|
|
// Public API of this module (universal-user-agent).
exports.getUserAgent = getUserAgent;
//# sourceMappingURL=index.js.map
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2940:
|
|
/***/ ((module) => {
|
|
|
|
// Returns a wrapper function that returns a wrapped callback.
// The wrapper function should do some stuff, and return a
// presumably different callback function.
// This makes sure that own properties are retained, so that
// decorations and such are not lost along the way.
module.exports = wrappy
|
|
/**
 * Wraps a callback-wrapping function `fn` so that own properties survive:
 * fn's own properties appear on the returned wrapper, and the last call
 * argument's own properties are copied onto whatever function `fn` returns
 * (unless fn returned that argument itself). wrappy(fn, cb) is shorthand
 * for wrappy(fn)(cb).
 */
function wrappy (fn, cb) {
  // Two-argument shorthand: wrap and immediately apply.
  if (fn && cb) return wrappy(fn)(cb);

  if (typeof fn !== 'function')
    throw new TypeError('need wrapper function');

  // Surface fn's own decorations on the wrapper.
  Object.keys(fn).forEach(function (key) {
    wrapper[key] = fn[key];
  });

  return wrapper;

  function wrapper() {
    var args = [];
    for (var i = 0; i < arguments.length; i++) {
      args.push(arguments[i]);
    }
    var result = fn.apply(this, args);
    var lastArg = args[args.length - 1];
    // If fn produced a new function distinct from the callback, carry the
    // callback's own decorations over to it.
    if (typeof result === 'function' && result !== lastArg) {
      Object.keys(lastArg).forEach(function (key) {
        result[key] = lastArg[key];
      });
    }
    return result;
  }
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2877:
|
|
/***/ ((module) => {
|
|
|
|
// ncc stub for a dynamic require: "encoding" (an optional dependency) is
// resolved from the host environment at runtime instead of being bundled;
// eval("require") hides the call from the bundler's static analysis.
module.exports = eval("require")("encoding");
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 2941:
|
|
/***/ ((module) => {
|
|
|
|
// ncc stub for a dynamic require: "original-fs" (Electron's unpatched fs) is
// resolved from the host environment at runtime instead of being bundled;
// eval("require") hides the call from the bundler's static analysis.
module.exports = eval("require")("original-fs");
|
|
|
|
|
|
/***/ }),
|
|
|
|
// ---------------------------------------------------------------------------
// Externalized Node.js core modules: ncc emits one passthrough entry per
// built-in so bundled code can load them by numeric module id.
// ---------------------------------------------------------------------------
/***/ 2357:
/***/ ((module) => {

"use strict";
module.exports = require("assert");;

/***/ }),

/***/ 6417:
/***/ ((module) => {

"use strict";
module.exports = require("crypto");;

/***/ }),

/***/ 8614:
/***/ ((module) => {

"use strict";
module.exports = require("events");;

/***/ }),

/***/ 5747:
/***/ ((module) => {

"use strict";
module.exports = require("fs");;

/***/ }),

/***/ 8605:
/***/ ((module) => {

"use strict";
module.exports = require("http");;

/***/ }),

/***/ 7211:
/***/ ((module) => {

"use strict";
module.exports = require("https");;

/***/ }),

/***/ 1631:
/***/ ((module) => {

"use strict";
module.exports = require("net");;

/***/ }),

/***/ 2087:
/***/ ((module) => {

"use strict";
module.exports = require("os");;

/***/ }),

/***/ 5622:
/***/ ((module) => {

"use strict";
module.exports = require("path");;

/***/ }),

/***/ 2413:
/***/ ((module) => {

"use strict";
module.exports = require("stream");;

/***/ }),

/***/ 4016:
/***/ ((module) => {

"use strict";
module.exports = require("tls");;

/***/ }),

/***/ 8835:
/***/ ((module) => {

"use strict";
module.exports = require("url");;

/***/ }),

/***/ 1669:
/***/ ((module) => {

"use strict";
module.exports = require("util");;

/***/ }),

/***/ 8761:
/***/ ((module) => {

"use strict";
module.exports = require("zlib");;

/***/ })
|
|
|
|
/******/ });
|
|
/************************************************************************/
|
|
/******/ 	// The module cache: module id -> { exports } record.
/******/ 	var __webpack_module_cache__ = {};
/******/
/******/ 	// The require function: loads a bundled module by numeric id,
/******/ 	// executing its factory at most once (results are cached).
/******/ 	function __nccwpck_require__(moduleId) {
/******/ 		// Check if module is in cache
/******/ 		if(__webpack_module_cache__[moduleId]) {
/******/ 			return __webpack_module_cache__[moduleId].exports;
/******/ 		}
/******/ 		// Create a new module (and put it into the cache)
/******/ 		var module = __webpack_module_cache__[moduleId] = {
/******/ 			// no module.id needed
/******/ 			// no module.loaded needed
/******/ 			exports: {}
/******/ 		};
/******/
/******/ 		// Execute the module function
/******/ 		var threw = true;
/******/ 		try {
/******/ 			__webpack_modules__[moduleId].call(module.exports, module, module.exports, __nccwpck_require__);
/******/ 			threw = false;
/******/ 		} finally {
/******/ 			// If the factory threw, evict the half-initialized entry so a
/******/ 			// later require can retry instead of seeing a broken cache hit.
/******/ 			if(threw) delete __webpack_module_cache__[moduleId];
/******/ 		}
/******/
/******/ 		// Return the exports of the module
/******/ 		return module.exports;
/******/ 	}
|
|
/******/
|
|
/************************************************************************/
|
|
/******/ 	/* webpack/runtime/compat */
/******/
/******/ 	// Base directory used by ncc's asset-relocation helpers (runtime file
/******/ 	// reads are resolved relative to the bundle's own location).
/******/ 	__nccwpck_require__.ab = __dirname + "/";/************************************************************************/
/******/ 	// module exports must be returned from runtime so entry inlining is disabled
/******/ 	// startup
/******/ 	// Load entry module (5496, the action's main) and return exports
/******/ 	return __nccwpck_require__(5496);
|
|
/******/ })()
|
|
; |