mirror of https://github.com/garronej/ts-ci.git
synced 2025-12-01 05:43:06 +00:00
12748 lines · 416 KiB · JavaScript
|
|
module.exports =
|
||
|
|
/******/ (function(modules, runtime) { // webpackBootstrap
|
||
|
|
/******/ "use strict";
|
||
|
|
/******/ // The module cache
|
||
|
|
/******/ var installedModules = {};
|
||
|
|
/******/
|
||
|
|
/******/ // The require function
|
||
|
|
/******/ function __webpack_require__(moduleId) {
|
||
|
|
/******/
|
||
|
|
/******/ // Check if module is in cache
|
||
|
|
/******/ if(installedModules[moduleId]) {
|
||
|
|
/******/ return installedModules[moduleId].exports;
|
||
|
|
/******/ }
|
||
|
|
/******/ // Create a new module (and put it into the cache)
|
||
|
|
/******/ var module = installedModules[moduleId] = {
|
||
|
|
/******/ i: moduleId,
|
||
|
|
/******/ l: false,
|
||
|
|
/******/ exports: {}
|
||
|
|
/******/ };
|
||
|
|
/******/
|
||
|
|
/******/ // Execute the module function
|
||
|
|
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
|
||
|
|
/******/
|
||
|
|
/******/ // Flag the module as loaded
|
||
|
|
/******/ module.l = true;
|
||
|
|
/******/
|
||
|
|
/******/ // Return the exports of the module
|
||
|
|
/******/ return module.exports;
|
||
|
|
/******/ }
|
||
|
|
/******/
|
||
|
|
/******/
|
||
|
|
/******/ __webpack_require__.ab = __dirname + "/";
|
||
|
|
/******/
|
||
|
|
/******/ // the startup function
|
||
|
|
/******/ function startup() {
|
||
|
|
/******/ // Load entry module and return exports
|
||
|
|
/******/ return __webpack_require__(198);
|
||
|
|
/******/ };
|
||
|
|
/******/
|
||
|
|
/******/ // run startup
|
||
|
|
/******/ return startup();
|
||
|
|
/******/ })
|
||
|
|
/************************************************************************/
|
||
|
|
/******/ ({
|
||
|
|
|
||
|
|
/***/ 2:
|
||
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
const os = __webpack_require__(87);
|
||
|
|
const macosRelease = __webpack_require__(118);
|
||
|
|
const winRelease = __webpack_require__(49);
|
||
|
|
|
||
|
|
const osName = (platform, release) => {
|
||
|
|
if (!platform && release) {
|
||
|
|
throw new Error('You can\'t specify a `release` without specifying `platform`');
|
||
|
|
}
|
||
|
|
|
||
|
|
platform = platform || os.platform();
|
||
|
|
|
||
|
|
let id;
|
||
|
|
|
||
|
|
if (platform === 'darwin') {
|
||
|
|
if (!release && os.platform() === 'darwin') {
|
||
|
|
release = os.release();
|
||
|
|
}
|
||
|
|
|
||
|
|
const prefix = release ? (Number(release.split('.')[0]) > 15 ? 'macOS' : 'OS X') : 'macOS';
|
||
|
|
id = release ? macosRelease(release).name : '';
|
||
|
|
return prefix + (id ? ' ' + id : '');
|
||
|
|
}
|
||
|
|
|
||
|
|
if (platform === 'linux') {
|
||
|
|
if (!release && os.platform() === 'linux') {
|
||
|
|
release = os.release();
|
||
|
|
}
|
||
|
|
|
||
|
|
id = release ? release.replace(/^(\d+\.\d+).*/, '$1') : '';
|
||
|
|
return 'Linux' + (id ? ' ' + id : '');
|
||
|
|
}
|
||
|
|
|
||
|
|
if (platform === 'win32') {
|
||
|
|
if (!release && os.platform() === 'win32') {
|
||
|
|
release = os.release();
|
||
|
|
}
|
||
|
|
|
||
|
|
id = release ? winRelease(release) : '';
|
||
|
|
return 'Windows' + (id ? ' ' + id : '');
|
||
|
|
}
|
||
|
|
|
||
|
|
return platform;
|
||
|
|
};
|
||
|
|
|
||
|
|
module.exports = osName;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 9:
|
||
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
|
||
|
|
var once = __webpack_require__(969);
|
||
|
|
|
||
|
|
var noop = function() {};
|
||
|
|
|
||
|
|
var isRequest = function(stream) {
|
||
|
|
return stream.setHeader && typeof stream.abort === 'function';
|
||
|
|
};
|
||
|
|
|
||
|
|
var isChildProcess = function(stream) {
|
||
|
|
return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3
|
||
|
|
};
|
||
|
|
|
||
|
|
var eos = function(stream, opts, callback) {
|
||
|
|
if (typeof opts === 'function') return eos(stream, null, opts);
|
||
|
|
if (!opts) opts = {};
|
||
|
|
|
||
|
|
callback = once(callback || noop);
|
||
|
|
|
||
|
|
var ws = stream._writableState;
|
||
|
|
var rs = stream._readableState;
|
||
|
|
var readable = opts.readable || (opts.readable !== false && stream.readable);
|
||
|
|
var writable = opts.writable || (opts.writable !== false && stream.writable);
|
||
|
|
var cancelled = false;
|
||
|
|
|
||
|
|
var onlegacyfinish = function() {
|
||
|
|
if (!stream.writable) onfinish();
|
||
|
|
};
|
||
|
|
|
||
|
|
var onfinish = function() {
|
||
|
|
writable = false;
|
||
|
|
if (!readable) callback.call(stream);
|
||
|
|
};
|
||
|
|
|
||
|
|
var onend = function() {
|
||
|
|
readable = false;
|
||
|
|
if (!writable) callback.call(stream);
|
||
|
|
};
|
||
|
|
|
||
|
|
var onexit = function(exitCode) {
|
||
|
|
callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null);
|
||
|
|
};
|
||
|
|
|
||
|
|
var onerror = function(err) {
|
||
|
|
callback.call(stream, err);
|
||
|
|
};
|
||
|
|
|
||
|
|
var onclose = function() {
|
||
|
|
process.nextTick(onclosenexttick);
|
||
|
|
};
|
||
|
|
|
||
|
|
var onclosenexttick = function() {
|
||
|
|
if (cancelled) return;
|
||
|
|
if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close'));
|
||
|
|
if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close'));
|
||
|
|
};
|
||
|
|
|
||
|
|
var onrequest = function() {
|
||
|
|
stream.req.on('finish', onfinish);
|
||
|
|
};
|
||
|
|
|
||
|
|
if (isRequest(stream)) {
|
||
|
|
stream.on('complete', onfinish);
|
||
|
|
stream.on('abort', onclose);
|
||
|
|
if (stream.req) onrequest();
|
||
|
|
else stream.on('request', onrequest);
|
||
|
|
} else if (writable && !ws) { // legacy streams
|
||
|
|
stream.on('end', onlegacyfinish);
|
||
|
|
stream.on('close', onlegacyfinish);
|
||
|
|
}
|
||
|
|
|
||
|
|
if (isChildProcess(stream)) stream.on('exit', onexit);
|
||
|
|
|
||
|
|
stream.on('end', onend);
|
||
|
|
stream.on('finish', onfinish);
|
||
|
|
if (opts.error !== false) stream.on('error', onerror);
|
||
|
|
stream.on('close', onclose);
|
||
|
|
|
||
|
|
return function() {
|
||
|
|
cancelled = true;
|
||
|
|
stream.removeListener('complete', onfinish);
|
||
|
|
stream.removeListener('abort', onclose);
|
||
|
|
stream.removeListener('request', onrequest);
|
||
|
|
if (stream.req) stream.req.removeListener('finish', onfinish);
|
||
|
|
stream.removeListener('end', onlegacyfinish);
|
||
|
|
stream.removeListener('close', onlegacyfinish);
|
||
|
|
stream.removeListener('finish', onfinish);
|
||
|
|
stream.removeListener('exit', onexit);
|
||
|
|
stream.removeListener('end', onend);
|
||
|
|
stream.removeListener('error', onerror);
|
||
|
|
stream.removeListener('close', onclose);
|
||
|
|
};
|
||
|
|
};
|
||
|
|
|
||
|
|
module.exports = eos;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 11:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
// Returns a wrapper function that returns a wrapped callback
|
||
|
|
// The wrapper function should do some stuff, and return a
|
||
|
|
// presumably different callback function.
|
||
|
|
// This makes sure that own properties are retained, so that
|
||
|
|
// decorations and such are not lost along the way.
|
||
|
|
module.exports = wrappy
|
||
|
|
function wrappy (fn, cb) {
|
||
|
|
if (fn && cb) return wrappy(fn)(cb)
|
||
|
|
|
||
|
|
if (typeof fn !== 'function')
|
||
|
|
throw new TypeError('need wrapper function')
|
||
|
|
|
||
|
|
Object.keys(fn).forEach(function (k) {
|
||
|
|
wrapper[k] = fn[k]
|
||
|
|
})
|
||
|
|
|
||
|
|
return wrapper
|
||
|
|
|
||
|
|
function wrapper() {
|
||
|
|
var args = new Array(arguments.length)
|
||
|
|
for (var i = 0; i < args.length; i++) {
|
||
|
|
args[i] = arguments[i]
|
||
|
|
}
|
||
|
|
var ret = fn.apply(this, args)
|
||
|
|
var cb = args[args.length-1]
|
||
|
|
if (typeof ret === 'function' && ret !== cb) {
|
||
|
|
Object.keys(cb).forEach(function (k) {
|
||
|
|
ret[k] = cb[k]
|
||
|
|
})
|
||
|
|
}
|
||
|
|
return ret
|
||
|
|
}
|
||
|
|
}
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 18:
|
||
|
|
/***/ (function() {
|
||
|
|
|
||
|
|
eval("require")("encoding");
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 20:
|
||
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
|
||
|
|
const cp = __webpack_require__(129);
|
||
|
|
const parse = __webpack_require__(568);
|
||
|
|
const enoent = __webpack_require__(881);
|
||
|
|
|
||
|
|
function spawn(command, args, options) {
|
||
|
|
// Parse the arguments
|
||
|
|
const parsed = parse(command, args, options);
|
||
|
|
|
||
|
|
// Spawn the child process
|
||
|
|
const spawned = cp.spawn(parsed.command, parsed.args, parsed.options);
|
||
|
|
|
||
|
|
// Hook into child process "exit" event to emit an error if the command
|
||
|
|
// does not exists, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16
|
||
|
|
enoent.hookChildProcess(spawned, parsed);
|
||
|
|
|
||
|
|
return spawned;
|
||
|
|
}
|
||
|
|
|
||
|
|
function spawnSync(command, args, options) {
|
||
|
|
// Parse the arguments
|
||
|
|
const parsed = parse(command, args, options);
|
||
|
|
|
||
|
|
// Spawn the child process
|
||
|
|
const result = cp.spawnSync(parsed.command, parsed.args, parsed.options);
|
||
|
|
|
||
|
|
// Analyze if the command does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16
|
||
|
|
result.error = result.error || enoent.verifyENOENTSync(result.status, parsed);
|
||
|
|
|
||
|
|
return result;
|
||
|
|
}
|
||
|
|
|
||
|
|
module.exports = spawn;
|
||
|
|
module.exports.spawn = spawn;
|
||
|
|
module.exports.sync = spawnSync;
|
||
|
|
|
||
|
|
module.exports._parse = parse;
|
||
|
|
module.exports._enoent = enoent;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 32:
|
||
|
|
/***/ (function(__unusedmodule, exports) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
|
|
});
|
||
|
|
};
|
||
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
|
exports.getCommitAsyncIterableFactory = void 0;
|
||
|
|
const per_page = 30;
|
||
|
|
/** Iterate over the commits of a repo's branch */
|
||
|
|
function getCommitAsyncIterableFactory(params) {
|
||
|
|
const { octokit } = params;
|
||
|
|
function getCommitAsyncIterable(params) {
|
||
|
|
const { owner, repo, branch } = params;
|
||
|
|
let commits = [];
|
||
|
|
let page = 0;
|
||
|
|
let isLastPage = undefined;
|
||
|
|
const getReposListCommitsResponseData = (params) => octokit.repos.listCommits({
|
||
|
|
owner,
|
||
|
|
repo,
|
||
|
|
per_page,
|
||
|
|
"page": params.page,
|
||
|
|
"sha": branch
|
||
|
|
}).then(({ data }) => data);
|
||
|
|
return {
|
||
|
|
[Symbol.asyncIterator]() {
|
||
|
|
return {
|
||
|
|
"next": () => __awaiter(this, void 0, void 0, function* () {
|
||
|
|
if (commits.length === 0) {
|
||
|
|
if (isLastPage) {
|
||
|
|
return { "done": true, "value": undefined };
|
||
|
|
}
|
||
|
|
page++;
|
||
|
|
commits = yield getReposListCommitsResponseData({ page });
|
||
|
|
if (commits.length === 0) {
|
||
|
|
return { "done": true, "value": undefined };
|
||
|
|
}
|
||
|
|
isLastPage =
|
||
|
|
commits.length !== per_page ||
|
||
|
|
(yield getReposListCommitsResponseData({ "page": page + 1 })).length === 0;
|
||
|
|
}
|
||
|
|
const [commit, ...rest] = commits;
|
||
|
|
commits = rest;
|
||
|
|
return {
|
||
|
|
"value": commit,
|
||
|
|
"done": false
|
||
|
|
};
|
||
|
|
})
|
||
|
|
};
|
||
|
|
}
|
||
|
|
};
|
||
|
|
}
|
||
|
|
return { getCommitAsyncIterable };
|
||
|
|
}
|
||
|
|
exports.getCommitAsyncIterableFactory = getCommitAsyncIterableFactory;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 39:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
module.exports = opts => {
|
||
|
|
opts = opts || {};
|
||
|
|
|
||
|
|
const env = opts.env || process.env;
|
||
|
|
const platform = opts.platform || process.platform;
|
||
|
|
|
||
|
|
if (platform !== 'win32') {
|
||
|
|
return 'PATH';
|
||
|
|
}
|
||
|
|
|
||
|
|
return Object.keys(env).find(x => x.toUpperCase() === 'PATH') || 'Path';
|
||
|
|
};
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 40:
|
||
|
|
/***/ (function(__unusedmodule, exports) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
exports.__esModule = true;
|
||
|
|
/** Object.keys() with types */
|
||
|
|
function objectKeys(o) {
|
||
|
|
return Object.keys(o);
|
||
|
|
}
|
||
|
|
exports.objectKeys = objectKeys;
|
||
|
|
//# sourceMappingURL=objectKeys.js.map
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 43:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
|
|
});
|
||
|
|
};
|
||
|
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||
|
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||
|
|
};
|
||
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
|
exports.action = exports.setOutput = exports.getActionParams = void 0;
|
||
|
|
const node_fetch_1 = __importDefault(__webpack_require__(454));
|
||
|
|
const urlJoin = __webpack_require__(683);
|
||
|
|
const outputHelper_1 = __webpack_require__(762);
|
||
|
|
const NpmModuleVersion_1 = __webpack_require__(395);
|
||
|
|
const inputHelper_1 = __webpack_require__(649);
|
||
|
|
exports.getActionParams = inputHelper_1.getActionParamsFactory({
|
||
|
|
"inputNameSubset": [
|
||
|
|
"owner",
|
||
|
|
"repo",
|
||
|
|
"branch",
|
||
|
|
"compare_to_version"
|
||
|
|
]
|
||
|
|
}).getActionParams;
|
||
|
|
exports.setOutput = outputHelper_1.setOutputFactory().setOutput;
|
||
|
|
function action(_actionName, params, core) {
|
||
|
|
return __awaiter(this, void 0, void 0, function* () {
|
||
|
|
core.debug(JSON.stringify(params));
|
||
|
|
const { owner, repo, branch, compare_to_version } = params;
|
||
|
|
const version = yield node_fetch_1.default(urlJoin("https://raw.github.com", owner, repo, branch, "package.json"))
|
||
|
|
.then(res => res.text())
|
||
|
|
.then(text => JSON.parse(text))
|
||
|
|
.then(({ version }) => version)
|
||
|
|
.catch(() => "");
|
||
|
|
core.debug(`Version on ${owner}/${repo}#${branch} is ${version}`);
|
||
|
|
return {
|
||
|
|
version,
|
||
|
|
"compare_result": NpmModuleVersion_1.NpmModuleVersion.compare(NpmModuleVersion_1.NpmModuleVersion.parse(version || "0.0.0"), NpmModuleVersion_1.NpmModuleVersion.parse(compare_to_version)).toString()
|
||
|
|
};
|
||
|
|
});
|
||
|
|
}
|
||
|
|
exports.action = action;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 49:
|
||
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
const os = __webpack_require__(87);
|
||
|
|
const execa = __webpack_require__(955);
|
||
|
|
|
||
|
|
// Reference: https://www.gaijin.at/en/lstwinver.php
|
||
|
|
const names = new Map([
|
||
|
|
['10.0', '10'],
|
||
|
|
['6.3', '8.1'],
|
||
|
|
['6.2', '8'],
|
||
|
|
['6.1', '7'],
|
||
|
|
['6.0', 'Vista'],
|
||
|
|
['5.2', 'Server 2003'],
|
||
|
|
['5.1', 'XP'],
|
||
|
|
['5.0', '2000'],
|
||
|
|
['4.9', 'ME'],
|
||
|
|
['4.1', '98'],
|
||
|
|
['4.0', '95']
|
||
|
|
]);
|
||
|
|
|
||
|
|
const windowsRelease = release => {
|
||
|
|
const version = /\d+\.\d/.exec(release || os.release());
|
||
|
|
|
||
|
|
if (release && !version) {
|
||
|
|
throw new Error('`release` argument doesn\'t match `n.n`');
|
||
|
|
}
|
||
|
|
|
||
|
|
const ver = (version || [])[0];
|
||
|
|
|
||
|
|
// Server 2008, 2012, 2016, and 2019 versions are ambiguous with desktop versions and must be detected at runtime.
|
||
|
|
// If `release` is omitted or we're on a Windows system, and the version number is an ambiguous version
|
||
|
|
// then use `wmic` to get the OS caption: https://msdn.microsoft.com/en-us/library/aa394531(v=vs.85).aspx
|
||
|
|
// If `wmic` is obsoloete (later versions of Windows 10), use PowerShell instead.
|
||
|
|
// If the resulting caption contains the year 2008, 2012, 2016 or 2019, it is a server version, so return a server OS name.
|
||
|
|
if ((!release || release === os.release()) && ['6.1', '6.2', '6.3', '10.0'].includes(ver)) {
|
||
|
|
let stdout;
|
||
|
|
try {
|
||
|
|
stdout = execa.sync('wmic', ['os', 'get', 'Caption']).stdout || '';
|
||
|
|
} catch (_) {
|
||
|
|
stdout = execa.sync('powershell', ['(Get-CimInstance -ClassName Win32_OperatingSystem).caption']).stdout || '';
|
||
|
|
}
|
||
|
|
|
||
|
|
const year = (stdout.match(/2008|2012|2016|2019/) || [])[0];
|
||
|
|
|
||
|
|
if (year) {
|
||
|
|
return `Server ${year}`;
|
||
|
|
}
|
||
|
|
}
|
||
|
|
|
||
|
|
return names.get(ver);
|
||
|
|
};
|
||
|
|
|
||
|
|
module.exports = windowsRelease;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 58:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
module.exports = require("readline");
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 82:
|
||
|
|
/***/ (function(__unusedmodule, exports) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
// We use any as a valid input type
|
||
|
|
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
|
/**
|
||
|
|
* Sanitizes an input into a string so it can be passed into issueCommand safely
|
||
|
|
* @param input input to sanitize into a string
|
||
|
|
*/
|
||
|
|
function toCommandValue(input) {
|
||
|
|
if (input === null || input === undefined) {
|
||
|
|
return '';
|
||
|
|
}
|
||
|
|
else if (typeof input === 'string' || input instanceof String) {
|
||
|
|
return input;
|
||
|
|
}
|
||
|
|
return JSON.stringify(input);
|
||
|
|
}
|
||
|
|
exports.toCommandValue = toCommandValue;
|
||
|
|
//# sourceMappingURL=utils.js.map
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 87:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
module.exports = require("os");
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 102:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
// For internal use, subject to change.
|
||
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||
|
|
if (mod && mod.__esModule) return mod;
|
||
|
|
var result = {};
|
||
|
|
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||
|
|
result["default"] = mod;
|
||
|
|
return result;
|
||
|
|
};
|
||
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
|
// We use any as a valid input type
|
||
|
|
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||
|
|
const fs = __importStar(__webpack_require__(747));
|
||
|
|
const os = __importStar(__webpack_require__(87));
|
||
|
|
const utils_1 = __webpack_require__(82);
|
||
|
|
function issueCommand(command, message) {
|
||
|
|
const filePath = process.env[`GITHUB_${command}`];
|
||
|
|
if (!filePath) {
|
||
|
|
throw new Error(`Unable to find environment variable for file command ${command}`);
|
||
|
|
}
|
||
|
|
if (!fs.existsSync(filePath)) {
|
||
|
|
throw new Error(`Missing file at path: ${filePath}`);
|
||
|
|
}
|
||
|
|
fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {
|
||
|
|
encoding: 'utf8'
|
||
|
|
});
|
||
|
|
}
|
||
|
|
exports.issueCommand = issueCommand;
|
||
|
|
//# sourceMappingURL=file-command.js.map
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 118:
|
||
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
const os = __webpack_require__(87);
|
||
|
|
|
||
|
|
const nameMap = new Map([
|
||
|
|
[20, ['Big Sur', '11']],
|
||
|
|
[19, ['Catalina', '10.15']],
|
||
|
|
[18, ['Mojave', '10.14']],
|
||
|
|
[17, ['High Sierra', '10.13']],
|
||
|
|
[16, ['Sierra', '10.12']],
|
||
|
|
[15, ['El Capitan', '10.11']],
|
||
|
|
[14, ['Yosemite', '10.10']],
|
||
|
|
[13, ['Mavericks', '10.9']],
|
||
|
|
[12, ['Mountain Lion', '10.8']],
|
||
|
|
[11, ['Lion', '10.7']],
|
||
|
|
[10, ['Snow Leopard', '10.6']],
|
||
|
|
[9, ['Leopard', '10.5']],
|
||
|
|
[8, ['Tiger', '10.4']],
|
||
|
|
[7, ['Panther', '10.3']],
|
||
|
|
[6, ['Jaguar', '10.2']],
|
||
|
|
[5, ['Puma', '10.1']]
|
||
|
|
]);
|
||
|
|
|
||
|
|
const macosRelease = release => {
|
||
|
|
release = Number((release || os.release()).split('.')[0]);
|
||
|
|
|
||
|
|
const [name, version] = nameMap.get(release);
|
||
|
|
|
||
|
|
return {
|
||
|
|
name,
|
||
|
|
version
|
||
|
|
};
|
||
|
|
};
|
||
|
|
|
||
|
|
module.exports = macosRelease;
|
||
|
|
// TODO: remove this in the next major version
|
||
|
|
module.exports.default = macosRelease;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 129:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
module.exports = require("child_process");
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 145:
|
||
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
const pump = __webpack_require__(453);
|
||
|
|
const bufferStream = __webpack_require__(966);
|
||
|
|
|
||
|
|
class MaxBufferError extends Error {
|
||
|
|
constructor() {
|
||
|
|
super('maxBuffer exceeded');
|
||
|
|
this.name = 'MaxBufferError';
|
||
|
|
}
|
||
|
|
}
|
||
|
|
|
||
|
|
function getStream(inputStream, options) {
|
||
|
|
if (!inputStream) {
|
||
|
|
return Promise.reject(new Error('Expected a stream'));
|
||
|
|
}
|
||
|
|
|
||
|
|
options = Object.assign({maxBuffer: Infinity}, options);
|
||
|
|
|
||
|
|
const {maxBuffer} = options;
|
||
|
|
|
||
|
|
let stream;
|
||
|
|
return new Promise((resolve, reject) => {
|
||
|
|
const rejectPromise = error => {
|
||
|
|
if (error) { // A null check
|
||
|
|
error.bufferedData = stream.getBufferedValue();
|
||
|
|
}
|
||
|
|
reject(error);
|
||
|
|
};
|
||
|
|
|
||
|
|
stream = pump(inputStream, bufferStream(options), error => {
|
||
|
|
if (error) {
|
||
|
|
rejectPromise(error);
|
||
|
|
return;
|
||
|
|
}
|
||
|
|
|
||
|
|
resolve();
|
||
|
|
});
|
||
|
|
|
||
|
|
stream.on('data', () => {
|
||
|
|
if (stream.getBufferedLength() > maxBuffer) {
|
||
|
|
rejectPromise(new MaxBufferError());
|
||
|
|
}
|
||
|
|
});
|
||
|
|
}).then(() => stream.getBufferedValue());
|
||
|
|
}
|
||
|
|
|
||
|
|
module.exports = getStream;
|
||
|
|
module.exports.buffer = (stream, options) => getStream(stream, Object.assign({}, options, {encoding: 'buffer'}));
|
||
|
|
module.exports.array = (stream, options) => getStream(stream, Object.assign({}, options, {array: true}));
|
||
|
|
module.exports.MaxBufferError = MaxBufferError;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 168:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
const alias = ['stdin', 'stdout', 'stderr'];
|
||
|
|
|
||
|
|
const hasAlias = opts => alias.some(x => Boolean(opts[x]));
|
||
|
|
|
||
|
|
module.exports = opts => {
|
||
|
|
if (!opts) {
|
||
|
|
return null;
|
||
|
|
}
|
||
|
|
|
||
|
|
if (opts.stdio && hasAlias(opts)) {
|
||
|
|
throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${alias.map(x => `\`${x}\``).join(', ')}`);
|
||
|
|
}
|
||
|
|
|
||
|
|
if (typeof opts.stdio === 'string') {
|
||
|
|
return opts.stdio;
|
||
|
|
}
|
||
|
|
|
||
|
|
const stdio = opts.stdio || [];
|
||
|
|
|
||
|
|
if (!Array.isArray(stdio)) {
|
||
|
|
throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``);
|
||
|
|
}
|
||
|
|
|
||
|
|
const result = [];
|
||
|
|
const len = Math.max(stdio.length, alias.length);
|
||
|
|
|
||
|
|
for (let i = 0; i < len; i++) {
|
||
|
|
let value = null;
|
||
|
|
|
||
|
|
if (stdio[i] !== undefined) {
|
||
|
|
value = stdio[i];
|
||
|
|
} else if (opts[alias[i]] !== undefined) {
|
||
|
|
value = opts[alias[i]];
|
||
|
|
}
|
||
|
|
|
||
|
|
result[i] = value;
|
||
|
|
}
|
||
|
|
|
||
|
|
return result;
|
||
|
|
};
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 197:
|
||
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
|
||
|
|
module.exports = isexe
|
||
|
|
isexe.sync = sync
|
||
|
|
|
||
|
|
var fs = __webpack_require__(747)
|
||
|
|
|
||
|
|
function isexe (path, options, cb) {
|
||
|
|
fs.stat(path, function (er, stat) {
|
||
|
|
cb(er, er ? false : checkStat(stat, options))
|
||
|
|
})
|
||
|
|
}
|
||
|
|
|
||
|
|
function sync (path, options) {
|
||
|
|
return checkStat(fs.statSync(path), options)
|
||
|
|
}
|
||
|
|
|
||
|
|
function checkStat (stat, options) {
|
||
|
|
return stat.isFile() && checkMode(stat, options)
|
||
|
|
}
|
||
|
|
|
||
|
|
function checkMode (stat, options) {
|
||
|
|
var mod = stat.mode
|
||
|
|
var uid = stat.uid
|
||
|
|
var gid = stat.gid
|
||
|
|
|
||
|
|
var myUid = options.uid !== undefined ?
|
||
|
|
options.uid : process.getuid && process.getuid()
|
||
|
|
var myGid = options.gid !== undefined ?
|
||
|
|
options.gid : process.getgid && process.getgid()
|
||
|
|
|
||
|
|
var u = parseInt('100', 8)
|
||
|
|
var g = parseInt('010', 8)
|
||
|
|
var o = parseInt('001', 8)
|
||
|
|
var ug = u | g
|
||
|
|
|
||
|
|
var ret = (mod & o) ||
|
||
|
|
(mod & g) && gid === myGid ||
|
||
|
|
(mod & u) && uid === myUid ||
|
||
|
|
(mod & ug) && myUid === 0
|
||
|
|
|
||
|
|
return ret
|
||
|
|
}
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 198:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||
|
|
if (k2 === undefined) k2 = k;
|
||
|
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||
|
|
}) : (function(o, m, k, k2) {
|
||
|
|
if (k2 === undefined) k2 = k;
|
||
|
|
o[k2] = m[k];
|
||
|
|
}));
|
||
|
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||
|
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||
|
|
}) : function(o, v) {
|
||
|
|
o["default"] = v;
|
||
|
|
});
|
||
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||
|
|
if (mod && mod.__esModule) return mod;
|
||
|
|
var result = {};
|
||
|
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||
|
|
__setModuleDefault(result, mod);
|
||
|
|
return result;
|
||
|
|
};
|
||
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
|
|
});
|
||
|
|
};
|
||
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
|
const core = __importStar(__webpack_require__(470));
|
||
|
|
const get_package_json_version = __importStar(__webpack_require__(43));
|
||
|
|
const dispatch_event = __importStar(__webpack_require__(863));
|
||
|
|
const sync_package_and_package_lock_version = __importStar(__webpack_require__(830));
|
||
|
|
const setup_repo_webhook_for_deno_land_publishing = __importStar(__webpack_require__(518));
|
||
|
|
const is_well_formed_and_available_module_name = __importStar(__webpack_require__(794));
|
||
|
|
const tell_if_project_uses_npm_or_yarn = __importStar(__webpack_require__(201));
|
||
|
|
const string_replace = __importStar(__webpack_require__(599));
|
||
|
|
const is_package_json_version_upgraded = __importStar(__webpack_require__(949));
|
||
|
|
const inputHelper_1 = __webpack_require__(649);
|
||
|
|
const update_changelog = __importStar(__webpack_require__(702));
|
||
|
|
function run() {
|
||
|
|
return __awaiter(this, void 0, void 0, function* () {
|
||
|
|
const action_name = inputHelper_1.getActionName();
|
||
|
|
switch (action_name) {
|
||
|
|
case "get_package_json_version":
|
||
|
|
get_package_json_version.setOutput(yield get_package_json_version.action(action_name, get_package_json_version.getActionParams(), core));
|
||
|
|
return null;
|
||
|
|
case "dispatch_event":
|
||
|
|
yield dispatch_event.action(action_name, dispatch_event.getActionParams(), core);
|
||
|
|
return null;
|
||
|
|
case "update_changelog":
|
||
|
|
yield update_changelog.action(action_name, update_changelog.getActionParams(), core);
|
||
|
|
return null;
|
||
|
|
case "sync_package_and_package_lock_version":
|
||
|
|
yield sync_package_and_package_lock_version.action(action_name, sync_package_and_package_lock_version.getActionParams(), core);
|
||
|
|
return null;
|
||
|
|
case "setup_repo_webhook_for_deno_land_publishing":
|
||
|
|
setup_repo_webhook_for_deno_land_publishing.setOutput(yield setup_repo_webhook_for_deno_land_publishing.action(action_name, setup_repo_webhook_for_deno_land_publishing.getActionParams(), core));
|
||
|
|
return null;
|
||
|
|
case "is_well_formed_and_available_module_name":
|
||
|
|
is_well_formed_and_available_module_name.setOutput(yield is_well_formed_and_available_module_name.action(action_name, is_well_formed_and_available_module_name.getActionParams(), core));
|
||
|
|
return null;
|
||
|
|
case "string_replace":
|
||
|
|
string_replace.setOutput(yield string_replace.action(action_name, string_replace.getActionParams(), core));
|
||
|
|
return null;
|
||
|
|
case "tell_if_project_uses_npm_or_yarn":
|
||
|
|
tell_if_project_uses_npm_or_yarn.setOutput(yield tell_if_project_uses_npm_or_yarn.action(action_name, tell_if_project_uses_npm_or_yarn.getActionParams(), core));
|
||
|
|
return null;
|
||
|
|
case "is_package_json_version_upgraded":
|
||
|
|
is_package_json_version_upgraded.setOutput(yield is_package_json_version_upgraded.action(action_name, is_package_json_version_upgraded.getActionParams(), core));
|
||
|
|
return null;
|
||
|
|
}
|
||
|
|
if (true) {
|
||
|
|
throw new Error(`${action_name} Not supported by this toolkit`);
|
||
|
|
}
|
||
|
|
});
|
||
|
|
}
|
||
|
|
(() => __awaiter(void 0, void 0, void 0, function* () {
|
||
|
|
try {
|
||
|
|
yield run();
|
||
|
|
}
|
||
|
|
catch (error) {
|
||
|
|
core.setFailed(error.message);
|
||
|
|
}
|
||
|
|
}))();
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 201:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
|
|
});
|
||
|
|
};
|
||
|
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||
|
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||
|
|
};
|
||
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
|
exports.action = exports.setOutput = exports.getActionParams = void 0;
|
||
|
|
const node_fetch_1 = __importDefault(__webpack_require__(454));
|
||
|
|
const urlJoin = __webpack_require__(683);
|
||
|
|
const outputHelper_1 = __webpack_require__(762);
|
||
|
|
const inputHelper_1 = __webpack_require__(649);
|
||
|
|
exports.getActionParams = inputHelper_1.getActionParamsFactory({
|
||
|
|
"inputNameSubset": [
|
||
|
|
"owner",
|
||
|
|
"repo",
|
||
|
|
"branch"
|
||
|
|
]
|
||
|
|
}).getActionParams;
|
||
|
|
exports.setOutput = outputHelper_1.setOutputFactory().setOutput;
|
||
|
|
/**
 * Decide whether the target repository uses npm or yarn by probing for a
 * yarn.lock file on the given branch via raw.github.com.
 * Returns { npm_or_yarn: "npm" | "yarn" } ("npm" only when the probe is 404).
 */
function action(_actionName, params, core) {
    return __awaiter(this, void 0, void 0, function* () {
        core.debug(JSON.stringify(params));
        const { owner, repo } = params;
        // Keep only the last path segment of refs like "refs/heads/main".
        const segments = params.branch.split("/");
        const branch = segments[segments.length - 1];
        const lockfileUrl = urlJoin("https://raw.github.com", owner, repo, branch, "yarn.lock");
        const response = yield node_fetch_1.default(lockfileUrl);
        const npm_or_yarn = response.status === 404 ? "npm" : "yarn";
        core.debug(`Version on ${owner}/${repo}#${branch} is using ${npm_or_yarn}`);
        return { npm_or_yarn };
    });
}
exports.action = action;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 211:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
module.exports = require("https");
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 260:
|
||
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
|
||
|
|
// Note: since nyc uses this module to output coverage, any lines
// that are in the direct sync flow of nyc's outputCoverage are
// ignored, since we can never get coverage for them.
// (Bundled copy of the `signal-exit` package.)
var assert = __webpack_require__(357)
var signals = __webpack_require__(654) // list of signal names to trap
var isWin = /^win/i.test(process.platform)

var EE = __webpack_require__(614)
/* istanbul ignore if */
if (typeof EE !== 'function') {
  EE = EE.EventEmitter
}

// A single process-global emitter is shared across every copy of this
// library loaded into the process (stored on `process` itself).
var emitter
if (process.__signal_exit_emitter__) {
  emitter = process.__signal_exit_emitter__
} else {
  emitter = process.__signal_exit_emitter__ = new EE()
  emitter.count = 0     // number of loaded signal-exit instances
  emitter.emitted = {}  // which of 'exit'/'afterexit' already fired
}

// Because this emitter is a global, we have to check to see if a
// previous version of this library failed to enable infinite listeners.
// I know what you're about to say.  But literally everything about
// signal-exit is a compromise with evil.  Get used to it.
if (!emitter.infinite) {
  emitter.setMaxListeners(Infinity)
  emitter.infinite = true
}
|
||
|
|
|
||
|
|
// Register `cb` to run when the process exits (normally or via a trapped
// signal). Returns a function that unregisters the callback again.
// opts.alwaysLast: fire on 'afterexit' (after the regular 'exit' listeners).
module.exports = function (cb, opts) {
  assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler')

  // Lazily install the process-wide hooks on first registration.
  if (loaded === false) {
    load()
  }

  var ev = 'exit'
  if (opts && opts.alwaysLast) {
    ev = 'afterexit'
  }

  var remove = function () {
    emitter.removeListener(ev, cb)
    // Once the last listener of either kind is gone, restore the patched
    // process methods.
    if (emitter.listeners('exit').length === 0 &&
        emitter.listeners('afterexit').length === 0) {
      unload()
    }
  }
  emitter.on(ev, cb)

  return remove
}

module.exports.unload = unload
|
||
|
|
// Undo everything load() did: detach signal listeners, restore the original
// process.emit / process.reallyExit, and decrement the global instance count.
function unload () {
  if (!loaded) {
    return
  }
  loaded = false

  signals.forEach(function (sig) {
    try {
      process.removeListener(sig, sigListeners[sig])
    } catch (er) {} // some signals cannot be listened to on this platform
  })
  process.emit = originalProcessEmit
  process.reallyExit = originalProcessReallyExit
  emitter.count -= 1
}

// Fire `event` on the shared emitter at most once per process lifetime.
function emit (event, code, signal) {
  if (emitter.emitted[event]) {
    return
  }
  emitter.emitted[event] = true
  emitter.emit(event, code, signal)
}
|
||
|
|
|
||
|
|
// { <signal>: <listener fn>, ... }
// One listener per trapped signal; installed by load(), removed by unload().
var sigListeners = {}
signals.forEach(function (sig) {
  sigListeners[sig] = function listener () {
    // If there are no other listeners, an exit is coming!
    // Simplest way: remove us and then re-send the signal.
    // We know that this will kill the process, so we can
    // safely emit now.
    var listeners = process.listeners(sig)
    if (listeners.length === emitter.count) {
      unload()
      emit('exit', null, sig)
      /* istanbul ignore next */
      emit('afterexit', null, sig)
      /* istanbul ignore next */
      if (isWin && sig === 'SIGHUP') {
        // "SIGHUP" throws an `ENOSYS` error on Windows,
        // so use a supported signal instead
        sig = 'SIGINT'
      }
      process.kill(process.pid, sig)
    }
  }
})
|
||
|
|
|
||
|
|
// Expose the list of signals this library traps on the current platform.
module.exports.signals = function () {
  return signals
}

module.exports.load = load

// Whether the process-wide hooks are currently installed.
var loaded = false

// Install the process-wide hooks: attach one listener per signal and
// monkey-patch process.emit / process.reallyExit so that normal exits
// are observed as well.
function load () {
  if (loaded) {
    return
  }
  loaded = true

  // This is the number of onSignalExit's that are in play.
  // It's important so that we can count the correct number of
  // listeners on signals, and don't wait for the other one to
  // handle it instead of us.
  emitter.count += 1

  // Keep only the signals that can actually be listened to here;
  // process.on throws for unsupported signals on some platforms.
  signals = signals.filter(function (sig) {
    try {
      process.on(sig, sigListeners[sig])
      return true
    } catch (er) {
      return false
    }
  })

  process.emit = processEmit
  process.reallyExit = processReallyExit
}
|
||
|
|
|
||
|
|
// Patched process.reallyExit: emit our 'exit'/'afterexit' events before
// delegating to the saved original implementation.
var originalProcessReallyExit = process.reallyExit
function processReallyExit (code) {
  process.exitCode = code || 0
  emit('exit', process.exitCode, null)
  /* istanbul ignore next */
  emit('afterexit', process.exitCode, null)
  /* istanbul ignore next */
  originalProcessReallyExit.call(process, process.exitCode)
}

// Patched process.emit: intercept the 'exit' event so our listeners run
// (at most once) with the final exit code; every other event passes
// straight through to the original emit.
var originalProcessEmit = process.emit
function processEmit (ev, arg) {
  if (ev === 'exit') {
    if (arg !== undefined) {
      process.exitCode = arg
    }
    var ret = originalProcessEmit.apply(this, arguments)
    emit('exit', process.exitCode, null)
    /* istanbul ignore next */
    emit('afterexit', process.exitCode, null)
    return ret
  } else {
    return originalProcessEmit.apply(this, arguments)
  }
}
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 280:
|
||
|
|
/***/ (function(module, exports) {
|
||
|
|
|
||
|
|
// Bundled copy of the `semver` package (CommonJS build).
exports = module.exports = SemVer

// Debug logging, enabled when NODE_DEBUG contains "semver".
var debug
/* istanbul ignore next */
if (typeof process === 'object' &&
    process.env &&
    process.env.NODE_DEBUG &&
    /\bsemver\b/i.test(process.env.NODE_DEBUG)) {
  debug = function () {
    var args = Array.prototype.slice.call(arguments, 0)
    args.unshift('SEMVER')
    console.log.apply(console, args)
  }
} else {
  debug = function () {}
}

// Note: this is the semver.org version of the spec that it implements
// Not necessarily the package version of this code.
exports.SEMVER_SPEC_VERSION = '2.0.0'

var MAX_LENGTH = 256
var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
  /* istanbul ignore next */ 9007199254740991

// Max safe segment length for coercion.
var MAX_SAFE_COMPONENT_LENGTH = 16

// The actual regexps go on exports.re
// `src[i]` holds the pattern source string, `re[i]` the compiled RegExp.
var re = exports.re = []
var src = exports.src = []
var R = 0

// The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings.

// ## Numeric Identifier
// A single `0`, or a non-zero digit followed by zero or more digits.

var NUMERICIDENTIFIER = R++
src[NUMERICIDENTIFIER] = '0|[1-9]\\d*'
var NUMERICIDENTIFIERLOOSE = R++
src[NUMERICIDENTIFIERLOOSE] = '[0-9]+'

// ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens.

var NONNUMERICIDENTIFIER = R++
src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'

// ## Main Version
// Three dot-separated numeric identifiers.

var MAINVERSION = R++
src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' +
                   '(' + src[NUMERICIDENTIFIER] + ')\\.' +
                   '(' + src[NUMERICIDENTIFIER] + ')'

var MAINVERSIONLOOSE = R++
src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
                        '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
                        '(' + src[NUMERICIDENTIFIERLOOSE] + ')'

// ## Pre-release Version Identifier
// A numeric identifier, or a non-numeric identifier.

var PRERELEASEIDENTIFIER = R++
src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] +
                            '|' + src[NONNUMERICIDENTIFIER] + ')'

var PRERELEASEIDENTIFIERLOOSE = R++
src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] +
                                 '|' + src[NONNUMERICIDENTIFIER] + ')'

// ## Pre-release Version
// Hyphen, followed by one or more dot-separated pre-release version
// identifiers.

var PRERELEASE = R++
src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] +
                  '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))'

var PRERELEASELOOSE = R++
src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] +
                       '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))'

// ## Build Metadata Identifier
// Any combination of digits, letters, or hyphens.

var BUILDIDENTIFIER = R++
src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+'

// ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata
// identifiers.

var BUILD = R++
src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] +
             '(?:\\.' + src[BUILDIDENTIFIER] + ')*))'

// ## Full Version String
// A main version, followed optionally by a pre-release version and
// build metadata.

// Note that the only major, minor, patch, and pre-release sections of
// the version string are capturing groups.  The build metadata is not a
// capturing group, because it should not ever be used in version
// comparison.

var FULL = R++
var FULLPLAIN = 'v?' + src[MAINVERSION] +
                src[PRERELEASE] + '?' +
                src[BUILD] + '?'

src[FULL] = '^' + FULLPLAIN + '$'

// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
// common in the npm registry.
var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] +
                 src[PRERELEASELOOSE] + '?' +
                 src[BUILD] + '?'

var LOOSE = R++
src[LOOSE] = '^' + LOOSEPLAIN + '$'

var GTLT = R++
src[GTLT] = '((?:<|>)?=?)'

// Something like "2.*" or "1.2.x".
// Note that "x.x" is a valid xRange identifer, meaning "any version"
// Only the first item is strictly required.
var XRANGEIDENTIFIERLOOSE = R++
src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'
var XRANGEIDENTIFIER = R++
src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*'

var XRANGEPLAIN = R++
src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' +
                   '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
                   '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
                   '(?:' + src[PRERELEASE] + ')?' +
                   src[BUILD] + '?' +
                   ')?)?'

var XRANGEPLAINLOOSE = R++
src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
                        '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
                        '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
                        '(?:' + src[PRERELEASELOOSE] + ')?' +
                        src[BUILD] + '?' +
                        ')?)?'

var XRANGE = R++
src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$'
var XRANGELOOSE = R++
src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$'

// Coercion.
// Extract anything that could conceivably be a part of a valid semver
var COERCE = R++
src[COERCE] = '(?:^|[^\\d])' +
              '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' +
              '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
              '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
              '(?:$|[^\\d])'

// Tilde ranges.
// Meaning is "reasonably at or greater than"
var LONETILDE = R++
src[LONETILDE] = '(?:~>?)'

var TILDETRIM = R++
src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+'
re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g')
var tildeTrimReplace = '$1~'

var TILDE = R++
src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$'
var TILDELOOSE = R++
src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$'

// Caret ranges.
// Meaning is "at least and backwards compatible with"
var LONECARET = R++
src[LONECARET] = '(?:\\^)'

var CARETTRIM = R++
src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+'
re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g')
var caretTrimReplace = '$1^'

var CARET = R++
src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$'
var CARETLOOSE = R++
src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$'

// A simple gt/lt/eq thing, or just "" to indicate "any version"
var COMPARATORLOOSE = R++
src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$'
var COMPARATOR = R++
src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$'

// An expression to strip any whitespace between the gtlt and the thing
// it modifies, so that `> 1.2.3` ==> `>1.2.3`
var COMPARATORTRIM = R++
src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] +
                      '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')'

// this one has to use the /g flag
re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g')
var comparatorTrimReplace = '$1$2$3'

// Something like `1.2.3 - 1.2.4`
// Note that these all use the loose form, because they'll be
// checked against either the strict or loose comparator form
// later.
var HYPHENRANGE = R++
src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' +
                   '\\s+-\\s+' +
                   '(' + src[XRANGEPLAIN] + ')' +
                   '\\s*$'

var HYPHENRANGELOOSE = R++
src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' +
                        '\\s+-\\s+' +
                        '(' + src[XRANGEPLAINLOOSE] + ')' +
                        '\\s*$'

// Star ranges basically just allow anything at all.
var STAR = R++
src[STAR] = '(<|>)?=?\\s*\\*'

// Compile to actual regexp objects.
// All are flag-free, unless they were created above with a flag.
for (var i = 0; i < R; i++) {
  debug(i, src[i])
  if (!re[i]) {
    re[i] = new RegExp(src[i])
  }
}
|
||
|
|
|
||
|
|
exports.parse = parse
// Parse `version` into a SemVer instance. Returns null (never throws) when
// the input is not a string, is too long, or does not match the grammar.
function parse (version, options) {
  if (!options || typeof options !== 'object') {
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  // Already parsed: hand it straight back.
  if (version instanceof SemVer) {
    return version
  }

  if (typeof version !== 'string') {
    return null
  }
  if (version.length > MAX_LENGTH) {
    return null
  }

  var pattern = options.loose ? re[LOOSE] : re[FULL]
  if (!pattern.test(version)) {
    return null
  }

  try {
    return new SemVer(version, options)
  } catch (er) {
    return null
  }
}
|
||
|
|
|
||
|
|
exports.valid = valid
// Return the normalized version string, or null when `version` doesn't parse.
function valid (version, options) {
  var parsed = parse(version, options)
  if (!parsed) {
    return null
  }
  return parsed.version
}
|
||
|
|
|
||
|
|
exports.clean = clean
// Like valid(), but first strips surrounding whitespace and any leading
// '=' / 'v' characters from the input.
function clean (version, options) {
  var stripped = version.trim().replace(/^[=v]+/, '')
  var parsed = parse(stripped, options)
  return parsed ? parsed.version : null
}
|
||
|
|
|
||
|
|
exports.SemVer = SemVer

// SemVer constructor: parses `version` and exposes major/minor/patch as
// numbers plus prerelease/build identifier arrays. Throws TypeError on
// invalid input (parse() is the non-throwing wrapper).
function SemVer (version, options) {
  // Legacy calling convention: a boolean `options` means { loose: options }.
  if (!options || typeof options !== 'object') {
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }
  if (version instanceof SemVer) {
    if (version.loose === options.loose) {
      // Same looseness: reuse the existing instance.
      return version
    } else {
      // Different looseness: re-parse from its normalized string form.
      version = version.version
    }
  } else if (typeof version !== 'string') {
    throw new TypeError('Invalid Version: ' + version)
  }

  if (version.length > MAX_LENGTH) {
    throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')
  }

  // Allow calling without `new`.
  if (!(this instanceof SemVer)) {
    return new SemVer(version, options)
  }

  debug('SemVer', version, options)
  this.options = options
  this.loose = !!options.loose

  var m = version.trim().match(options.loose ? re[LOOSE] : re[FULL])

  if (!m) {
    throw new TypeError('Invalid Version: ' + version)
  }

  // The exact input string, before normalization.
  this.raw = version

  // these are actually numbers
  this.major = +m[1]
  this.minor = +m[2]
  this.patch = +m[3]

  if (this.major > MAX_SAFE_INTEGER || this.major < 0) {
    throw new TypeError('Invalid major version')
  }

  if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {
    throw new TypeError('Invalid minor version')
  }

  if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {
    throw new TypeError('Invalid patch version')
  }

  // numberify any prerelease numeric ids
  if (!m[4]) {
    this.prerelease = []
  } else {
    this.prerelease = m[4].split('.').map(function (id) {
      if (/^[0-9]+$/.test(id)) {
        var num = +id
        if (num >= 0 && num < MAX_SAFE_INTEGER) {
          return num
        }
      }
      return id
    })
  }

  this.build = m[5] ? m[5].split('.') : []
  // Computes and caches this.version (the normalized string).
  this.format()
}
|
||
|
|
|
||
|
|
// Recompute the normalized version string from the parsed components
// (build metadata is intentionally not included) and cache it.
SemVer.prototype.format = function () {
  this.version = [this.major, this.minor, this.patch].join('.')
  if (this.prerelease.length > 0) {
    this.version += '-' + this.prerelease.join('.')
  }
  return this.version
}

// The string form is the cached normalized version.
SemVer.prototype.toString = function () {
  return this.version
}
|
||
|
|
|
||
|
|
// Full comparison: main version first, prerelease as the tie-breaker.
// Returns -1, 0 or 1.
SemVer.prototype.compare = function (other) {
  debug('SemVer.compare', this.version, this.options, other)
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }
  var mainOrder = this.compareMain(other)
  if (mainOrder !== 0) {
    return mainOrder
  }
  return this.comparePre(other)
}
|
||
|
|
|
||
|
|
// Compare only major.minor.patch, ignoring prerelease identifiers.
SemVer.prototype.compareMain = function (other) {
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }

  var byMajor = compareIdentifiers(this.major, other.major)
  if (byMajor !== 0) {
    return byMajor
  }
  var byMinor = compareIdentifiers(this.minor, other.minor)
  if (byMinor !== 0) {
    return byMinor
  }
  return compareIdentifiers(this.patch, other.patch)
}
|
||
|
|
|
||
|
|
// Compare prerelease identifier lists. A version WITHOUT a prerelease
// sorts higher than one with; otherwise identifiers are compared pairwise.
SemVer.prototype.comparePre = function (other) {
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }

  // NOT having a prerelease is > having one
  var thisHasPre = this.prerelease.length > 0
  var otherHasPre = other.prerelease.length > 0
  if (thisHasPre && !otherHasPre) {
    return -1
  }
  if (!thisHasPre && otherHasPre) {
    return 1
  }
  if (!thisHasPre && !otherHasPre) {
    return 0
  }

  // Both have prerelease identifiers: compare element by element.
  for (var idx = 0; ; idx++) {
    var left = this.prerelease[idx]
    var right = other.prerelease[idx]
    debug('prerelease compare', idx, left, right)
    if (left === undefined && right === undefined) {
      return 0            // identical lists
    }
    if (right === undefined) {
      return 1            // this has more identifiers -> greater
    }
    if (left === undefined) {
      return -1           // other has more identifiers -> this is lesser
    }
    if (left !== right) {
      return compareIdentifiers(left, right)
    }
  }
}
|
||
|
|
|
||
|
|
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
// Mutates this instance in place and returns it; throws on an unknown
// `release` keyword. `identifier` names the prerelease tag (e.g. "beta").
SemVer.prototype.inc = function (release, identifier) {
  switch (release) {
    case 'premajor':
      this.prerelease.length = 0
      this.patch = 0
      this.minor = 0
      this.major++
      // Cascade into 'pre' to attach the prerelease identifier.
      this.inc('pre', identifier)
      break
    case 'preminor':
      this.prerelease.length = 0
      this.patch = 0
      this.minor++
      this.inc('pre', identifier)
      break
    case 'prepatch':
      // If this is already a prerelease, it will bump to the next version
      // drop any prereleases that might already exist, since they are not
      // relevant at this point.
      this.prerelease.length = 0
      this.inc('patch', identifier)
      this.inc('pre', identifier)
      break
    // If the input is a non-prerelease version, this acts the same as
    // prepatch.
    case 'prerelease':
      if (this.prerelease.length === 0) {
        this.inc('patch', identifier)
      }
      this.inc('pre', identifier)
      break

    case 'major':
      // If this is a pre-major version, bump up to the same major version.
      // Otherwise increment major.
      // 1.0.0-5 bumps to 1.0.0
      // 1.1.0 bumps to 2.0.0
      if (this.minor !== 0 ||
          this.patch !== 0 ||
          this.prerelease.length === 0) {
        this.major++
      }
      this.minor = 0
      this.patch = 0
      this.prerelease = []
      break
    case 'minor':
      // If this is a pre-minor version, bump up to the same minor version.
      // Otherwise increment minor.
      // 1.2.0-5 bumps to 1.2.0
      // 1.2.1 bumps to 1.3.0
      if (this.patch !== 0 || this.prerelease.length === 0) {
        this.minor++
      }
      this.patch = 0
      this.prerelease = []
      break
    case 'patch':
      // If this is not a pre-release version, it will increment the patch.
      // If it is a pre-release it will bump up to the same patch version.
      // 1.2.0-5 patches to 1.2.0
      // 1.2.0 patches to 1.2.1
      if (this.prerelease.length === 0) {
        this.patch++
      }
      this.prerelease = []
      break
    // This probably shouldn't be used publicly.
    // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
    case 'pre':
      if (this.prerelease.length === 0) {
        this.prerelease = [0]
      } else {
        // Increment the right-most numeric identifier, if any.
        var i = this.prerelease.length
        while (--i >= 0) {
          if (typeof this.prerelease[i] === 'number') {
            this.prerelease[i]++
            i = -2  // sentinel: marks that something was incremented
          }
        }
        if (i === -1) {
          // didn't increment anything
          this.prerelease.push(0)
        }
      }
      if (identifier) {
        // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
        // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
        if (this.prerelease[0] === identifier) {
          if (isNaN(this.prerelease[1])) {
            this.prerelease = [identifier, 0]
          }
        } else {
          this.prerelease = [identifier, 0]
        }
      }
      break

    default:
      throw new Error('invalid increment argument: ' + release)
  }
  this.format()
  this.raw = this.version
  return this
}
|
||
|
|
|
||
|
|
exports.inc = inc
// Functional wrapper around SemVer.prototype.inc: returns the bumped
// version string, or null when `version` is invalid.
// Supports the (version, release, identifier) call shape by shifting args.
function inc (version, release, loose, identifier) {
  if (typeof (loose) === 'string') {
    identifier = loose
    loose = undefined
  }

  try {
    var bumped = new SemVer(version, loose).inc(release, identifier)
    return bumped.version
  } catch (er) {
    return null
  }
}
|
||
|
|
|
||
|
|
exports.diff = diff
// Name the most significant component that differs between two versions:
// 'major'/'minor'/'patch' (prefixed with 'pre' when either side has a
// prerelease), 'prerelease' when only prerelease identifiers differ, or
// null when the versions are equal.
function diff (version1, version2) {
  if (eq(version1, version2)) {
    return null
  }

  var v1 = parse(version1)
  var v2 = parse(version2)
  var prefix = ''
  var defaultResult // stays undefined when neither side has a prerelease
  if (v1.prerelease.length || v2.prerelease.length) {
    prefix = 'pre'
    defaultResult = 'prerelease'
  }

  // Check components from most to least significant.
  var components = ['major', 'minor', 'patch']
  for (var i = 0; i < components.length; i++) {
    var key = components[i]
    if (v1[key] !== v2[key]) {
      return prefix + key
    }
  }

  return defaultResult // may be undefined
}
|
||
|
|
|
||
|
|
exports.compareIdentifiers = compareIdentifiers

var numeric = /^[0-9]+$/
// Spec ordering for a single identifier pair: numeric identifiers compare
// numerically and always sort before alphanumeric ones; otherwise compare
// as strings. Returns -1, 0 or 1.
function compareIdentifiers (a, b) {
  var aIsNumeric = numeric.test(a)
  var bIsNumeric = numeric.test(b)

  if (aIsNumeric && bIsNumeric) {
    a = +a
    b = +b
  }

  if (a === b) {
    return 0
  }
  if (aIsNumeric && !bIsNumeric) {
    return -1
  }
  if (bIsNumeric && !aIsNumeric) {
    return 1
  }
  return a < b ? -1 : 1
}
|
||
|
|
|
||
|
|
exports.rcompareIdentifiers = rcompareIdentifiers
// Reverse identifier comparison (descending order).
function rcompareIdentifiers (a, b) {
  return compareIdentifiers(b, a)
}
|
||
|
|
|
||
|
|
// Component accessors: each parses `a` (throwing on invalid input, as the
// SemVer constructor does) and returns the requested numeric component.

exports.major = major
function major (a, loose) {
  var parsed = new SemVer(a, loose)
  return parsed.major
}

exports.minor = minor
function minor (a, loose) {
  var parsed = new SemVer(a, loose)
  return parsed.minor
}

exports.patch = patch
function patch (a, loose) {
  var parsed = new SemVer(a, loose)
  return parsed.patch
}
|
||
|
|
|
||
|
|
exports.compare = compare
// Three-way comparison of two version strings (or SemVer instances).
function compare (a, b, loose) {
  var left = new SemVer(a, loose)
  var right = new SemVer(b, loose)
  return left.compare(right)
}

exports.compareLoose = compareLoose
// compare() with loose parsing forced on.
function compareLoose (a, b) {
  return compare(a, b, true)
}

exports.rcompare = rcompare
// Reverse comparison (descending order).
function rcompare (a, b, loose) {
  return compare(b, a, loose)
}
|
||
|
|
|
||
|
|
exports.sort = sort
// Sort a list of versions ascending, in place (Array#sort mutates).
function sort (list, loose) {
  var ascending = function (left, right) {
    return exports.compare(left, right, loose)
  }
  return list.sort(ascending)
}

exports.rsort = rsort
// Sort a list of versions descending, in place.
function rsort (list, loose) {
  var descending = function (left, right) {
    return exports.rcompare(left, right, loose)
  }
  return list.sort(descending)
}
|
||
|
|
|
||
|
|
// Boolean comparison predicates, all defined in terms of compare().

exports.gt = gt
function gt (a, b, loose) {
  var order = compare(a, b, loose)
  return order > 0
}

exports.lt = lt
function lt (a, b, loose) {
  var order = compare(a, b, loose)
  return order < 0
}

exports.eq = eq
function eq (a, b, loose) {
  var order = compare(a, b, loose)
  return order === 0
}

exports.neq = neq
function neq (a, b, loose) {
  var order = compare(a, b, loose)
  return order !== 0
}

exports.gte = gte
function gte (a, b, loose) {
  var order = compare(a, b, loose)
  return order >= 0
}

exports.lte = lte
function lte (a, b, loose) {
  var order = compare(a, b, loose)
  return order <= 0
}
|
||
|
|
|
||
|
|
exports.cmp = cmp
// Apply a comparison operator given as a string. '===' / '!==' compare the
// normalized version strings for strict identity; the remaining operators
// delegate to the semver-aware predicates. Throws on an unknown operator.
function cmp (a, op, b, loose) {
  if (op === '===') {
    if (typeof a === 'object') {
      a = a.version
    }
    if (typeof b === 'object') {
      b = b.version
    }
    return a === b
  }

  if (op === '!==') {
    if (typeof a === 'object') {
      a = a.version
    }
    if (typeof b === 'object') {
      b = b.version
    }
    return a !== b
  }

  // '' and '=' are treated as '=='.
  if (op === '' || op === '=' || op === '==') {
    return eq(a, b, loose)
  }
  if (op === '!=') {
    return neq(a, b, loose)
  }
  if (op === '>') {
    return gt(a, b, loose)
  }
  if (op === '>=') {
    return gte(a, b, loose)
  }
  if (op === '<') {
    return lt(a, b, loose)
  }
  if (op === '<=') {
    return lte(a, b, loose)
  }

  throw new TypeError('Invalid operator: ' + op)
}
|
||
|
|
|
||
|
|
exports.Comparator = Comparator
// A single comparator such as ">=1.2.3" (or "" meaning "any version").
// After construction: this.operator is one of '', '<', '<=', '>', '>=',
// this.semver is a SemVer or the ANY sentinel, this.value the string form.
function Comparator (comp, options) {
  // Legacy calling convention: a boolean `options` means { loose: options }.
  if (!options || typeof options !== 'object') {
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  if (comp instanceof Comparator) {
    if (comp.loose === !!options.loose) {
      // Same looseness: reuse the existing instance.
      return comp
    } else {
      // Different looseness: re-parse from its string form.
      comp = comp.value
    }
  }

  // Allow calling without `new`.
  if (!(this instanceof Comparator)) {
    return new Comparator(comp, options)
  }

  debug('comparator', comp, options)
  this.options = options
  this.loose = !!options.loose
  this.parse(comp)

  if (this.semver === ANY) {
    this.value = ''
  } else {
    this.value = this.operator + this.semver.version
  }

  debug('comp', this)
}
|
||
|
|
|
||
|
|
// Sentinel object meaning "matches any version" (compared by identity).
var ANY = {}
// Parse a comparator string like ">=1.2.3" into this.operator/this.semver.
// Throws TypeError when the string doesn't match the comparator grammar.
Comparator.prototype.parse = function (comp) {
  var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR]
  var m = comp.match(r)

  if (!m) {
    throw new TypeError('Invalid comparator: ' + comp)
  }

  // m[1] is the operator; a bare '=' is normalized away.
  this.operator = m[1]
  if (this.operator === '=') {
    this.operator = ''
  }

  // if it literally is just '>' or '' then allow anything.
  if (!m[2]) {
    this.semver = ANY
  } else {
    this.semver = new SemVer(m[2], this.options.loose)
  }
}
|
||
|
|
|
||
|
|
// Canonical string form, e.g. '>=1.2.3' (empty string for ANY).
Comparator.prototype.toString = function () {
  return this.value
}
|
||
|
|
|
||
|
|
// Check whether a single version satisfies this comparator.
// Accepts either a version string or a SemVer instance.
Comparator.prototype.test = function (version) {
  debug('Comparator.test', version, this.options.loose)

  // A bare ''/'*' comparator matches every version.
  if (this.semver === ANY) {
    return true
  }

  var subject = typeof version === 'string'
    ? new SemVer(version, this.options)
    : version

  return cmp(subject, this.operator, this.semver, this.options)
}
|
||
|
|
|
||
|
|
// True when this comparator and `comp` can both be satisfied by at
// least one common version. Throws if `comp` is not a Comparator.
Comparator.prototype.intersects = function (comp, options) {
  if (!(comp instanceof Comparator)) {
    throw new TypeError('a Comparator is required')
  }

  // Legacy callers pass a boolean `loose` flag instead of an options object.
  if (!options || typeof options !== 'object') {
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  var rangeTmp

  // An empty operator means "exactly this version": delegate to satisfies().
  if (this.operator === '') {
    rangeTmp = new Range(comp.value, options)
    return satisfies(this.value, rangeTmp, options)
  } else if (comp.operator === '') {
    rangeTmp = new Range(this.value, options)
    return satisfies(comp.semver, rangeTmp, options)
  }

  // Otherwise enumerate the geometric cases where two half-ranges overlap.
  var sameDirectionIncreasing =
    (this.operator === '>=' || this.operator === '>') &&
    (comp.operator === '>=' || comp.operator === '>')
  var sameDirectionDecreasing =
    (this.operator === '<=' || this.operator === '<') &&
    (comp.operator === '<=' || comp.operator === '<')
  var sameSemVer = this.semver.version === comp.semver.version
  var differentDirectionsInclusive =
    (this.operator === '>=' || this.operator === '<=') &&
    (comp.operator === '>=' || comp.operator === '<=')
  var oppositeDirectionsLessThan =
    cmp(this.semver, '<', comp.semver, options) &&
    ((this.operator === '>=' || this.operator === '>') &&
    (comp.operator === '<=' || comp.operator === '<'))
  var oppositeDirectionsGreaterThan =
    cmp(this.semver, '>', comp.semver, options) &&
    ((this.operator === '<=' || this.operator === '<') &&
    (comp.operator === '>=' || comp.operator === '>'))

  return sameDirectionIncreasing || sameDirectionDecreasing ||
    (sameSemVer && differentDirectionsInclusive) ||
    oppositeDirectionsLessThan || oppositeDirectionsGreaterThan
}
|
||
|
|
|
||
|
|
exports.Range = Range
// A full semver range: '||'-separated alternatives, each of which is a
// list of comparators that must all match.
function Range (range, options) {
  // Legacy callers pass a boolean `loose` flag instead of an options object.
  if (!options || typeof options !== 'object') {
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  if (range instanceof Range) {
    if (range.loose === !!options.loose &&
        range.includePrerelease === !!options.includePrerelease) {
      // Same parse options: reuse the already-parsed instance.
      return range
    } else {
      // Re-parse the raw text under the new options.
      return new Range(range.raw, options)
    }
  }

  if (range instanceof Comparator) {
    return new Range(range.value, options)
  }

  // Allow calling without `new`.
  if (!(this instanceof Range)) {
    return new Range(range, options)
  }

  this.options = options
  this.loose = !!options.loose
  this.includePrerelease = !!options.includePrerelease

  // First, split based on boolean or ||
  this.raw = range
  this.set = range.split(/\s*\|\|\s*/).map(function (range) {
    return this.parseRange(range.trim())
  }, this).filter(function (c) {
    // throw out any that are not relevant for whatever reason
    return c.length
  })

  if (!this.set.length) {
    throw new TypeError('Invalid SemVer Range: ' + range)
  }

  this.format()
}
|
||
|
|
|
||
|
|
// Rebuild the canonical string form (`this.range`) from the parsed
// comparator sets and return it.
Range.prototype.format = function () {
  var alternatives = []
  for (var i = 0; i < this.set.length; i++) {
    alternatives.push(this.set[i].join(' ').trim())
  }
  this.range = alternatives.join('||').trim()
  return this.range
}
|
||
|
|
|
||
|
|
// Canonical string form, e.g. '>=1.2.3 <2.0.0||>=3.0.0'.
Range.prototype.toString = function () {
  return this.range
}
|
||
|
|
|
||
|
|
// Parse one '||'-free alternative into an array of Comparator objects,
// desugaring hyphen ranges, tildes, carets and x-ranges along the way.
// The replace steps below are order-sensitive.
Range.prototype.parseRange = function (range) {
  var loose = this.options.loose
  range = range.trim()
  // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
  var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE]
  range = range.replace(hr, hyphenReplace)
  debug('hyphen replace', range)
  // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
  range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace)
  debug('comparator trim', range, re[COMPARATORTRIM])

  // `~ 1.2.3` => `~1.2.3`
  range = range.replace(re[TILDETRIM], tildeTrimReplace)

  // `^ 1.2.3` => `^1.2.3`
  range = range.replace(re[CARETTRIM], caretTrimReplace)

  // normalize spaces
  range = range.split(/\s+/).join(' ')

  // At this point, the range is completely trimmed and
  // ready to be split into comparators.

  var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR]
  var set = range.split(' ').map(function (comp) {
    return parseComparator(comp, this.options)
  }, this).join(' ').split(/\s+/)
  if (this.options.loose) {
    // in loose mode, throw out any that are not valid comparators
    set = set.filter(function (comp) {
      return !!comp.match(compRe)
    })
  }
  set = set.map(function (comp) {
    return new Comparator(comp, this.options)
  }, this)

  return set
}
|
||
|
|
|
||
|
|
// True when some alternative of this range overlaps some alternative of
// `range`: every comparator in the chosen pairing must intersect.
Range.prototype.intersects = function (range, options) {
  if (!(range instanceof Range)) {
    throw new TypeError('a Range is required')
  }

  return this.set.some(function (thisComparators) {
    return thisComparators.every(function (thisComparator) {
      return range.set.some(function (rangeComparators) {
        return rangeComparators.every(function (rangeComparator) {
          return thisComparator.intersects(rangeComparator, options)
        })
      })
    })
  })
}
|
||
|
|
|
||
|
|
// Mostly just for testing and legacy API reasons
exports.toComparators = toComparators
// Expand a range into arrays of raw comparator strings, one array per
// '||'-separated alternative.
function toComparators (range, options) {
  var parsed = new Range(range, options)
  return parsed.set.map(function (comparators) {
    var joined = comparators.map(function (c) {
      return c.value
    }).join(' ').trim()
    return joined.split(' ')
  })
}
|
||
|
|
|
||
|
|
// comprised of xranges, tildes, stars, and gtlt's at this point.
|
||
|
|
// already replaced the hyphen ranges
|
||
|
|
// turn into a set of JUST comparators.
|
||
|
|
// Desugar one comparator: collapse carets, tildes, x-ranges and stars
// (in that order) so only plain `<op><version>` comparators remain.
function parseComparator (comp, options) {
  debug('comp', comp, options)
  var steps = [replaceCarets, replaceTildes, replaceXRanges, replaceStars]
  var labels = ['caret', 'tildes', 'xrange', 'stars']
  for (var i = 0; i < steps.length; i++) {
    comp = steps[i](comp, options)
    debug(labels[i], comp)
  }
  return comp
}
|
||
|
|
|
||
|
|
// True when a version part is a wildcard: missing/empty, 'x'/'X', or '*'.
function isX (id) {
  if (!id) {
    return true
  }
  var lowered = id.toLowerCase()
  return lowered === 'x' || id === '*'
}
|
||
|
|
|
||
|
|
// ~, ~> --> * (any, kinda silly)
|
||
|
|
// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
|
||
|
|
// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
|
||
|
|
// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
|
||
|
|
// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
|
||
|
|
// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
|
||
|
|
// Apply tilde desugaring to every whitespace-separated comparator.
function replaceTildes (comp, options) {
  var pieces = comp.trim().split(/\s+/)
  var out = []
  for (var i = 0; i < pieces.length; i++) {
    out.push(replaceTilde(pieces[i], options))
  }
  return out.join(' ')
}
|
||
|
|
|
||
|
|
// Expand a single tilde comparator into plain >=/< comparators
// (see the mapping table in the comment block above).
function replaceTilde (comp, options) {
  var r = options.loose ? re[TILDELOOSE] : re[TILDE]
  return comp.replace(r, function (_, M, m, p, pr) {
    debug('tilde', comp, _, M, m, p, pr)
    var ret

    if (isX(M)) {
      // Bare '~' / '~*': matches anything.
      ret = ''
    } else if (isX(m)) {
      // ~2 == >=2.0.0 <3.0.0
      ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
    } else if (isX(p)) {
      // ~1.2 == >=1.2.0 <1.3.0
      ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
    } else if (pr) {
      debug('replaceTilde pr', pr)
      // Prerelease lower bound is preserved; upper bound is still next minor.
      ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
        ' <' + M + '.' + (+m + 1) + '.0'
    } else {
      // ~1.2.3 == >=1.2.3 <1.3.0
      ret = '>=' + M + '.' + m + '.' + p +
        ' <' + M + '.' + (+m + 1) + '.0'
    }

    debug('tilde return', ret)
    return ret
  })
}
|
||
|
|
|
||
|
|
// ^ --> * (any, kinda silly)
|
||
|
|
// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
|
||
|
|
// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
|
||
|
|
// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
|
||
|
|
// ^1.2.3 --> >=1.2.3 <2.0.0
|
||
|
|
// ^1.2.0 --> >=1.2.0 <2.0.0
|
||
|
|
// Apply caret desugaring to every whitespace-separated comparator.
function replaceCarets (comp, options) {
  var pieces = comp.trim().split(/\s+/)
  var out = []
  for (var i = 0; i < pieces.length; i++) {
    out.push(replaceCaret(pieces[i], options))
  }
  return out.join(' ')
}
|
||
|
|
|
||
|
|
// Expand a single caret comparator into plain >=/< comparators.
// Caret keeps the leftmost non-zero component fixed, hence the special
// handling of 0.x and 0.0.x below.
function replaceCaret (comp, options) {
  debug('caret', comp, options)
  var r = options.loose ? re[CARETLOOSE] : re[CARET]
  return comp.replace(r, function (_, M, m, p, pr) {
    debug('caret', comp, _, M, m, p, pr)
    var ret

    if (isX(M)) {
      // Bare '^': matches anything.
      ret = ''
    } else if (isX(m)) {
      // ^2 == >=2.0.0 <3.0.0
      ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
    } else if (isX(p)) {
      if (M === '0') {
        // ^0.2 == >=0.2.0 <0.3.0 (minor is the significant component)
        ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
      } else {
        // ^1.2 == >=1.2.0 <2.0.0
        ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'
      }
    } else if (pr) {
      debug('replaceCaret pr', pr)
      if (M === '0') {
        if (m === '0') {
          // ^0.0.3-pr == >=0.0.3-pr <0.0.4 (patch is significant)
          ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
            ' <' + M + '.' + m + '.' + (+p + 1)
        } else {
          // ^0.2.3-pr == >=0.2.3-pr <0.3.0
          ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
            ' <' + M + '.' + (+m + 1) + '.0'
        }
      } else {
        // ^1.2.3-pr == >=1.2.3-pr <2.0.0
        ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
          ' <' + (+M + 1) + '.0.0'
      }
    } else {
      debug('no pr')
      if (M === '0') {
        if (m === '0') {
          // ^0.0.3 == >=0.0.3 <0.0.4
          ret = '>=' + M + '.' + m + '.' + p +
            ' <' + M + '.' + m + '.' + (+p + 1)
        } else {
          // ^0.2.3 == >=0.2.3 <0.3.0
          ret = '>=' + M + '.' + m + '.' + p +
            ' <' + M + '.' + (+m + 1) + '.0'
        }
      } else {
        // ^1.2.3 == >=1.2.3 <2.0.0
        ret = '>=' + M + '.' + m + '.' + p +
          ' <' + (+M + 1) + '.0.0'
      }
    }

    debug('caret return', ret)
    return ret
  })
}
|
||
|
|
|
||
|
|
// Apply x-range desugaring to every whitespace-separated comparator.
// Note: unlike the tilde/caret variants, the input is not trimmed first.
function replaceXRanges (comp, options) {
  debug('replaceXRanges', comp, options)
  var pieces = comp.split(/\s+/)
  var out = []
  for (var i = 0; i < pieces.length; i++) {
    out.push(replaceXRange(pieces[i], options))
  }
  return out.join(' ')
}
|
||
|
|
|
||
|
|
// Expand a single x-range comparator (e.g. '>=1.x', '<2', '1.2.x')
// into plain comparators, or '*' / '<0.0.0' for the degenerate cases.
function replaceXRange (comp, options) {
  comp = comp.trim()
  var r = options.loose ? re[XRANGELOOSE] : re[XRANGE]
  return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
    debug('xRange', comp, ret, gtlt, M, m, p, pr)
    var xM = isX(M)
    var xm = xM || isX(m)
    var xp = xm || isX(p)
    var anyX = xp

    // '=1.x' behaves like a bare '1.x'.
    if (gtlt === '=' && anyX) {
      gtlt = ''
    }

    if (xM) {
      if (gtlt === '>' || gtlt === '<') {
        // nothing is allowed
        ret = '<0.0.0'
      } else {
        // nothing is forbidden
        ret = '*'
      }
    } else if (gtlt && anyX) {
      // we know patch is an x, because we have any x at all.
      // replace X with 0
      if (xm) {
        m = 0
      }
      p = 0

      if (gtlt === '>') {
        // >1 => >=2.0.0
        // >1.2 => >=1.3.0
        // >1.2.3 => >= 1.2.4
        gtlt = '>='
        if (xm) {
          M = +M + 1
          m = 0
          p = 0
        } else {
          m = +m + 1
          p = 0
        }
      } else if (gtlt === '<=') {
        // <=0.7.x is actually <0.8.0, since any 0.7.x should
        // pass. Similarly, <=7.x is actually <8.0.0, etc.
        gtlt = '<'
        if (xm) {
          M = +M + 1
        } else {
          m = +m + 1
        }
      }

      ret = gtlt + M + '.' + m + '.' + p
    } else if (xm) {
      // 1 / 1.x => the whole major series
      ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
    } else if (xp) {
      // 1.2 / 1.2.x => the whole minor series
      ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
    }

    debug('xRange return', ret)

    return ret
  })
}
|
||
|
|
|
||
|
|
// Because * is AND-ed with everything else in the comparator,
// and '' means "any version", just remove the *s entirely.
function replaceStars (comp, options) {
  debug('replaceStars', comp, options)
  // Looseness is ignored here. star is always as loose as it gets!
  return comp.trim().replace(re[STAR], '')
}
|
||
|
|
|
||
|
|
// This function is passed to string.replace(re[HYPHENRANGE])
// M, m, patch, prerelease, build
// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do
// 1.2 - 3.4 => >=1.2.0 <3.5.0
function hyphenReplace ($0,
  from, fM, fm, fp, fpr, fb,
  to, tM, tm, tp, tpr, tb) {
  // Lower bound: fill missing parts with zeros, keep it inclusive.
  if (isX(fM)) {
    from = ''
  } else if (isX(fm)) {
    from = '>=' + fM + '.0.0'
  } else if (isX(fp)) {
    from = '>=' + fM + '.' + fm + '.0'
  } else {
    from = '>=' + from
  }

  // Upper bound: a partial version means "anything in that series",
  // so it becomes an exclusive '<' of the next series.
  if (isX(tM)) {
    to = ''
  } else if (isX(tm)) {
    to = '<' + (+tM + 1) + '.0.0'
  } else if (isX(tp)) {
    to = '<' + tM + '.' + (+tm + 1) + '.0'
  } else if (tpr) {
    to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr
  } else {
    to = '<=' + to
  }

  return (from + ' ' + to).trim()
}
|
||
|
|
|
||
|
|
// A range matches when ANY of its '||'-alternatives matches ALL of its
// comparators. Falsy input never matches; strings are parsed first.
Range.prototype.test = function (version) {
  if (!version) {
    return false
  }

  if (typeof version === 'string') {
    version = new SemVer(version, this.options)
  }

  var options = this.options
  return this.set.some(function (comparators) {
    return testSet(comparators, version, options)
  })
}
|
||
|
|
|
||
|
|
// True when `version` matches every comparator in `set`, honoring the
// semver rule that a prerelease version only matches a range that
// explicitly mentions a prerelease of the same [major, minor, patch].
function testSet (set, version, options) {
  for (var i = 0; i < set.length; i++) {
    if (!set[i].test(version)) {
      return false
    }
  }

  if (version.prerelease.length && !options.includePrerelease) {
    // Find the set of versions that are allowed to have prereleases
    // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
    // That should allow `1.2.3-pr.2` to pass.
    // However, `1.2.4-alpha.notready` should NOT be allowed,
    // even though it's within the range set by the comparators.
    for (i = 0; i < set.length; i++) {
      debug(set[i].semver)
      if (set[i].semver === ANY) {
        continue
      }

      if (set[i].semver.prerelease.length > 0) {
        var allowed = set[i].semver
        if (allowed.major === version.major &&
            allowed.minor === version.minor &&
            allowed.patch === version.patch) {
          return true
        }
      }
    }

    // Version has a -pre, but it's not one of the ones we like.
    return false
  }

  return true
}
|
||
|
|
|
||
|
|
exports.satisfies = satisfies
// True when `version` lies inside `range`. An unparseable range
// matches nothing rather than throwing.
function satisfies (version, range, options) {
  var parsed
  try {
    parsed = new Range(range, options)
  } catch (er) {
    return false
  }
  return parsed.test(version)
}
|
||
|
|
|
||
|
|
exports.maxSatisfying = maxSatisfying
// Highest version in `versions` that satisfies `range`, or null when
// none does (or the range is invalid).
function maxSatisfying (versions, range, options) {
  var rangeObj
  try {
    rangeObj = new Range(range, options)
  } catch (er) {
    return null
  }

  var max = null
  var maxSV = null
  for (var i = 0; i < versions.length; i++) {
    var candidate = versions[i]
    if (!rangeObj.test(candidate)) {
      continue
    }
    if (!max || maxSV.compare(candidate) === -1) {
      // `candidate` is the new highest satisfying version so far.
      max = candidate
      maxSV = new SemVer(max, options)
    }
  }
  return max
}
|
||
|
|
|
||
|
|
exports.minSatisfying = minSatisfying
// Lowest version in `versions` that satisfies `range`, or null when
// none does (or the range is invalid).
function minSatisfying (versions, range, options) {
  var rangeObj
  try {
    rangeObj = new Range(range, options)
  } catch (er) {
    return null
  }

  var min = null
  var minSV = null
  for (var i = 0; i < versions.length; i++) {
    var candidate = versions[i]
    if (!rangeObj.test(candidate)) {
      continue
    }
    if (!min || minSV.compare(candidate) === 1) {
      // `candidate` is the new lowest satisfying version so far.
      min = candidate
      minSV = new SemVer(min, options)
    }
  }
  return min
}
|
||
|
|
|
||
|
|
exports.minVersion = minVersion
// Lowest version that could possibly satisfy `range`, or null.
function minVersion (range, loose) {
  range = new Range(range, loose)

  // Fast path: most ranges admit 0.0.0 or its first prerelease.
  var minver = new SemVer('0.0.0')
  if (range.test(minver)) {
    return minver
  }

  minver = new SemVer('0.0.0-0')
  if (range.test(minver)) {
    return minver
  }

  // Otherwise take the smallest lower bound across all alternatives.
  minver = null
  for (var i = 0; i < range.set.length; ++i) {
    var comparators = range.set[i]

    comparators.forEach(function (comparator) {
      // Clone to avoid manipulating the comparator's semver object.
      var compver = new SemVer(comparator.semver.version)
      switch (comparator.operator) {
        case '>':
          // '>x.y.z' excludes x.y.z itself: bump to the next candidate,
          // then fall through to the lower-bound handling below.
          if (compver.prerelease.length === 0) {
            compver.patch++
          } else {
            compver.prerelease.push(0)
          }
          compver.raw = compver.format()
          /* fallthrough */
        case '':
        case '>=':
          if (!minver || gt(minver, compver)) {
            minver = compver
          }
          break
        case '<':
        case '<=':
          /* Ignore maximum versions */
          break
        /* istanbul ignore next */
        default:
          throw new Error('Unexpected operation: ' + comparator.operator)
      }
    })
  }

  // The candidate still has to actually satisfy the range (upper bounds).
  if (minver && range.test(minver)) {
    return minver
  }

  return null
}
|
||
|
|
|
||
|
|
exports.validRange = validRange
// Canonical string form of `range`, or null when it does not parse.
function validRange (range, options) {
  try {
    var parsed = new Range(range, options)
    // Return '*' instead of '' so that truthiness works.
    // This will throw if it's invalid anyway
    return parsed.range || '*'
  } catch (er) {
    return null
  }
}
|
||
|
|
|
||
|
|
// Determine if version is less than all the versions possible in the range
exports.ltr = ltr
function ltr (version, range, options) {
  return outside(version, range, '<', options)
}

// Determine if version is greater than all the versions possible in the range.
exports.gtr = gtr
function gtr (version, range, options) {
  return outside(version, range, '>', options)
}
|
||
|
|
|
||
|
|
exports.outside = outside
// True when `version` lies entirely outside `range` on the `hilo` side:
// '>' means greater than every admissible version, '<' means less than.
function outside (version, range, hilo, options) {
  version = new SemVer(version, options)
  range = new Range(range, options)

  // The comparison helpers are chosen so the body below can be written
  // once, as if in "gtr" mode; '<' flips every comparison.
  var gtfn, ltefn, ltfn, comp, ecomp
  switch (hilo) {
    case '>':
      gtfn = gt
      ltefn = lte
      ltfn = lt
      comp = '>'
      ecomp = '>='
      break
    case '<':
      gtfn = lt
      ltefn = gte
      ltfn = gt
      comp = '<'
      ecomp = '<='
      break
    default:
      throw new TypeError('Must provide a hilo val of "<" or ">"')
  }

  // If it satisifes the range it is not outside
  if (satisfies(version, range, options)) {
    return false
  }

  // From now on, variable terms are as if we're in "gtr" mode.
  // but note that everything is flipped for the "ltr" function.

  for (var i = 0; i < range.set.length; ++i) {
    var comparators = range.set[i]

    var high = null
    var low = null

    // Find the highest and lowest comparator of this alternative.
    comparators.forEach(function (comparator) {
      if (comparator.semver === ANY) {
        comparator = new Comparator('>=0.0.0')
      }
      high = high || comparator
      low = low || comparator
      if (gtfn(comparator.semver, high.semver, options)) {
        high = comparator
      } else if (ltfn(comparator.semver, low.semver, options)) {
        low = comparator
      }
    })

    // If the edge version comparator has a operator then our version
    // isn't outside it
    if (high.operator === comp || high.operator === ecomp) {
      return false
    }

    // If the lowest version comparator has an operator and our version
    // is less than it then it isn't higher than the range
    if ((!low.operator || low.operator === comp) &&
        ltefn(version, low.semver)) {
      return false
    } else if (low.operator === ecomp && ltfn(version, low.semver)) {
      return false
    }
  }
  return true
}
|
||
|
|
|
||
|
|
exports.prerelease = prerelease
// Prerelease identifiers of `version`, or null when the version is
// invalid or has no prerelease part.
function prerelease (version, options) {
  var parsed = parse(version, options)
  if (!parsed || !parsed.prerelease.length) {
    return null
  }
  return parsed.prerelease
}
|
||
|
|
|
||
|
|
exports.intersects = intersects
// True when ranges `r1` and `r2` admit at least one common version.
function intersects (r1, r2, options) {
  return new Range(r1, options).intersects(new Range(r2, options))
}
|
||
|
|
|
||
|
|
exports.coerce = coerce
// Coerce a loose string such as '1.2' or 'v1.2.3.4' into a SemVer,
// returning null when nothing version-like is found.
function coerce (version) {
  if (version instanceof SemVer) {
    return version
  }

  if (typeof version !== 'string') {
    return null
  }

  var match = version.match(re[COERCE])
  if (match == null) {
    return null
  }

  // Missing minor/patch components default to zero.
  var major = match[1]
  var minor = match[2] || '0'
  var patch = match[3] || '0'
  return parse([major, minor, patch].join('.'))
}
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 299:
|
||
|
|
/***/ (function(__unusedmodule, exports) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
|
||
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||
|
|
|
||
|
|
const VERSION = "2.11.0";
|
||
|
|
|
||
|
|
/**
|
||
|
|
* Some “list” response that can be paginated have a different response structure
|
||
|
|
*
|
||
|
|
* They have a `total_count` key in the response (search also has `incomplete_results`,
|
||
|
|
* /installation/repositories also has `repository_selection`), as well as a key with
|
||
|
|
* the list of the items which name varies from endpoint to endpoint.
|
||
|
|
*
|
||
|
|
* Octokit normalizes these responses so that paginated results are always returned following
|
||
|
|
* the same structure. One challenge is that if the list response has only one page, no Link
|
||
|
|
* header is provided, so this header alone is not sufficient to check wether a response is
|
||
|
|
* paginated or not.
|
||
|
|
*
|
||
|
|
* We check if a "total_count" key is present in the response data, but also make sure that
|
||
|
|
* a "url" property is not, as the "Get the combined status for a specific ref" endpoint would
|
||
|
|
* otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref
|
||
|
|
*/
|
||
|
|
// Normalize a "search-style" list response (see the comment block above)
// so `response.data` is always the plain array of items, with the
// pagination metadata re-attached as properties on that array.
function normalizePaginatedListResponse(response) {
  // Responses without "total_count" (or with a "url" key) already have
  // the normal list shape and are returned untouched.
  const needsNormalization = "total_count" in response.data && !("url" in response.data);
  if (!needsNormalization) return response;

  // Remember the pagination metadata before stripping it off.
  const incompleteResults = response.data.incomplete_results;
  const repositorySelection = response.data.repository_selection;
  const totalCount = response.data.total_count;
  delete response.data.incomplete_results;
  delete response.data.repository_selection;
  delete response.data.total_count;

  // Whatever key remains first holds the actual list of items.
  const [namespaceKey] = Object.keys(response.data);
  response.data = response.data[namespaceKey];

  // Re-attach the metadata onto the normalized array.
  if (typeof incompleteResults !== "undefined") {
    response.data.incomplete_results = incompleteResults;
  }
  if (typeof repositorySelection !== "undefined") {
    response.data.repository_selection = repositorySelection;
  }
  response.data.total_count = totalCount;
  return response;
}
|
||
|
|
|
||
|
|
// Build an async-iterable over the pages of a paginated REST endpoint.
// `route` may be an endpoint function or a route string for octokit.request.
function iterator(octokit, route, parameters) {
  const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);
  const requestMethod = typeof route === "function" ? route : octokit.request;
  const method = options.method;
  const headers = options.headers;
  let url = options.url;
  return {
    [Symbol.asyncIterator]: () => ({
      async next() {
        // A missing `url` means the previous page was the last one.
        if (!url) return {
          done: true
        };
        const response = await requestMethod({
          method,
          url,
          headers
        });
        const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format:
        // '<https://api.github.com/users/aseemk/followers?page=2>; rel="next", <https://api.github.com/users/aseemk/followers?page=2>; rel="last"'
        // sets `url` to undefined if "next" URL is not present or `link` header is not set

        url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1];
        return {
          value: normalizedResponse
        };
      }

    })
  };
}
|
||
|
|
|
||
|
|
// Collect every page of `route` into a single array, optionally
// transformed per-page by `mapFn(response, done)`.
function paginate(octokit, route, parameters, mapFn) {
  // Support the (octokit, route, mapFn) call shape.
  if (typeof parameters === "function") {
    mapFn = parameters;
    parameters = undefined;
  }

  const pages = iterator(octokit, route, parameters)[Symbol.asyncIterator]();
  return gather(octokit, [], pages, mapFn);
}
|
||
|
|
|
||
|
|
// Drain `iterator` page by page, accumulating the (optionally mapped)
// results into a single array. Resolves once the iterator is exhausted
// or `mapFn` signals early termination.
function gather(octokit, results, iterator, mapFn) {
  return iterator.next().then(result => {
    if (result.done) {
      return results;
    }

    // `mapFn` may call `done()` to stop pagination after this page.
    let earlyExit = false;
    const done = () => {
      earlyExit = true;
    };

    const pageItems = mapFn ? mapFn(result.value, done) : result.value.data;
    results = results.concat(pageItems);

    return earlyExit ? results : gather(octokit, results, iterator, mapFn);
  });
}
|
||
|
|
|
||
|
|
// `paginate` with `.iterator` attached, usable without the plugin wiring.
const composePaginateRest = Object.assign(paginate, {
  iterator
});
|
||
|
|
|
||
|
|
/**
 * Octokit plugin entry point: returns the `paginate` API bound to the
 * given Octokit instance.
 *
 * @param octokit Octokit instance
 * @param options Options passed to Octokit constructor
 */

function paginateRest(octokit) {
  return {
    paginate: Object.assign(paginate.bind(null, octokit), {
      iterator: iterator.bind(null, octokit)
    })
  };
}
paginateRest.VERSION = VERSION;
|
||
|
|
|
||
|
|
exports.composePaginateRest = composePaginateRest;
|
||
|
|
exports.paginateRest = paginateRest;
|
||
|
|
//# sourceMappingURL=index.js.map
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 301:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
// TypeScript emit helper: runs a generator as an async function,
// resolving each yielded value before resuming. Do not edit by hand.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
||
|
|
// TypeScript emit helper: adapts a (sync or async) iterable for use in
// a `for await` loop. Do not edit by hand.
var __asyncValues = (this && this.__asyncValues) || function (o) {
    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
    var m = o[Symbol.asyncIterator], i;
    return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
    function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
    function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
};
|
||
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
|
exports.listCommitFactory = void 0;
|
||
|
|
const getCommitAsyncIterable_1 = __webpack_require__(32);
|
||
|
|
/** Return the list of commit since given sha (excluded)
|
||
|
|
* ordered from the oldest to the newest */
|
||
|
|
// Factory binding an octokit instance; the returned `listCommit` walks
// the branch's commits (newest first) until it reaches `sha`, then
// returns the collected commits oldest-first, excluding `sha` itself.
// NOTE: this is TypeScript-compiled output (for-await desugared via
// __asyncValues) — keep the error/cleanup plumbing intact.
function listCommitFactory(params) {
    const { octokit } = params;
    const { getCommitAsyncIterable } = getCommitAsyncIterable_1.getCommitAsyncIterableFactory({ octokit });
    function listCommit(params) {
        var e_1, _a;
        return __awaiter(this, void 0, void 0, function* () {
            const { owner, repo, branch, sha } = params;
            const commitAsyncIterable = getCommitAsyncIterable({
                owner,
                repo,
                branch
            });
            const commits = [];
            try {
                // Iterate newest-to-oldest; stop once the boundary sha is seen.
                for (var commitAsyncIterable_1 = __asyncValues(commitAsyncIterable), commitAsyncIterable_1_1; commitAsyncIterable_1_1 = yield commitAsyncIterable_1.next(), !commitAsyncIterable_1_1.done;) {
                    const commit = commitAsyncIterable_1_1.value;
                    if (commit.sha === sha) {
                        break;
                    }
                    commits.push(commit);
                }
            }
            catch (e_1_1) { e_1 = { error: e_1_1 }; }
            finally {
                try {
                    // Close the async iterator even on early break/throw.
                    if (commitAsyncIterable_1_1 && !commitAsyncIterable_1_1.done && (_a = commitAsyncIterable_1.return)) yield _a.call(commitAsyncIterable_1);
                }
                finally { if (e_1) throw e_1.error; }
            }
            // Oldest first, per the documented contract above.
            return commits.reverse();
        });
    }
    return { listCommit };
}
exports.listCommitFactory = listCommitFactory;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 303:
|
||
|
|
/***/ (function(__unusedmodule, exports) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
|
/**
|
||
|
|
*
|
||
|
|
* Let's say we have this function declared somewhere.
|
||
|
|
*
|
||
|
|
* function get__filename(){
|
||
|
|
* return get_caller_file_path();
|
||
|
|
* }
|
||
|
|
*
|
||
|
|
* Then we can assert that:
|
||
|
|
*
|
||
|
|
* get__filename() === __filename
|
||
|
|
*
|
||
|
|
*
|
||
|
|
*/
|
||
|
|
/**
 * Return the absolute path of the file that called the function which
 * invoked get_caller_file_path (see the usage example documented above).
 * Works by temporarily overriding V8's Error.prepareStackTrace so that
 * `error.stack` is an array of CallSite objects instead of a string.
 * Returns "" if the stack cannot be inspected.
 */
function get_caller_file_path() {
    //NOTE: Cannot be moved to the index file.
    //The function can't be declared and used
    //in the same file.
    var prepareStackTraceBackup = Error.prepareStackTrace;
    var callerFile = "";
    try {
        var error = new Error();
        // With this override, accessing error.stack yields the raw CallSite array.
        Error.prepareStackTrace = function (_, stack) { return stack; };
        // Drop the frame of get_caller_file_path itself (result intentionally unused).
        error.stack.shift().getFileName();
        // Frame of the function that wraps this helper (e.g. get__filename).
        var fileImportedFrom = error.stack.shift().getFileName();
        while (error.stack.length) {
            var fileName = error.stack.shift().getFileName();
            if (fileName === callerFile) {
                break;
            }
            callerFile = fileName;
            // Stop as soon as we leave the wrapper's own file.
            if (fileImportedFrom !== callerFile) {
                break;
            }
        }
    }
    catch (_a) { }
    // Always restore the engine's original stack formatter.
    Error.prepareStackTrace = prepareStackTraceBackup;
    return callerFile;
}
exports.get_caller_file_path = get_caller_file_path;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 323:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
|
||
|
|
/**
 * Duck-type checks for Node.js streams (the `is-stream` package).
 * A value counts as a stream when it is a non-null object with a `pipe` method.
 */
function isStream(stream) {
	return stream !== null && typeof stream === 'object' && typeof stream.pipe === 'function';
}

module.exports = isStream;

/** Writable stream: has `_write` and a `_writableState`, and `writable` is not false. */
isStream.writable = function (stream) {
	if (!isStream(stream)) return false;
	if (stream.writable === false) return false;
	return typeof stream._write === 'function' && typeof stream._writableState === 'object';
};

/** Readable stream: has `_read` and a `_readableState`, and `readable` is not false. */
isStream.readable = function (stream) {
	if (!isStream(stream)) return false;
	if (stream.readable === false) return false;
	return typeof stream._read === 'function' && typeof stream._readableState === 'object';
};

/** Duplex stream: both writable and readable. */
isStream.duplex = function (stream) {
	return isStream.writable(stream) && isStream.readable(stream);
};

/** Transform stream: a duplex with `_transform` and a `_transformState`. */
isStream.transform = function (stream) {
	return isStream.duplex(stream) && typeof stream._transform === 'function' && typeof stream._transformState === 'object';
};
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 356:
|
||
|
|
/***/ (function(__unusedmodule, exports) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
|
||
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||
|
|
|
||
|
|
/*!
|
||
|
|
* is-plain-object <https://github.com/jonschlinkert/is-plain-object>
|
||
|
|
*
|
||
|
|
* Copyright (c) 2014-2017, Jon Schlinkert.
|
||
|
|
* Released under the MIT License.
|
||
|
|
*/
|
||
|
|
|
||
|
|
/** True when `o` stringifies to '[object Object]' via Object.prototype.toString. */
function isObject(o) {
  return Object.prototype.toString.call(o) === '[object Object]';
}

/**
 * True for plain objects only: object literals, `new Object()`, and
 * `Object.create(null)`. Class instances, arrays, Dates, etc. are rejected.
 */
function isPlainObject(o) {
  if (!isObject(o)) {
    return false;
  }

  // No constructor at all (e.g. Object.create(null)) is still "plain".
  const ctor = o.constructor;
  if (ctor === undefined) {
    return true;
  }

  // A constructor whose prototype is not itself a plain object means a custom class.
  const prot = ctor.prototype;
  if (!isObject(prot)) {
    return false;
  }

  // The prototype of a genuine Object carries Object-specific methods.
  if (!prot.hasOwnProperty('isPrototypeOf')) {
    return false;
  }

  return true;
}
|
||
|
|
|
||
|
|
exports.isPlainObject = isPlainObject;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 357:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
module.exports = require("assert");
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 363:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
module.exports = register;
|
||
|
|
|
||
|
|
/**
 * Run `method(options)` wrapped by all hooks registered under `name`
 * (before-after-hook style). `name` may be an array of hook names, in
 * which case the hooks are applied from last to first.
 * Always returns a Promise. Throws synchronously if `method` is not a function.
 */
function register(state, name, method, options) {
  if (typeof method !== "function") {
    throw new Error("method for before hook must be a function");
  }

  options = options || {};

  // Multiple names: fold them (reversed, mutating `name` as the original did)
  // into nested register() calls and invoke the outermost one.
  if (Array.isArray(name)) {
    return name.reverse().reduce(
      (callback, hookName) => register.bind(null, state, hookName, callback, options),
      method
    )();
  }

  return Promise.resolve().then(() => {
    const hooks = state.registry[name];

    // Nothing registered for this name: call the method directly.
    if (!hooks) {
      return method(options);
    }

    // Wrap the method with each registered hook, then invoke the chain.
    return hooks.reduce(
      (wrapped, registered) => registered.hook.bind(null, wrapped, options),
      method
    )();
  });
}
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 385:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
|
||
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||
|
|
|
||
|
|
var isPlainObject = __webpack_require__(356);
|
||
|
|
var universalUserAgent = __webpack_require__(562);
|
||
|
|
|
||
|
|
/**
 * Return a copy of `object` with every key lower-cased.
 * Falsy input (undefined/null) yields an empty object.
 * On case-colliding keys the last one enumerated wins.
 */
function lowercaseKeys(object) {
  if (!object) {
    return {};
  }

  const result = {};
  for (const key of Object.keys(object)) {
    result[key.toLowerCase()] = object[key];
  }
  return result;
}
|
||
|
|
|
||
|
|
/**
 * Shallow-copy `defaults`, then overlay `options` on top; keys whose values
 * are plain objects (per the `isPlainObject` module) and that already exist
 * in `defaults` are merged recursively, everything else is overwritten.
 * Neither input is mutated.
 */
function mergeDeep(defaults, options) {
  const result = Object.assign({}, defaults);

  for (const key of Object.keys(options)) {
    if (isPlainObject.isPlainObject(options[key])) {
      if (!(key in defaults)) {
        // New nested object: take it as-is.
        Object.assign(result, { [key]: options[key] });
      } else {
        // Both sides have the key and the new value is plain: recurse.
        result[key] = mergeDeep(defaults[key], options[key]);
      }
    } else {
      // Scalars, arrays, class instances: overwrite.
      Object.assign(result, { [key]: options[key] });
    }
  }

  return result;
}
|
||
|
|
|
||
|
|
/**
 * Delete every own property of `obj` whose value is `undefined`
 * (null and other falsy values are kept). Mutates and returns `obj`.
 */
function removeUndefinedProperties(obj) {
  for (const key of Object.keys(obj)) {
    if (obj[key] === undefined) {
      delete obj[key];
    }
  }
  return obj;
}
|
||
|
|
|
||
|
|
/**
 * Merge endpoint defaults with a route ("GET /repos/{owner}/{repo}" or a
 * plain options object) and extra options into a single options object.
 * Header keys are lower-cased and `undefined` values dropped before the
 * deep merge; `mediaType.previews` arrays from both sides are unioned.
 * NOTE(review): when `defaults` is provided it is expected to carry a
 * `mediaType.previews` array (as built by withDefaults) — confirm callers.
 */
function merge(defaults, route, options) {
    if (typeof route === "string") {
        // Route string may be "METHOD url" or just "url".
        let [method, url] = route.split(" ");
        options = Object.assign(url ? {
            method,
            url
        } : {
            url: method
        }, options);
    } else {
        options = Object.assign({}, route);
    } // lowercase header names before merging with defaults to avoid duplicates

    options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging

    removeUndefinedProperties(options);
    removeUndefinedProperties(options.headers);
    const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten

    if (defaults && defaults.mediaType.previews.length) {
        // Prepend default previews not already requested by the options.
        mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);
    }

    // Normalize preview names: strip a "-preview" suffix if present.
    mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, ""));
    return mergedOptions;
}
|
||
|
|
|
||
|
|
/**
 * Append `parameters` to `url` as a query string ("?" or "&" chosen by
 * whether the url already has a query part). The special "q" parameter is
 * split on "+" and each piece encoded individually so "+" separators survive.
 * Returns `url` unchanged when there are no parameters.
 */
function addQueryParameters(url, parameters) {
  const separator = /\?/.test(url) ? "&" : "?";
  const names = Object.keys(parameters);

  if (names.length === 0) {
    return url;
  }

  const query = names
    .map((name) =>
      name === "q"
        ? "q=" + parameters.q.split("+").map(encodeURIComponent).join("+")
        : `${name}=${encodeURIComponent(parameters[name])}`
    )
    .join("&");

  return url + separator + query;
}
|
||
|
|
|
||
|
|
// Matches one {…} template expression (non-greedy via the [^}] class).
const urlVariableRegex = /\{[^}]+\}/g;

/** Strip leading/trailing non-word chars from an expression and split on commas. */
function removeNonChars(variableName) {
  return variableName.replace(/^\W+|\W+$/g, "").split(/,/);
}

/**
 * List every variable name appearing in the {…} expressions of a URI
 * template, e.g. "/repos/{owner}/{repo}" -> ["owner", "repo"].
 * Returns [] when the url contains no expressions.
 */
function extractUrlVariableNames(url) {
  const matches = url.match(urlVariableRegex);
  if (!matches) {
    return [];
  }
  return matches.flatMap(removeNonChars);
}
|
||
|
|
|
||
|
|
/** Return a shallow copy of `object` without the keys listed in `keysToOmit`. */
function omit(object, keysToOmit) {
  const result = {};
  for (const key of Object.keys(object)) {
    if (!keysToOmit.includes(key)) {
      result[key] = object[key];
    }
  }
  return result;
}
|
||
|
|
|
||
|
|
// Based on https://github.com/bramstein/url-template, licensed under BSD
|
||
|
|
// TODO: create separate package.
|
||
|
|
//
|
||
|
|
// Copyright (c) 2012-2014, Bram Stein
|
||
|
|
// All rights reserved.
|
||
|
|
// Redistribution and use in source and binary forms, with or without
|
||
|
|
// modification, are permitted provided that the following conditions
|
||
|
|
// are met:
|
||
|
|
// 1. Redistributions of source code must retain the above copyright
|
||
|
|
// notice, this list of conditions and the following disclaimer.
|
||
|
|
// 2. Redistributions in binary form must reproduce the above copyright
|
||
|
|
// notice, this list of conditions and the following disclaimer in the
|
||
|
|
// documentation and/or other materials provided with the distribution.
|
||
|
|
// 3. The name of the author may not be used to endorse or promote products
|
||
|
|
// derived from this software without specific prior written permission.
|
||
|
|
// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
|
||
|
|
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||
|
|
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
|
||
|
|
// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
|
||
|
|
// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
||
|
|
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||
|
|
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
|
||
|
|
// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||
|
|
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
|
||
|
|
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||
|
|
|
||
|
|
/* istanbul ignore file */
|
||
|
|
/**
 * Percent-encode a string while leaving reserved URI characters and any
 * already-encoded %XX triplets untouched; "[" and "]" are restored after
 * encodeURI escapes them.
 */
function encodeReserved(str) {
  return str
    .split(/(%[0-9A-Fa-f]{2})/g)
    .map((part) => {
      // Segments captured by the split are existing %XX escapes: keep as-is.
      if (/%[0-9A-Fa-f]/.test(part)) {
        return part;
      }
      return encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]");
    })
    .join("");
}
|
||
|
|
|
||
|
|
/**
 * Like encodeURIComponent, but additionally escapes the characters
 * ! ' ( ) * which encodeURIComponent leaves alone (full RFC 3986 encoding).
 */
function encodeUnreserved(str) {
  return encodeURIComponent(str).replace(/[!'()*]/g, (c) => {
    const hex = c.charCodeAt(0).toString(16).toUpperCase();
    return "%" + hex;
  });
}
|
||
|
|
|
||
|
|
/**
 * Encode one template value: "+" and "#" operators allow reserved chars,
 * every other operator fully percent-encodes. When `key` is non-empty the
 * result is rendered as "key=value".
 */
function encodeValue(operator, value, key) {
  const allowReserved = operator === "+" || operator === "#";
  const encoded = allowReserved ? encodeReserved(value) : encodeUnreserved(value);

  if (key) {
    return encodeUnreserved(key) + "=" + encoded;
  }
  return encoded;
}
|
||
|
|
|
||
|
|
/** True for every value except null and undefined (0, "" and false count as defined). */
function isDefined(value) {
  // Loose != null matches exactly null and undefined.
  return value != null;
}
|
||
|
|
|
||
|
|
/** True for the URI-template operators that render values as key=value pairs. */
function isKeyOperator(operator) {
  return [";", "&", "?"].includes(operator);
}
|
||
|
|
|
||
|
|
/**
 * Expand a single template variable `key` from `context` under `operator`
 * (URI-template / RFC 6570-style expansion), returning the list of encoded
 * string fragments. `modifier` is either a prefix length (":N") digits
 * string or "*" for the explode modifier.
 */
function getValues(context, operator, key, modifier) {
    var value = context[key],
        result = [];

    if (isDefined(value) && value !== "") {
        if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
            value = value.toString();

            // Prefix modifier ":N" truncates the value to N characters.
            if (modifier && modifier !== "*") {
                value = value.substring(0, parseInt(modifier, 10));
            }

            result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
        } else {
            if (modifier === "*") {
                // Explode modifier: each array item / object entry becomes its own fragment.
                if (Array.isArray(value)) {
                    value.filter(isDefined).forEach(function (value) {
                        result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
                    });
                } else {
                    Object.keys(value).forEach(function (k) {
                        if (isDefined(value[k])) {
                            result.push(encodeValue(operator, value[k], k));
                        }
                    });
                }
            } else {
                // No explode: join items/entries with commas into a single fragment.
                const tmp = [];

                if (Array.isArray(value)) {
                    value.filter(isDefined).forEach(function (value) {
                        tmp.push(encodeValue(operator, value));
                    });
                } else {
                    Object.keys(value).forEach(function (k) {
                        if (isDefined(value[k])) {
                            tmp.push(encodeUnreserved(k));
                            tmp.push(encodeValue(operator, value[k].toString()));
                        }
                    });
                }

                if (isKeyOperator(operator)) {
                    result.push(encodeUnreserved(key) + "=" + tmp.join(","));
                } else if (tmp.length !== 0) {
                    result.push(tmp.join(","));
                }
            }
        }
    } else {
        // Undefined/empty handling depends on the operator.
        if (operator === ";") {
            if (isDefined(value)) {
                result.push(encodeUnreserved(key));
            }
        } else if (value === "" && (operator === "&" || operator === "?")) {
            result.push(encodeUnreserved(key) + "=");
        } else if (value === "") {
            result.push("");
        }
    }

    return result;
}
|
||
|
|
|
||
|
|
/** Wrap a URI template string in an object exposing `expand(context)`. */
function parseUrl(template) {
  const boundExpand = expand.bind(null, template);
  return { expand: boundExpand };
}
|
||
|
|
|
||
|
|
/**
 * Expand a URI template against `context` (RFC 6570-style). Each {expr}
 * is parsed into an optional leading operator and a comma-separated list
 * of variables (with optional ":N" / "*" modifiers); literals outside the
 * braces are passed through encodeReserved.
 */
function expand(template, context) {
    var operators = ["+", "#", ".", "/", ";", "?", "&"];
    return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) {
        if (expression) {
            let operator = "";
            const values = [];

            // Pull off a leading operator character, if present.
            if (operators.indexOf(expression.charAt(0)) !== -1) {
                operator = expression.charAt(0);
                expression = expression.substr(1);
            }

            // Each variable may carry a ":N" prefix-length or "*" explode modifier.
            expression.split(/,/g).forEach(function (variable) {
                var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable);
                values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));
            });

            if (operator && operator !== "+") {
                // Pick the separator the operator mandates ("?" lists use "&").
                var separator = ",";

                if (operator === "?") {
                    separator = "&";
                } else if (operator !== "#") {
                    separator = operator;
                }

                return (values.length !== 0 ? operator : "") + values.join(separator);
            } else {
                return values.join(",");
            }
        } else {
            return encodeReserved(literal);
        }
    });
}
|
||
|
|
|
||
|
|
/**
 * Turn merged endpoint options into fetch-ready request options:
 * { method, url, headers } plus `body` and `request` when present.
 * Expands the URL template, applies mediaType format/previews to the
 * accept header, and routes leftover parameters into either the query
 * string (GET/HEAD) or the request body (other methods).
 */
function parse(options) {
    // https://fetch.spec.whatwg.org/#methods
    let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible

    let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}");
    let headers = Object.assign({}, options.headers);
    let body;
    let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later

    const urlVariableNames = extractUrlVariableNames(url);
    url = parseUrl(url).expand(parameters);

    // Relative URLs are resolved against the configured baseUrl.
    if (!/^http/.test(url)) {
        url = options.baseUrl + url;
    }

    // Parameters consumed by the URL template (plus baseUrl) are dropped.
    const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl");
    const remainingParameters = omit(parameters, omittedParameters);
    const isBinaryRequest = /application\/octet-stream/i.test(headers.accept);

    if (!isBinaryRequest) {
        if (options.mediaType.format) {
            // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw
            headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(",");
        }

        if (options.mediaType.previews.length) {
            // Merge previews already present in the accept header with the requested ones.
            const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || [];
            headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {
                const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json";
                return `application/vnd.github.${preview}-preview${format}`;
            }).join(",");
        }
    } // for GET/HEAD requests, set URL query parameters from remaining parameters
    // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters

    if (["GET", "HEAD"].includes(method)) {
        url = addQueryParameters(url, remainingParameters);
    } else {
        if ("data" in remainingParameters) {
            body = remainingParameters.data;
        } else {
            if (Object.keys(remainingParameters).length) {
                body = remainingParameters;
            } else {
                headers["content-length"] = 0;
            }
        }
    } // default content-type for JSON if body is set

    if (!headers["content-type"] && typeof body !== "undefined") {
        headers["content-type"] = "application/json; charset=utf-8";
    } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.
    // fetch does not allow to set `content-length` header, but we can set body to an empty string

    if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") {
        body = "";
    } // Only return body/request keys if present

    return Object.assign({
        method,
        url,
        headers
    }, typeof body !== "undefined" ? {
        body
    } : null, options.request ? {
        request: options.request
    } : null);
}
|
||
|
|
|
||
|
|
/** Merge `route`/`options` onto `defaults`, then parse into request options. */
function endpointWithDefaults(defaults, route, options) {
  const mergedOptions = merge(defaults, route, options);
  return parse(mergedOptions);
}
|
||
|
|
|
||
|
|
/**
 * Build an `endpoint` function pre-bound to DEFAULTS (oldDefaults merged
 * with newDefaults), exposing `.DEFAULTS`, `.defaults()` for further
 * chaining, and bound `.merge()` / `.parse()` helpers.
 */
function withDefaults(oldDefaults, newDefaults) {
  const DEFAULTS = merge(oldDefaults, newDefaults);
  const endpoint = endpointWithDefaults.bind(null, DEFAULTS);
  return Object.assign(endpoint, {
    DEFAULTS,
    defaults: withDefaults.bind(null, DEFAULTS),
    merge: merge.bind(null, DEFAULTS),
    parse
  });
}
|
||
|
|
|
||
|
|
// Library version, baked into the default user-agent header below.
const VERSION = "6.0.11";

const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.
// So we use RequestParameters and add method as additional required property.

const DEFAULTS = {
  method: "GET",
  baseUrl: "https://api.github.com",
  headers: {
    accept: "application/vnd.github.v3+json",
    "user-agent": userAgent
  },
  mediaType: {
    format: "",
    previews: []
  }
};

// The exported entry point: an endpoint function carrying the defaults above.
const endpoint = withDefaults(null, DEFAULTS);

exports.endpoint = endpoint;
//# sourceMappingURL=index.js.map
|
||
|
|
//# sourceMappingURL=index.js.map
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 389:
|
||
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
|
||
|
|
const fs = __webpack_require__(747);
|
||
|
|
const shebangCommand = __webpack_require__(866);
|
||
|
|
|
||
|
|
/**
 * Read the first bytes of `command` and extract its shebang interpreter
 * (via the `shebangCommand` helper). Returns null when the file has no
 * shebang; any filesystem error is treated as "no shebang" (best-effort).
 */
function readShebang(command) {
    // Read the first 150 bytes from the file
    const size = 150;
    let buffer;

    if (Buffer.alloc) {
        // Node.js v4.5+ / v5.10+
        buffer = Buffer.alloc(size);
    } else {
        // Old Node.js API
        buffer = new Buffer(size);
        buffer.fill(0); // zero-fill
    }

    let fd;

    try {
        fd = fs.openSync(command, 'r');
        fs.readSync(fd, buffer, 0, size, 0);
    } catch (e) { /* Empty */ } finally {
        // BUG FIX: close in `finally` so the descriptor is not leaked when
        // readSync throws (previously closeSync only ran on full success).
        if (fd !== undefined) {
            try {
                fs.closeSync(fd);
            } catch (e) { /* best-effort close */ }
        }
    }

    // Attempt to extract shebang (null is returned if not a shebang)
    return shebangCommand(buffer.toString());
}

module.exports = readShebang;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 392:
|
||
|
|
/***/ (function(__unusedmodule, exports) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
|
||
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||
|
|
|
||
|
|
function getUserAgent() {
|
||
|
|
if (typeof navigator === "object" && "userAgent" in navigator) {
|
||
|
|
return navigator.userAgent;
|
||
|
|
}
|
||
|
|
|
||
|
|
if (typeof process === "object" && "version" in process) {
|
||
|
|
return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`;
|
||
|
|
}
|
||
|
|
|
||
|
|
return "<environment undetectable>";
|
||
|
|
}
|
||
|
|
|
||
|
|
exports.getUserAgent = getUserAgent;
|
||
|
|
//# sourceMappingURL=index.js.map
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 395:
|
||
|
|
/***/ (function(__unusedmodule, exports) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
|
exports.NpmModuleVersion = void 0;
|
||
|
|
var NpmModuleVersion;
(function (NpmModuleVersion) {
    /**
     * Parse "X.Y.Z" into { major, minor, patch }. An optional "-beta.N"
     * suffix is tolerated but not reflected in the result.
     * Throws if the string does not start with a valid version.
     */
    function parse(versionStr) {
        // FIX: escape the dot in "-beta." (previously matched any character).
        const match = versionStr.match(/^([0-9]+)\.([0-9]+)\.([0-9]+)(?:-beta\.([0-9]+))?/);
        if (!match) {
            throw new Error(`${versionStr} is not a valid NPM version`);
        }
        return {
            // Always pass an explicit radix to parseInt.
            "major": parseInt(match[1], 10),
            "minor": parseInt(match[2], 10),
            "patch": parseInt(match[3], 10)
        };
    }
    NpmModuleVersion.parse = parse;
    /** Render a parsed version back to "X.Y.Z". */
    function stringify(v) {
        return `${v.major}.${v.minor}.${v.patch}`;
    }
    NpmModuleVersion.stringify = stringify;
    /**
     *
     * v1 < v2  => -1
     * v1 === v2 => 0
     * v1 > v2  => 1
     *
     */
    function compare(v1, v2) {
        const sign = (n) => n === 0 ? 0 : (n < 0 ? -1 : 1);
        if (v1.major !== v2.major) {
            return sign(v1.major - v2.major);
        }
        if (v1.minor !== v2.minor) {
            return sign(v1.minor - v2.minor);
        }
        return sign(v1.patch - v2.patch);
    }
    NpmModuleVersion.compare = compare;
    /**
     * Classify the bump between versionBehindStr and versionAheadStr as
     * "MAJOR" | "MINOR" | "PATCH" | "SAME"; throws on a version regression.
     */
    function bumpType(params) {
        const versionAhead = parse(params.versionAheadStr);
        const versionBehind = parse(params.versionBehindStr);
        if (compare(versionBehind, versionAhead) === 1) {
            // BUG FIX: interpolating the objects directly printed
            // "[object Object] -> [object Object]"; stringify them instead.
            throw new Error(`Version regression ${stringify(versionBehind)} -> ${stringify(versionAhead)}`);
        }
        if (versionBehind.major !== versionAhead.major) {
            return "MAJOR";
        }
        if (versionBehind.minor !== versionAhead.minor) {
            return "MINOR";
        }
        if (versionBehind.patch !== versionAhead.patch) {
            return "PATCH";
        }
        return "SAME";
    }
    NpmModuleVersion.bumpType = bumpType;
})(NpmModuleVersion = exports.NpmModuleVersion || (exports.NpmModuleVersion = {}));
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 413:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
module.exports = require("stream");
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 417:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
module.exports = require("crypto");
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 425:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
// --- TypeScript compiler emit helpers (inlined tslib). Do not hand-edit. ---

// __extends: ES5 class inheritance (prototype chain + static copy).
var __extends = (this && this.__extends) || (function () {
    var extendStatics = function (d, b) {
        extendStatics = Object.setPrototypeOf ||
            ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
            function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
        return extendStatics(d, b);
    };
    return function (d, b) {
        extendStatics(d, b);
        function __() { this.constructor = d; }
        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
    };
})();
// __assign: Object.assign (or an own-property copy fallback) for spread emit.
var __assign = (this && this.__assign) || function () {
    __assign = Object.assign || function(t) {
        for (var s, i = 1, n = arguments.length; i < n; i++) {
            s = arguments[i];
            for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
                t[p] = s[p];
        }
        return t;
    };
    return __assign.apply(this, arguments);
};
// __awaiter: drives a generator as an async function returning a Promise.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// __generator: ES5 state-machine implementation of generator functions.
var __generator = (this && this.__generator) || function (thisArg, body) {
    var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
    return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
    function verb(n) { return function (v) { return step([n, v]); }; }
    function step(op) {
        if (f) throw new TypeError("Generator is already executing.");
        while (_) try {
            if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
            if (y = 0, t) op = [op[0] & 2, t.value];
            switch (op[0]) {
                case 0: case 1: t = op; break;
                case 4: _.label++; return { value: op[1], done: false };
                case 5: _.label++; y = op[1]; op = [0]; continue;
                case 7: op = _.ops.pop(); _.trys.pop(); continue;
                default:
                    if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
                    if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
                    if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
                    if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
                    if (t[2]) _.ops.pop();
                    _.trys.pop(); continue;
            }
            op = body.call(thisArg, _);
        } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
        if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
    }
};
// __read: consume up to n values from an iterable (destructuring emit).
var __read = (this && this.__read) || function (o, n) {
    var m = typeof Symbol === "function" && o[Symbol.iterator];
    if (!m) return o;
    var i = m.call(o), r, ar = [], e;
    try {
        while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);
    }
    catch (error) { e = { error: error }; }
    finally {
        try {
            if (r && !r.done && (m = i["return"])) m.call(i);
        }
        finally { if (e) throw e.error; }
    }
    return ar;
};
// __values: obtain an iterator for an iterable or array-like (for..of emit).
var __values = (this && this.__values) || function (o) {
    var m = typeof Symbol === "function" && o[Symbol.iterator], i = 0;
    if (m) return m.call(o);
    return {
        next: function () {
            if (o && i >= o.length) o = void 0;
            return { value: o && o[i++], done: !o };
        }
    };
};
// __spread: concatenate the __read of every argument (array spread emit).
var __spread = (this && this.__spread) || function () {
    for (var ar = [], i = 0; i < arguments.length; i++) ar = ar.concat(__read(arguments[i]));
    return ar;
};
|
||
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
|
var child_process = __webpack_require__(129);
|
||
|
|
var readline = __webpack_require__(58);
|
||
|
|
var fs = __webpack_require__(747);
|
||
|
|
var path = __webpack_require__(622);
|
||
|
|
var https = __webpack_require__(211);
|
||
|
|
var http = __webpack_require__(605);
|
||
|
|
var util = __webpack_require__(669);
|
||
|
|
var os = __webpack_require__(87);
|
||
|
|
var crypto = __webpack_require__(417);
|
||
|
|
/**
 * After this function is called every call to execSync
 * or exec will print the unix commands being executed.
 * */
function enableCmdTrace() {
    traceCmdIfEnabled.enabled = true;
}
exports.enableCmdTrace = enableCmdTrace;
// Print `cmd` (and its options, if any) when tracing was enabled via enableCmdTrace().
function traceCmdIfEnabled(cmd, options) {
    if (!traceCmdIfEnabled.enabled) {
        return;
    }
    console.log(colorize("$ " + cmd + " ", "YELLOW") + (!!options ? JSON.stringify(options) + "\n" : ""));
}
// The tracing flag lives on the function object itself; disabled by default.
(function (traceCmdIfEnabled) {
    traceCmdIfEnabled.enabled = false;
})(traceCmdIfEnabled || (traceCmdIfEnabled = {}));
|
||
|
|
// Resolve the numeric user id of `unix_user` by shelling out to `id -u`.
// NOTE(review): relies on sh_eval, defined elsewhere in this module —
// presumably it runs the command and returns its stdout; confirm before reuse.
function get_uid(unix_user) {
    return parseInt(sh_eval("id -u " + unix_user));
}
exports.get_uid = get_uid;
// Resolve the numeric group id of `unix_user` by shelling out to `id -g`.
function get_gid(unix_user) {
    return parseInt(sh_eval("id -g " + unix_user));
}
exports.get_gid = get_gid;
|
||
|
|
/**
 * Wrap `str` in the ANSI escape sequence for `color`
 * ("GREEN" | "RED" | "YELLOW") followed by a reset code.
 */
function colorize(str, color) {
    var color_code;
    switch (color) {
        case "GREEN": color_code = "\x1b[32m"; break;
        case "RED": color_code = "\x1b[31m"; break;
        case "YELLOW": color_code = "\x1b[33m"; break;
    }
    return "" + color_code + str + "\u001B[0m";
}
|
||
|
|
exports.colorize = colorize;
|
||
|
|
/**
 * Run a shell command synchronously.
 *
 * The stderr is forwarded to the console realtime.
 * The returned value is the concatenated data received on stdout.
 * If the return code of the cmd is not 0 an exception is thrown
 * and the message cmd + the concatenated data received on stderr.
 * If enableCmdTrace() has been called the command is printed first.
 */
function execSync(cmd, options) {
    traceCmdIfEnabled(cmd, options);
    var execOptions = __assign({}, (options || {}), { "encoding": "utf8" });
    return child_process.execSync(cmd, execOptions);
}
exports.execSync = execSync;
|
||
|
|
/**
 * Run a command, printing it first; stdout and stderr are inherited so
 * they stream to the console in real time. Returns nothing.
 *
 * stdio is set to "inherit" and thus should not be redefined.
 */
function execSyncTrace(cmd, options) {
    traceCmdIfEnabled(cmd, options);
    var execOptions = __assign({}, (options || {}), { "stdio": "inherit" });
    child_process.execSync(cmd, execOptions);
}
exports.execSyncTrace = execSyncTrace;
|
||
|
|
/** Same as execSync except that it does not print cmd even if cmdTrace has been enabled. */
var execSyncNoCmdTrace = function (...args) {
    var enabledBackup = traceCmdIfEnabled.enabled;
    traceCmdIfEnabled.enabled = false;
    try {
        return execSync.apply(null, args);
    } finally {
        // Restore the tracing flag whether execSync returned or threw.
        traceCmdIfEnabled.enabled = enabledBackup;
    }
};
|
||
|
|
/**
 * Like execSync but stderr is not forwarded.
 *
 * WARNING: when the command exits 0, anything it wrote on stderr is lost.
 * "stdio" is forced to "pipe" and must not be redefined by the caller.
 */
function execSyncQuiet(cmd, options) {
    var merged_options = __assign({}, (options || {}), { "stdio": "pipe" });
    return execSync(cmd, merged_options);
}
exports.execSyncQuiet = execSyncQuiet;
|
||
|
|
/**
 * Same as execSync but async.
 *
 * Resolves with the concatenated stdout output (utf8).
 * On failure, the data the command wrote on stderr is attached to the error
 * object (error["stderr"]) before the promise rejects.
 */
function exec(cmd, options) {
    var _this = this;
    traceCmdIfEnabled(cmd, options);
    return new Promise(function (resolve, reject) { return __awaiter(_this, void 0, void 0, function () {
        return __generator(this, function (_a) {
            child_process.exec(cmd, __assign({}, (options || {}), { "encoding": "utf8" }), function (error, stdout, stderr) {
                if (!!error) {
                    // Expose the captured stderr to the caller via the rejected error.
                    error["stderr"] = stderr;
                    reject(error);
                }
                else {
                    resolve(stdout);
                }
            });
            return [2 /*return*/];
        });
    }); });
}
exports.exec = exec;
|
||
|
|
/**
 * Spawn a process that keeps running after the current process exits.
 *
 * The child is exempted from stopProcessSync.stopSubProcessesAsapSync.
 * When logfile_path is provided, stdout and stderr are appended to that file;
 * otherwise they are discarded.
 * "detached" and "stdio" are set internally and should not be provided.
 */
function spawnAndDetach(command, args, options, logfile_path) {
    var out = !!logfile_path ? fs.openSync(logfile_path, "a") : "ignore";
    var spawn_options = __assign({}, (options || {}), { "detached": true, "stdio": ["ignore", out, out] });
    var subprocess = child_process.spawn(command, args, spawn_options);
    // Make the "kill all subprocesses on exit" sweep skip this child.
    stopProcessSync.stopSubProcessesAsapSync.ignorePids.add(subprocess.pid);
    subprocess.unref();
    return subprocess;
}
exports.spawnAndDetach = spawnAndDetach;
|
||
|
|
/**
 * Print a message and start an animated spinner on stdout.
 *
 * WARNING: nothing else should be printed to stdout until the spinner is
 * stopped via onSuccess/onError.
 *
 * Returns:
 * - exec: a proxy to the module-level exec() that calls onError (with the
 *   error message) before rethrowing.
 * - onSuccess: stops the spinner and prints a green message ("ok" by default).
 * - onError: stops the spinner and prints the error message in red.
 */
function start_long_running_process(message) {
    process.stdout.write(message + "... ");
    // Cursor-repositioning closure: jumps back to the spinner cell
    // (just after "message... ").
    var moveBack = (function () {
        var cp = message.length + 3;
        return function () { return readline.cursorTo(process.stdout, cp); };
    })();
    // Spinner frames, pre-colorized green.
    var p = ["\\", "|", "/", "-"].map(function (i) { return colorize(i, "GREEN"); });
    var x = 0;
    // Redraw the spinner every 250ms.
    var timer = setInterval(function () {
        moveBack();
        process.stdout.write(p[x++]);
        x = x % p.length;
    }, 250);
    var onComplete = function (message) {
        clearInterval(timer);
        moveBack();
        process.stdout.write(message + "\n");
    };
    var onError = function (errorMessage) { return onComplete(colorize(errorMessage, "RED")); };
    var onSuccess = function (message) { return onComplete(colorize(message || "ok", "GREEN")); };
    // With command tracing on, the spinner would interleave with command
    // output: terminate the animation now and fall back to plain logging.
    if (traceCmdIfEnabled.enabled) {
        onComplete("");
        onComplete = function (message) { return console.log(message); };
    }
    return {
        onError: onError,
        onSuccess: onSuccess,
        // Proxy to exec() that reports failures through onError before rethrowing.
        "exec": function () {
            var args = [];
            for (var _i = 0; _i < arguments.length; _i++) {
                args[_i] = arguments[_i];
            }
            return __awaiter(this, void 0, void 0, function () {
                var error_1;
                return __generator(this, function (_a) {
                    switch (_a.label) {
                        case 0:
                            _a.trys.push([0, 2, , 3]);
                            return [4 /*yield*/, exec.apply(null, args)];
                        case 1: return [2 /*return*/, _a.sent()];
                        case 2:
                            error_1 = _a.sent();
                            onError(error_1.message);
                            throw error_1;
                        case 3: return [2 /*return*/];
                    }
                });
            });
        }
    };
}
exports.start_long_running_process = start_long_running_process;
;
|
||
|
|
/**
 * Install an APT package only when needed.
 *
 * Skips the install when `prog` is provided and is found on the PATH, or when
 * dpkg already reports `package_name` as installed. Otherwise delegates to
 * apt_get_install().
 */
function apt_get_install_if_missing(package_name, prog) {
    return __awaiter(this, void 0, void 0, function () {
        return __generator(this, function (_a) {
            switch (_a.label) {
                case 0:
                    process.stdout.write("Looking for " + package_name + " ... ");
                    // Short-circuit: the executable is already available.
                    if (!!prog && apt_get_install_if_missing.doesHaveProg(prog)) {
                        console.log(prog + " executable found. " + colorize("OK", "GREEN"));
                        return [2 /*return*/];
                    }
                    // Short-circuit: dpkg says the package is installed.
                    if (apt_get_install_if_missing.isPkgInstalled(package_name)) {
                        console.log(package_name + " is installed. " + colorize("OK", "GREEN"));
                        return [2 /*return*/];
                    }
                    // Erase the "Looking for..." line before the installer prints its own progress.
                    readline.clearLine(process.stdout, 0);
                    process.stdout.write("\r");
                    return [4 /*yield*/, apt_get_install(package_name)];
                case 1: return [2 /*return*/, _a.sent()];
            }
        });
    });
}
exports.apt_get_install_if_missing = apt_get_install_if_missing;
|
||
|
|
(function (apt_get_install_if_missing) {
    /**
     * True when dpkg-query reports the package status without error.
     * NOTE(review): console.assert never throws, so the "install ok installed"
     * match is advisory only — the try/catch reacts to dpkg-query failing,
     * not to the regexp mismatching. Preserved as-is.
     */
    function isPkgInstalled(package_name) {
        try {
            var status_output = execSyncNoCmdTrace("dpkg-query -W -f='${Status}' " + package_name, { "stdio": "pipe" });
            console.assert(!!status_output.match(/^install ok installed$/));
        }
        catch (_a) {
            return false;
        }
        return true;
    }
    apt_get_install_if_missing.isPkgInstalled = isPkgInstalled;
    /** True when `prog` resolves on the PATH (probed with `which`). */
    function doesHaveProg(prog) {
        try {
            execSyncNoCmdTrace("which " + prog);
        }
        catch (_a) {
            return false;
        }
        return true;
    }
    apt_get_install_if_missing.doesHaveProg = doesHaveProg;
})(apt_get_install_if_missing = exports.apt_get_install_if_missing || (exports.apt_get_install_if_missing = {}));
|
||
|
|
/**
 * Install or upgrade a package via APT, with a progress spinner.
 *
 * Runs "apt-get update" once per process (guarded by apt_get_install.isFirst).
 * On a fresh install (package was not present before) the overridable
 * apt_get_install.onInstallSuccess hook is invoked; failures are routed to
 * the overridable apt_get_install.onError hook (which rethrows by default).
 */
function apt_get_install(package_name) {
    return __awaiter(this, void 0, void 0, function () {
        var _a, onSuccess, exec, was_installed_before, error_2;
        return __generator(this, function (_b) {
            switch (_b.label) {
                case 0:
                    _a = start_long_running_process("Installing or upgrading " + package_name + " package"), onSuccess = _a.onSuccess, exec = _a.exec;
                    _b.label = 1;
                case 1:
                    _b.trys.push([1, 5, , 6]);
                    if (!apt_get_install.isFirst) return [3 /*break*/, 3];
                    // "|| true": a failing apt-get update must not abort the install.
                    return [4 /*yield*/, exec("apt-get update || true")];
                case 2:
                    _b.sent();
                    apt_get_install.isFirst = false;
                    _b.label = 3;
                case 3:
                    // Remember prior state so onInstallSuccess only fires on fresh installs.
                    was_installed_before = apt_get_install_if_missing.isPkgInstalled(package_name);
                    return [4 /*yield*/, exec("apt-get -y install " + package_name)];
                case 4:
                    _b.sent();
                    if (!was_installed_before) {
                        apt_get_install.onInstallSuccess(package_name);
                    }
                    return [3 /*break*/, 6];
                case 5:
                    error_2 = _b.sent();
                    apt_get_install.onError(error_2);
                    return [3 /*break*/, 6];
                case 6:
                    onSuccess("DONE");
                    return [2 /*return*/];
            }
        });
    });
}
exports.apt_get_install = apt_get_install;
|
||
|
|
(function (apt_get_install) {
    // Ensures "apt-get update" is run only once per process lifetime.
    apt_get_install.isFirst = true;
    /**
     * Append `package_name` to the JSON string-array persisted at
     * `file_json_path`, creating the file when needed.
     * No-op when the package is already listed.
     */
    function record_installed_package(file_json_path, package_name) {
        execSyncNoCmdTrace("touch " + file_json_path);
        var raw_json = fs.readFileSync(file_json_path).toString("utf8");
        var installed_list = raw_json === "" ? [] : JSON.parse(raw_json);
        if (!installed_list.find(function (listed) { return listed === package_name; })) {
            installed_list.push(package_name);
            fs.writeFileSync(file_json_path, Buffer.from(JSON.stringify(installed_list, null, 2), "utf8"));
        }
    }
    apt_get_install.record_installed_package = record_installed_package;
    // Overridable hooks: by default errors are rethrown and fresh installs ignored.
    apt_get_install.onError = function (error) { throw error; };
    apt_get_install.onInstallSuccess = function (package_name) { };
})(apt_get_install = exports.apt_get_install || (exports.apt_get_install = {}));
|
||
|
|
/** Terminate the process with exit code 1 (after a red error message) unless running as root. */
function exit_if_not_root() {
    if (process.getuid() === 0) {
        return;
    }
    console.log(colorize("Error: root privilege required ", "RED"));
    process.exit(1);
}
exports.exit_if_not_root = exit_if_not_root;
|
||
|
|
/**
 * Locate a given module inside a node_modules tree.
 *
 * When the module is present several times (different versions), the
 * shortest path wins — so a module listed in package.json dependencies
 * resolves to its top-level copy.
 *
 * @param module_name The name of the module.
 * @param module_dir_path Root of the module to search ( ./node_modules ).
 * @returns Absolute path of the module directory.
 * @throws When there is no node_modules directory or the module is absent.
 */
function find_module_path(module_name, module_dir_path) {
    // Base case: we are standing on the module itself.
    if (path.basename(module_dir_path) === module_name) {
        return module_dir_path;
    }
    var node_module_path = path.join(module_dir_path, "node_modules");
    if (!fs.existsSync(node_module_path)) {
        throw new Error("No node_modules in " + module_dir_path);
    }
    // Recurse into every installed package, keep successful lookups,
    // and prefer the shortest (shallowest) path.
    var matches = fs.readdirSync(node_module_path)
        .map(function (file_name) { return path.join(node_module_path, file_name); })
        .filter(function (candidate_path) { return fs.existsSync(path.join(candidate_path, "package.json")); })
        .map(function (candidate_path) {
            try {
                return find_module_path(module_name, candidate_path);
            }
            catch (_a) {
                return "";
            }
        })
        .filter(function (found_path) { return !!found_path; })
        .sort(function (a, b) { return a.length - b.length; });
    if (matches[0] === undefined) {
        throw new Error("module " + module_name + " not installed in " + module_dir_path);
    }
    return matches[0];
}
exports.find_module_path = find_module_path;
|
||
|
|
/**
 * Test whether two files or folders have identical content (via `diff -r`).
 *
 * Ownership and permissions are NOT considered; symlinks are followed.
 *
 * Example: to compare /foo1/bar/file.txt with /foo2/bar/file.txt call with
 * ("/foo1", "/foo2", "./bar/file.txt") or ("/foo1/bar/file.txt",
 * "/foo2/bar/file.txt"). For convenience relative_to_path may be absolute as
 * long as relative_from_path1 or relative_from_path2 is one of its parents.
 *
 * @param relative_from_path1 absolute path, e.g. '/foo1'
 * @param relative_from_path2 absolute path, e.g. '/foo2'
 * @param relative_to_path relative path, e.g. './bar/file.txt'
 */
function fs_areSame(relative_from_path1, relative_from_path2, relative_to_path) {
    if (relative_to_path === void 0) { relative_to_path = "."; }
    relative_to_path = fs_areSame.get_relative_to_path(relative_from_path1, relative_from_path2, relative_to_path);
    var diff_cmd = "diff -r " +
        path.join(relative_from_path1, relative_to_path) + " " +
        path.join(relative_from_path2, relative_to_path);
    try {
        // diff exits non-zero on any difference (or missing path) -> throws.
        execSyncNoCmdTrace(diff_cmd, { "stdio": "pipe" });
        return true;
    }
    catch (_a) {
        return false;
    }
}
exports.fs_areSame = fs_areSame;
|
||
|
|
(function (fs_areSame) {
    /**
     * Normalize `to_path` to a path relative to whichever of the two base
     * directories is its (longest) parent. A path that is already relative
     * is returned unchanged.
     * @throws When `to_path` is absolute but under neither base directory.
     */
    function get_relative_to_path(dir_path1, dir_path2, to_path) {
        if (!path.isAbsolute(to_path)) {
            return to_path;
        }
        // Prefer the longest matching parent so nested bases resolve correctly.
        var parent_dir_path = [dir_path1, dir_path2]
            .filter(function (candidate) { return to_path.startsWith(candidate); })
            .sort(function (a, b) { return b.length - a.length; })[0];
        if (!parent_dir_path) {
            throw new Error("Not relative!");
        }
        return path.relative(parent_dir_path, to_path);
    }
    fs_areSame.get_relative_to_path = get_relative_to_path;
})(fs_areSame = exports.fs_areSame || (exports.fs_areSame = {}));
|
||
|
|
/**
 * Move or copy a file or folder.
 *
 * - When dest is identical to source nothing is copied nor moved
 *   (in MOVE mode the source is simply removed).
 * - When dest exists and differs from source it is deleted first.
 * - When a copy is effectively performed the stat are conserved (`cp -rp`).
 * - Missing parent directories of dest are created.
 * - Unlike cp/mv, [action] "/src/file.txt" "/dest" does NOT place file.txt
 *   inside dest: dest itself becomes file.txt.
 *
 * Calling [action] "/src/foo" "/dst/foo" is equivalent to
 * [action] "/src" "/dst" "./foo" (or "foo", "src/foo", "dst/foo").
 *
 * @param action "COPY" | "MOVE"
 */
function fs_move(action, relative_from_path_src, relative_from_path_dest, relative_to_path) {
    if (relative_to_path === void 0) { relative_to_path = "."; }
    relative_to_path = fs_areSame.get_relative_to_path(relative_from_path_src, relative_from_path_dest, relative_to_path);
    var src_path = path.join(relative_from_path_src, relative_to_path);
    var dst_path = path.join(relative_from_path_dest, relative_to_path);
    // Already identical: only a MOVE has anything left to do (drop the source).
    if (fs_areSame(src_path, dst_path)) {
        if (action === "MOVE") {
            execSyncNoCmdTrace("rm -r " + src_path);
        }
        return;
    }
    if (!fs.existsSync(dst_path)) {
        execSyncNoCmdTrace("mkdir -p " + dst_path);
    }
    // Clear the destination, then copy (preserving stat) or move.
    execSyncNoCmdTrace("rm -rf " + dst_path);
    execSyncNoCmdTrace((action === "COPY" ? "cp -rp" : "mv") + " " + src_path + " " + dst_path);
}
exports.fs_move = fs_move;
|
||
|
|
/**
 * Download and extract a tarball. throws web_get.DownloadError and Error
 *
 * The archive is fetched to /tmp/_<sha1(url)>.tar.gz and extracted to
 * /tmp/_<sha1(url)> before being moved to dest_dir_path.
 *
 * Example
 *
 * website.com/rel.tar.gz
 * ./file1.txt
 * ./dir/file2.txt
 *
 * /foo/
 * ./file3.txt
 * ./dir/file4.txt
 *
 * calling with "website.com/rel.tar.gz", "MERGE" will result in:
 *
 * /foo/
 * ./file1.txt
 * ./file3.txt
 * ./dir/file4.txt
 *
 * calling with "website.com/rel.tar.gz", "OVERWRITE IF EXIST" will result in:
 *
 * /foo/
 * ./file1.txt
 * ./dir/file2.txt
 *
 * @param mode "MERGE" | anything else = overwrite dest_dir_path entirely.
 */
function download_and_extract_tarball(url, dest_dir_path, mode) {
    return __awaiter(this, void 0, void 0, function () {
        var _a, exec, onSuccess, onError, tarball_dir_path, tarball_path, error_3, _b, _c, name;
        var e_1, _d;
        return __generator(this, function (_e) {
            switch (_e.label) {
                case 0:
                    _a = start_long_running_process("Downloading " + url + " and extracting to " + dest_dir_path), exec = _a.exec, onSuccess = _a.onSuccess, onError = _a.onError;
                    // Deterministic scratch location derived from the url's sha1.
                    tarball_dir_path = (function () {
                        var hash = crypto.createHash("sha1");
                        hash.write(url);
                        hash.end();
                        return "/tmp/_" + hash.read().toString("hex");
                    })();
                    tarball_path = tarball_dir_path + ".tar.gz";
                    // Clean leftovers from a previous (possibly aborted) run.
                    if (!(fs.existsSync(tarball_dir_path) || fs.existsSync(tarball_path))) return [3 /*break*/, 2];
                    return [4 /*yield*/, exec("rm -rf " + tarball_dir_path + " " + tarball_path)];
                case 1:
                    _e.sent();
                    _e.label = 2;
                case 2:
                    _e.trys.push([2, 4, , 5]);
                    return [4 /*yield*/, web_get(url, tarball_path)];
                case 3:
                    _e.sent();
                    return [3 /*break*/, 5];
                case 4:
                    error_3 = _e.sent();
                    onError(error_3.message);
                    throw error_3;
                case 5: return [4 /*yield*/, exec("mkdir " + tarball_dir_path)];
                case 6:
                    _e.sent();
                    return [4 /*yield*/, exec("tar -xzf " + tarball_path + " -C " + tarball_dir_path)];
                case 7:
                    _e.sent();
                    return [4 /*yield*/, exec("rm " + tarball_path)];
                case 8:
                    _e.sent();
                    if (!(mode === "MERGE")) return [3 /*break*/, 10];
                    try {
                        // MERGE: move each top-level entry individually so existing
                        // files in dest_dir_path that the tarball doesn't contain survive.
                        for (_b = __values(fs_ls(tarball_dir_path)), _c = _b.next(); !_c.done; _c = _b.next()) {
                            name = _c.value;
                            fs_move("MOVE", tarball_dir_path, dest_dir_path, name);
                        }
                    }
                    catch (e_1_1) { e_1 = { error: e_1_1 }; }
                    finally {
                        try {
                            if (_c && !_c.done && (_d = _b.return)) _d.call(_b);
                        }
                        finally { if (e_1) throw e_1.error; }
                    }
                    return [4 /*yield*/, exec("rm -r " + tarball_dir_path)];
                case 9:
                    _e.sent();
                    return [3 /*break*/, 11];
                case 10:
                    // Non-MERGE: replace dest_dir_path wholesale.
                    fs_move("MOVE", tarball_dir_path, dest_dir_path);
                    _e.label = 11;
                case 11:
                    onSuccess();
                    return [2 /*return*/];
            }
        });
    });
}
exports.download_and_extract_tarball = download_and_extract_tarball;
|
||
|
|
/**
 * HTTP(S) GET with redirect following, timeouts and completeness checks.
 *
 * - With file_path: the body is streamed to that file; resolves with undefined.
 *   On any failure the partially-written file is unlinked before rejecting.
 * - Without file_path: resolves with the body as a utf8 string.
 * - A scheme-less url defaults to "http://".
 * - 3xx responses with a Location header are followed recursively.
 * - Rejects with web_get.DownloadError (connection error / timeout),
 *   web_get.DownloadErrorHttpErrorCode (non-2xx status) or
 *   web_get.DownloadErrorIncomplete (fewer bytes than content-length,
 *   socket timeout, stream error).
 */
function web_get(url, file_path) {
    if (!url.startsWith("http")) {
        url = "http://" + url;
    }
    return new Promise(function (resolve, reject) {
        var get = url.startsWith("https") ?
            https.get.bind(https) : http.get.bind(http);
        // Connection-establishment timeout: 20s to get a response.
        var timer = setTimeout(function () {
            clientRequest.abort();
            reject(new web_get.DownloadError(url, "CONNECTION ERROR", "web_get connection error: timeout"));
        }, 20000);
        var clientRequest = get(url, function (res) {
            clearTimeout(timer);
            // Follow redirects recursively when a Location header is present.
            if (("" + res.statusCode).startsWith("30")) {
                var url_redirect = res.headers.location;
                if (!!url_redirect) {
                    web_get(url_redirect, file_path)
                        .then(function (out) { return resolve(out); })
                        .catch(function (error) { return reject(error); });
                    return;
                }
            }
            if (!("" + res.statusCode).startsWith("2")) {
                reject(new web_get.DownloadErrorHttpErrorCode(url, res.statusCode));
                return;
            }
            var contentLength = undefined;
            var receivedBytes = 0;
            if (res.headers["content-length"] !== undefined) {
                contentLength = parseInt(res.headers["content-length"], 10);
                res.on("data", function (chunk) { return receivedBytes += chunk.length; });
                // Wrap resolve so a short read is converted into a rejection.
                (function () {
                    var resolve_src = resolve;
                    resolve = function () {
                        var args = [];
                        for (var _i = 0; _i < arguments.length; _i++) {
                            args[_i] = arguments[_i];
                        }
                        if (receivedBytes !== contentLength) {
                            reject(new web_get.DownloadErrorIncomplete(url, contentLength, receivedBytes));
                            return;
                        }
                        resolve_src.apply(null, args);
                    };
                })();
            }
            // Inactivity timeout on the transfer itself: 60s between packets.
            res.socket.setTimeout(60000, function () { return res.socket.destroy(new web_get.DownloadErrorIncomplete(url, contentLength, receivedBytes, "socket timeout")); });
            if (!!file_path) {
                // Wrap reject so the partial file is removed before propagating.
                (function () {
                    var reject_src = reject;
                    reject = function () {
                        var args = [];
                        for (var _i = 0; _i < arguments.length; _i++) {
                            args[_i] = arguments[_i];
                        }
                        return fs.unlink(file_path, function () { return reject_src.apply(null, args); });
                    };
                })();
                // Buffer.alloc replaces the deprecated `new Buffer(0)` constructor.
                fs.writeFileSync(file_path, Buffer.alloc(0));
                var fsWriteStream = fs.createWriteStream(file_path);
                res.pipe(fsWriteStream);
                fsWriteStream.once("finish", function () { return resolve(); });
                res.once("error", function (error) { return reject(new web_get.DownloadErrorIncomplete(url, contentLength, receivedBytes, error.message)); });
                fsWriteStream.once("error", function (error) { return reject(new web_get.DownloadErrorIncomplete(url, contentLength, receivedBytes, error.message)); });
            }
            else {
                // In-memory accumulation when no destination file was requested.
                var data_1 = Buffer.alloc(0);
                res.on("data", function (chunk) { return data_1 = Buffer.concat([data_1, chunk]); });
                res.once("end", function () { return resolve(data_1.toString("utf8")); });
            }
        });
        clientRequest.once("error", function (error) {
            clearTimeout(timer);
            reject(new web_get.DownloadError(url, "CONNECTION ERROR", error.message));
        });
    });
}
exports.web_get = web_get;
|
||
|
|
(function (web_get) {
    /** Base class of every web_get failure; carries the url and a broad cause tag. */
    var DownloadError = /** @class */ (function (_super) {
        __extends(DownloadError, _super);
        function DownloadError(url, cause, message) {
            var _newTarget = this.constructor;
            var _this = _super.call(this, message) || this;
            _this.url = url;
            _this.cause = cause;
            // Repair the prototype chain broken by down-leveled Error subclassing,
            // so `instanceof` keeps working on ES5 targets.
            Object.setPrototypeOf(_this, _newTarget.prototype);
            return _this;
        }
        return DownloadError;
    }(Error));
    web_get.DownloadError = DownloadError;
    /** The transfer ended with fewer bytes than announced (or a stream/socket error). */
    var DownloadErrorIncomplete = /** @class */ (function (_super) {
        __extends(DownloadErrorIncomplete, _super);
        function DownloadErrorIncomplete(url, contentLength, receivedBytes, info) {
            var _newTarget = this.constructor;
            var _this = _super.call(this, url, "INCOMPLETE", "web_get failed, download incomplete " + receivedBytes + "/" + contentLength + ", " + (!!info ? info : "")) || this;
            _this.contentLength = contentLength;
            _this.receivedBytes = receivedBytes;
            Object.setPrototypeOf(_this, _newTarget.prototype);
            return _this;
        }
        return DownloadErrorIncomplete;
    }(DownloadError));
    web_get.DownloadErrorIncomplete = DownloadErrorIncomplete;
    /** The server answered with a non-2xx (and non-followable) HTTP status code. */
    var DownloadErrorHttpErrorCode = /** @class */ (function (_super) {
        __extends(DownloadErrorHttpErrorCode, _super);
        function DownloadErrorHttpErrorCode(url, code) {
            var _newTarget = this.constructor;
            var _this = _super.call(this, url, "HTTP ERROR CODE", "web_get failed, HTTP error code: " + code) || this;
            _this.code = code;
            Object.setPrototypeOf(_this, _newTarget.prototype);
            return _this;
        }
        return DownloadErrorHttpErrorCode;
    }(DownloadError));
    web_get.DownloadErrorHttpErrorCode = DownloadErrorHttpErrorCode;
})(web_get = exports.web_get || (exports.web_get = {}));
|
||
|
|
/**
 * List a directory via `ls`.
 *
 * @param dir_path Directory to list.
 * @param mode "FILENAME" (default) for bare names, "ABSOLUTE PATH" to join
 *             each name onto dir_path.
 * @param showHidden When true, passes -a to ls (includes "." and "..").
 */
function fs_ls(dir_path, mode, showHidden) {
    if (mode === void 0) { mode = "FILENAME"; }
    if (showHidden === void 0) { showHidden = false; }
    var names = execSyncNoCmdTrace("ls" + (showHidden ? " -a" : ""), { "cwd": dir_path })
        .slice(0, -1) // drop the trailing newline of ls output
        .split("\n");
    return names.map(function (name) {
        if (mode === "ABSOLUTE PATH") {
            return path.join(dir_path, name);
        }
        return name;
    });
}
exports.fs_ls = fs_ls;
|
||
|
|
/**
 * Create a symbolic link src_path -> dst_path.
 *
 * Any existing dst is removed first; directories leading to dst are created
 * when missing.
 * NOTE(review): "mkdir -p dst_path" creates dst_path itself, which the next
 * line removes again — the net effect is that only the parent directories
 * remain. Preserved as-is.
 */
function createSymlink(src_path, dst_path) {
    if (!fs.existsSync(dst_path)) {
        execSyncNoCmdTrace("mkdir -p " + dst_path);
    }
    execSyncNoCmdTrace("rm -rf " + dst_path);
    execSync("ln -s " + src_path + " " + dst_path);
}
exports.createSymlink = createSymlink;
|
||
|
|
/** Write `content` (utf8) to file_path and mark the file executable. */
function createScript(file_path, content) {
    if (traceCmdIfEnabled.enabled) {
        console.log("Creating script " + file_path);
    }
    var script_buffer = Buffer.from(content, "utf8");
    fs.writeFileSync(file_path, script_buffer);
    execSyncNoCmdTrace("chmod +x " + file_path);
}
exports.createScript = createScript;
|
||
|
|
var unixUser;
(function (unixUser) {
    /**
     * Create a system user without a home directory, no login shell
     * (/bin/false) and home pointing at home_dir_path (default /tmp).
     */
    function create(unix_user, home_dir_path) {
        if (home_dir_path === void 0) { home_dir_path = "/tmp"; }
        execSyncNoCmdTrace("useradd -M " + unix_user + " -s /bin/false -d " + home_dir_path);
    }
    unixUser.create = create;
    /** Delete a system user (stderr suppressed via piped stdio). */
    function remove(unix_user) {
        execSyncNoCmdTrace("userdel " + unix_user, { "stdio": "pipe" });
    }
    unixUser.remove = remove;
})(unixUser = exports.unixUser || (exports.unixUser = {}));
|
||
|
|
// Re-export get_caller_file_path from its own bundled module (webpack id 303).
var get_caller_file_path_1 = __webpack_require__(303);
exports.get_caller_file_path = get_caller_file_path_1.get_caller_file_path;
var get_caller_file_path_2 = __webpack_require__(303);
/**
 *
 * DO NOT USE TEST PURPOSE ONLY
 *
 * return __filename
 *
 */
function get__filename() {
    // Resolves to the caller's file path, emulating __filename at the call site.
    return get_caller_file_path_2.get_caller_file_path();
}
exports.get__filename = get__filename;
|
||
|
|
/**
 * Equivalent to the $() command-substitution pattern in bash.
 *
 * Strips a single trailing LF if present. When the command fails no error is
 * thrown — an empty string is returned instead. Nothing is printed to stdout.
 *
 * Typical usage: `uname -r` or `which pkill`.
 */
function sh_eval(cmd) {
    var output;
    try {
        output = execSyncNoCmdTrace(cmd, { "stdio": "pipe" });
    }
    catch (_a) {
        return "";
    }
    return output.replace(/\n$/, "");
}
exports.sh_eval = sh_eval;
|
||
|
|
/**
 * Run a command and report whether it exited with code 0.
 * Nothing is printed to stdout.
 */
function sh_if(cmd) {
    try {
        execSyncNoCmdTrace(cmd, { "stdio": "pipe" });
        return true;
    }
    catch (_a) {
        return false;
    }
}
exports.sh_if = sh_if;
|
||
|
|
/**
 * Return a promise that resolves with the source promise's value when it
 * fulfills, or resolves (not rejects) with the error when it rejects.
 *
 * When `timeout` (ms) is given and the source promise has not settled in
 * time, the returned promise resolves with an Error whose message is
 * safePr.timeoutErrorMessage.
 */
function safePr(pr, timeout) {
    // Fold rejection into the resolution channel.
    var settled = pr.then(function (val) { return val; }, function (error) { return error; });
    if (timeout === undefined) {
        return settled;
    }
    var timer;
    var timeoutPr = new Promise(function (resolve) {
        timer = setTimeout(function () { return resolve(new Error(safePr.timeoutErrorMessage)); }, timeout);
    });
    return Promise.race([
        timeoutPr,
        settled.then(function (val) {
            // The source won the race: cancel the pending timeout.
            clearTimeout(timer);
            return val;
        })
    ]);
}
exports.safePr = safePr;
;
(function (safePr) {
    safePr.timeoutErrorMessage = "safePr timeout";
})(safePr = exports.safePr || (exports.safePr = {}));
|
||
|
|
/**
|
||
|
|
*
|
||
|
|
* Allow to schedule action function to perform before exiting.
|
||
|
|
*
|
||
|
|
* The task function will always be called before the process stop
|
||
|
|
* unless process.exit is explicitly called somewhere or
|
||
|
|
* if the process receive any signal other than the ones specified
|
||
|
|
* in the ExitCause.Signal["signal"] type.
|
||
|
|
*
|
||
|
|
* The process may stop for tree reasons:
|
||
|
|
* 1) If there is no more work scheduled ( natural termination ).
|
||
|
|
* 2) If an uncaught exception it thrown ( or a unhandled promise rejection )
|
||
|
|
* 3) If a signal ( one of the handled ) is sent to the process.
|
||
|
|
*
|
||
|
|
* To manually exit the process there is two option:
|
||
|
|
* - Call process.exit(X) but the task function will not be called.
|
||
|
|
* - Emit "beforeExit" on process object ( process.emit("beforeExit, process.exitCode= X) );
|
||
|
|
* Doing so you simulate 1st stop condition ( natural termination ).
|
||
|
|
*
|
||
|
|
* To define the return code set process.exitCode. The exit code can be set
|
||
|
|
* before emitting "beforeExit" or in the task function.
|
||
|
|
* If exitCode has not be defined the process will exit with 0 if
|
||
|
|
* there was nothing else to do and 1 otherwise.
|
||
|
|
*
|
||
|
|
* The task function can be synchronous or asynchronous.
|
||
|
|
* The task function has [timeout] ms to complete.
|
||
|
|
* If it has not completed within this delay the process will
|
||
|
|
* be terminated anyway. (Default 4000 ms )
|
||
|
|
* Setting [timeout] to a negative value will disable the timer.
|
||
|
|
* WARNING: It is important not to perform sync operation that can
|
||
|
|
* hang for a long time in the task function ( e.g. execSync("sleep 1000"); )
|
||
|
|
* because while the sync operation are performed the timeout can't be triggered.
|
||
|
|
*
|
||
|
|
* As soon as the task function is called all the other exitCause that
|
||
|
|
* may auccur will be ignored so that the task function have time to complete.
|
||
|
|
* Anyway the task function is called only once.
|
||
|
|
*
|
||
|
|
* Whether the task function complete by successfully or throw
|
||
|
|
* an exception the process will terminate with exit code set
|
||
|
|
* in process.exitCode at the time of the completion.
|
||
|
|
*
|
||
|
|
* Provide shouldExitIf function to filter what should be
|
||
|
|
* considered a case to terminate the process.
|
||
|
|
* Only exception and supported signals can be bypassed,
|
||
|
|
* Nothing else to do will always terminate the process.
|
||
|
|
* By default exiting on any signal or uncaught errors.
|
||
|
|
*
|
||
|
|
* Before exiting all subprocess will be killed.
|
||
|
|
*
|
||
|
|
*
|
||
|
|
*/
|
||
|
|
// Registers a one-shot, process-wide exit handler. (Transpiled TypeScript
// output; __awaiter/__generator/__values are the standard TS runtime helpers.)
function setProcessExitHandler(task, timeout, shouldExitIf) {
    var e_2, _a, e_3, _b;
    var _this = this;
    // Default: the task gets 4000 ms to complete before the process is forced down.
    if (timeout === void 0) { timeout = 4000; }
    // Default: every supported signal / uncaught error is a reason to exit.
    if (shouldExitIf === void 0) { shouldExitIf = function () { return true; }; }
    // All logging goes through the overridable setProcessExitHandler.log hook
    // (a no-op unless the caller replaces it).
    var log = function () {
        var args = [];
        for (var _i = 0; _i < arguments.length; _i++) {
            args[_i] = arguments[_i];
        }
        return setProcessExitHandler.log("[ exit handler ] " + util.format.apply(util, args));
    };
    var handler = function (exitCause) { return __awaiter(_this, void 0, void 0, function () {
        var process_exit, actionOut, error_4;
        return __generator(this, function (_a) {
            switch (_a.label) {
                case 0:
                    // "NOTHING ELSE TO DO" (beforeExit) can never be vetoed; any
                    // other cause may be filtered out by the user-supplied predicate.
                    if (exitCause.type !== "NOTHING ELSE TO DO" && !shouldExitIf(exitCause)) {
                        log("Choosing ( c.f shouldExitIf ) not to terminate the process despite: ", exitCause);
                        return [2 /*return*/];
                    }
                    // From here on the exit task runs exactly once: the handler is
                    // swapped for a stub so later causes are only logged.
                    handler = function (exitCause) { return log("Ignored extra exit cause", exitCause); };
                    process_exit = function () {
                        // Choose an exit code if none was set: 0 for a natural end
                        // (beforeExit), 1 for any signal/exception cause.
                        if (typeof process.exitCode !== "number" || isNaN(process.exitCode)) {
                            if (exitCause.type === "NOTHING ELSE TO DO") {
                                process.exitCode = 0;
                            }
                            else {
                                log("Exit cause " + exitCause.type + " and not exitCode have been set, using exit code 1");
                                process.exitCode = 1;
                            }
                        }
                        else {
                            log("Exit code have been set to " + process.exitCode);
                        }
                        // Kill the whole child-process tree before exiting.
                        log("Stopping subprocess asap if any...");
                        stopProcessSync.stopSubProcessesAsapSync();
                        log("exiting now with code " + process.exitCode);
                        process.exit();
                    };
                    log("Cause of process termination: ", exitCause);
                    // A negative timeout disables the watchdog entirely.
                    if (timeout >= 0) {
                        setTimeout(function () {
                            log("Exit task timeout");
                            process.exitCode = 1;
                            process_exit();
                        }, timeout);
                    }
                    // Run the user task; a synchronous throw terminates immediately.
                    try {
                        actionOut = task(exitCause);
                    }
                    catch (error) {
                        log("Exit task thrown error", error);
                        process_exit();
                        return [2 /*return*/];
                    }
                    // If the task is async, await it (cases 1-3 below are the
                    // generator-encoded try/catch around the awaited promise).
                    if (!(actionOut instanceof Promise)) return [3 /*break*/, 4];
                    _a.label = 1;
                case 1:
                    _a.trys.push([1, 3, , 4]);
                    return [4 /*yield*/, actionOut];
                case 2:
                    _a.sent();
                    return [3 /*break*/, 4];
                case 3:
                    error_4 = _a.sent();
                    log("Exit task returned a promise that rejected", error_4);
                    process_exit();
                    return [2 /*return*/];
                case 4:
                    log("Exit task complete successfully.");
                    process_exit();
                    return [2 /*return*/];
            }
        });
    }); };
    var _loop_1 = function (signal) {
        process.on(signal, function () { return handler({ "type": "SIGNAL", signal: signal }); });
    };
    // Hook every supported signal (SIGINT, SIGUSR2, SIGHUP — see ExitCause.Signal.list).
    try {
        for (var _c = __values(setProcessExitHandler.ExitCause.Signal.list), _d = _c.next(); !_d.done; _d = _c.next()) {
            var signal = _d.value;
            _loop_1(signal);
        }
    }
    catch (e_2_1) { e_2 = { error: e_2_1 }; }
    finally {
        try {
            if (_d && !_d.done && (_a = _c.return)) _a.call(_c);
        }
        finally { if (e_2) throw e_2.error; }
    }
    // Hook uncaught exceptions and unhandled promise rejections.
    try {
        for (var _e = __values(["uncaughtException", "unhandledRejection"]), _f = _e.next(); !_f.done; _f = _e.next()) {
            var eventName = _f.value;
            process.on(eventName, function (error) { return handler({ "type": "EXCEPTION", error: error }); });
        }
    }
    catch (e_3_1) { e_3 = { error: e_3_1 }; }
    finally {
        try {
            if (_f && !_f.done && (_b = _e.return)) _b.call(_e);
        }
        finally { if (e_3) throw e_3.error; }
    }
    // Fires when the event loop drains with nothing left scheduled.
    process.on("beforeExit", function () { return handler({ "type": "NOTHING ELSE TO DO" }); });
}
|
||
|
|
exports.setProcessExitHandler = setProcessExitHandler;
// Transpiled TypeScript namespace merge: attaches the ExitCause.Signal
// metadata and a swappable `log` hook onto the setProcessExitHandler function.
(function (setProcessExitHandler) {
    var ExitCause;
    (function (ExitCause) {
        var Signal;
        (function (Signal) {
            // The keys of _obj are the signals the exit handler listens for;
            // `list` is derived from them so the two can never diverge.
            Signal._obj = { "SIGINT": null, "SIGUSR2": null, "SIGHUP": null };
            Signal.list = Object.keys(Signal._obj);
        })(Signal = ExitCause.Signal || (ExitCause.Signal = {}));
    })(ExitCause = setProcessExitHandler.ExitCause || (setProcessExitHandler.ExitCause = {}));
    // No-op by default; callers may overwrite it to capture exit-handler logs.
    setProcessExitHandler.log = function () { };
})(setProcessExitHandler = exports.setProcessExitHandler || (exports.setProcessExitHandler = {}));
|
||
|
|
/**
|
||
|
|
*
|
||
|
|
* Stop a process by sending a specific signal to a target process.
|
||
|
|
* When the function return the main process and all it's descendent processes are terminated.
|
||
|
|
*
|
||
|
|
* The default signal is SIGUSR2 which is the signal used to gracefully terminate
|
||
|
|
* Process created by the createService function.
|
||
|
|
*
|
||
|
|
* Optionally runfiles_path can be provided to define a set of files
|
||
|
|
* that should be suppressed before returning.
|
||
|
|
*
|
||
|
|
* If pid is provided under the form of a pidfile path it will
|
||
|
|
* be added to the runfiles set.
|
||
|
|
*
|
||
|
|
* If all the processes do not terminate within [delay_before_sigkill]ms
* (default 5000) then KILL signal will be sent to all processes still alive.
|
||
|
|
*
|
||
|
|
* If the PID provided is the same that the PID of the process running the function
|
||
|
|
* PidMatchCurrentProcessError will be thrown.
|
||
|
|
*
|
||
|
|
*/
|
||
|
|
// Synchronously stop a target process (given by PID or pidfile path) and its
// whole descendant tree; see the JSDoc above for the full contract.
// Fix: the "pidfile unreadable" log message contained a duplicated word
// ("does does") — corrected to "Pidfile does not contain pid".
function stopProcessSync(pidfile_path_or_pid, signal, delay_before_sigkill, runfiles_path) {
    // SIGUSR2 is the graceful-stop signal used by createService daemons.
    if (signal === void 0) { signal = "SIGUSR2"; }
    // Time allotted for graceful termination before escalating to SIGKILL.
    if (delay_before_sigkill === void 0) { delay_before_sigkill = 5000; }
    if (runfiles_path === void 0) { runfiles_path = []; }
    // All logging goes through the overridable stopProcessSync.log hook.
    var log = function () {
        var args = [];
        for (var _i = 0; _i < arguments.length; _i++) {
            args[_i] = arguments[_i];
        }
        return stopProcessSync.log("[ stop process sync ] " + util.format.apply(util, args));
    };
    // Best-effort removal of every runfile (pidfile included when one was
    // provided); a failed unlink is logged, never thrown.
    var cleanupRunfiles = function () {
        var e_4, _a;
        try {
            for (var runfiles_path_1 = __values(runfiles_path), runfiles_path_1_1 = runfiles_path_1.next(); !runfiles_path_1_1.done; runfiles_path_1_1 = runfiles_path_1.next()) {
                var runfile_path = runfiles_path_1_1.value;
                if (fs.existsSync(runfile_path)) {
                    try {
                        fs.unlinkSync(runfile_path);
                        log(path.basename(runfile_path) + " runfile manually cleaned up.");
                    }
                    catch (_b) {
                        log(colorize("Could not remove runfile " + runfile_path, "RED"));
                    }
                }
            }
        }
        catch (e_4_1) { e_4 = { error: e_4_1 }; }
        finally {
            try {
                if (runfiles_path_1_1 && !runfiles_path_1_1.done && (_a = runfiles_path_1.return)) _a.call(runfiles_path_1);
            }
            finally { if (e_4) throw e_4.error; }
        }
    };
    var pid;
    if (typeof pidfile_path_or_pid === "number") {
        pid = pidfile_path_or_pid;
    }
    else {
        // A pidfile path was provided: it joins the runfile set and is read
        // to obtain the PID. Missing or unparsable pidfile => nothing to stop.
        var pidfile_path = pidfile_path_or_pid;
        runfiles_path = __spread([pidfile_path], runfiles_path);
        if (!fs.existsSync(pidfile_path)) {
            log("Pidfile does not exist, assuming process not running");
            cleanupRunfiles();
            return;
        }
        try {
            pid = parseInt(fs.readFileSync(pidfile_path).toString("utf8").replace(/\n$/, ""));
            if (isNaN(pid)) {
                throw new Error("pid is NaN");
            }
        }
        catch (_a) {
            log("Pidfile does not contain pid");
            cleanupRunfiles();
            return;
        }
    }
    // Refuse to stop ourselves; the caller gets the cleanup closure so it can
    // still delete the runfiles (see main_root's usage of this error).
    if (pid === process.pid) {
        throw new stopProcessSync.PidMatchCurrentProcessError(cleanupRunfiles);
    }
    // Snapshot the full descendant tree plus the target itself.
    var pids = __spread(stopProcessSync.getSubProcesses(pid, "FULL PROCESS TREE"), [
        pid
    ]);
    var startTime = Date.now();
    if (stopProcessSync.isProcessRunning(pid)) {
        log("Sending " + signal + " to target process (" + pid + ")");
        stopProcessSync.kill(pid, signal);
    }
    else {
        log("Target process (" + pid + ") is not running");
    }
    // Poll (100 ms granularity) until every process in the snapshot is gone;
    // once the grace delay elapses, SIGKILL everything still alive.
    var _loop_2 = function () {
        var e_5, _a;
        var runningPids = pids.filter(function (pid) { return stopProcessSync.isProcessRunning(pid); });
        if (runningPids.length === 0) {
            log("Target process (" + pid + ") and all it's sub processes are now terminated");
            return "break";
        }
        else if (Date.now() >= startTime + delay_before_sigkill) {
            log((function () {
                if (delay_before_sigkill === 0) {
                    return "Immediately sending SIGKILL to " + runningPids.length + " remaining sub processes of target process (" + pid + ")";
                }
                else {
                    return [
                        !!runningPids.find(function (_pid) { return _pid === pid; }) ?
                            "Target process (" + pid + ") and " + (runningPids.length - 1) + " of it's sub processes" :
                            runningPids.length + " sub processes of the target process (" + pid + ")",
                        "did not terminate in time, sending KILL signals."
                    ].join(" ");
                }
            })());
            try {
                for (var runningPids_1 = (e_5 = void 0, __values(runningPids)), runningPids_1_1 = runningPids_1.next(); !runningPids_1_1.done; runningPids_1_1 = runningPids_1.next()) {
                    var pid_1 = runningPids_1_1.value;
                    stopProcessSync.kill(pid_1, "SIGKILL");
                }
            }
            catch (e_5_1) { e_5 = { error: e_5_1 }; }
            finally {
                try {
                    if (runningPids_1_1 && !runningPids_1_1.done && (_a = runningPids_1.return)) _a.call(runningPids_1);
                }
                finally { if (e_5) throw e_5.error; }
            }
            return "continue";
        }
        // NOTE(review): synchronous sleep via a shelled-out `sleep 0.1` —
        // blocks the event loop by design (this whole routine is sync).
        execSyncNoCmdTrace("sleep 0.1");
    };
    while (true) {
        var state_1 = _loop_2();
        if (state_1 === "break")
            break;
    }
    cleanupRunfiles();
}
|
||
|
|
exports.stopProcessSync = stopProcessSync;
// Transpiled TypeScript namespace merge: helper functions and the error class
// are attached as properties of the stopProcessSync function itself.
(function (stopProcessSync) {
    // Thrown when the PID to stop is the calling process's own PID. Carries
    // the cleanupRunfiles closure so the caller can still delete runfiles.
    var PidMatchCurrentProcessError = /** @class */ (function (_super) {
        __extends(PidMatchCurrentProcessError, _super);
        function PidMatchCurrentProcessError(cleanupRunfiles) {
            var _newTarget = this.constructor;
            var _this = _super.call(this, "StopProcessSync error, provided PID is the PID of the current process") || this;
            _this.cleanupRunfiles = cleanupRunfiles;
            // Restore the prototype chain — required when extending Error in
            // ES5-compiled code so `instanceof` works.
            Object.setPrototypeOf(_this, _newTarget.prototype);
            return _this;
        }
        return PidMatchCurrentProcessError;
    }(Error));
    stopProcessSync.PidMatchCurrentProcessError = PidMatchCurrentProcessError;
    /**
     * Stopping process As Soon As Possible,
     * stopProcessSync with signal SIGKILL and timeout 0
     * */
    function stopProcessAsapSync(pidfile_path_or_pid, runfiles_path) {
        if (runfiles_path === void 0) { runfiles_path = []; }
        stopProcessSync(pidfile_path_or_pid, "SIGKILL", 0, runfiles_path);
    }
    stopProcessSync.stopProcessAsapSync = stopProcessAsapSync;
    /**
     * Terminate all child process of current process ASAP.
     *
     * NOTE: Directly after this function ( in the current tick )
     * direct parents process that had sub processes will be Zombies.
     * However they will be reaped by the current process on next tick.
     *
     */
    function stopSubProcessesAsapSync() {
        var e_6, _a;
        try {
            // Only direct children are enumerated here; stopProcessSync itself
            // recurses through each child's full tree.
            for (var _b = __values(getSubProcesses(process.pid, "DIRECT SUB PROCESSES ONLY")), _c = _b.next(); !_c.done; _c = _b.next()) {
                var pid = _c.value;
                // PIDs registered in ignorePids are deliberately spared.
                if (stopSubProcessesAsapSync.ignorePids.has(pid)) {
                    continue;
                }
                stopProcessSync(pid, "SIGKILL", 0);
            }
        }
        catch (e_6_1) { e_6 = { error: e_6_1 }; }
        finally {
            try {
                if (_c && !_c.done && (_a = _b.return)) _a.call(_b);
            }
            finally { if (e_6) throw e_6.error; }
        }
    }
    stopProcessSync.stopSubProcessesAsapSync = stopSubProcessesAsapSync;
    (function (stopSubProcessesAsapSync) {
        // Set of child PIDs that stopSubProcessesAsapSync must leave alive.
        stopSubProcessesAsapSync.ignorePids = new Set();
    })(stopSubProcessesAsapSync = stopProcessSync.stopSubProcessesAsapSync || (stopProcessSync.stopSubProcessesAsapSync = {}));
    /** Invoke kill, can't throw */
    function kill(pid, signal) {
        try {
            // NOTE(review): shells out to `kill` via /bin/bash — Unix-only,
            // presumably chosen over process.kill for uniform signal naming.
            execSyncNoCmdTrace("kill -" + signal + " " + pid, { "stdio": "pipe", "shell": "/bin/bash" });
        }
        catch (_a) {
        }
    }
    stopProcessSync.kill = kill;
    /**
     * Get the list of subprocess of a process ( return a list of pid )
     */
    function getSubProcesses(pid, depth) {
        // NOTE(review): relies on `/bin/ps --ppid` (procps-style flags) —
        // Linux-specific; confirm before reusing on other platforms.
        var _a = child_process.spawnSync("/bin/ps", ["--ppid", "" + pid, "-o", "pid,state"], { "shell": false }), stdout = _a.stdout, ps_pid = _a.pid, ps_exitCode = _a.status;
        // Non-zero exit means no children (or ps failure) — report none.
        if (ps_exitCode !== 0) {
            return [];
        }
        var pids = stdout
            .toString("utf8")
            .split("\n")
            .filter(function (v) { return !v.match(/Z/); }) // drop zombie-state rows
            .map(function (v) { return v.replace(/[^0-9]/g, ""); })
            .filter(function (v) { return !!v; })
            .map(function (v) { return parseInt(v); })
            .filter(function (pid) { return pid !== ps_pid; }); // exclude the ps process itself
        switch (depth) {
            case "DIRECT SUB PROCESSES ONLY": return pids;
            // Recursive case: depth-first expansion of every child's subtree.
            case "FULL PROCESS TREE": return (function () {
                var e_7, _a;
                var out = [];
                try {
                    for (var pids_1 = __values(pids), pids_1_1 = pids_1.next(); !pids_1_1.done; pids_1_1 = pids_1.next()) {
                        var pid_2 = pids_1_1.value;
                        out = __spread(out, getSubProcesses(pid_2, "FULL PROCESS TREE"), [pid_2]);
                    }
                }
                catch (e_7_1) { e_7 = { error: e_7_1 }; }
                finally {
                    try {
                        if (pids_1_1 && !pids_1_1.done && (_a = pids_1.return)) _a.call(pids_1);
                    }
                    finally { if (e_7) throw e_7.error; }
                }
                return out;
            })();
        }
    }
    stopProcessSync.getSubProcesses = getSubProcesses;
    /** Return true only if exist and is not a daemon */
    function isProcessRunning(pid) {
        var psCmdOut;
        try {
            // `ps --pid` exits non-zero when the PID does not exist.
            psCmdOut = execSyncNoCmdTrace("ps --pid " + pid + " -o state");
        }
        catch (_a) {
            return false;
        }
        // A "Z" in the state column marks a zombie, treated as not running.
        return !psCmdOut.match(/Z/);
    }
    stopProcessSync.isProcessRunning = isProcessRunning;
    /** Debug function to print the process tree of the current process. */
    function _printProcessTree(log) {
        if (log === void 0) { log = console.log.bind(console); }
        // Recursively populates node.sub with the children of node.pid.
        var rec = function (node) {
            var e_8, _a;
            var pids = getSubProcesses(node.pid, "DIRECT SUB PROCESSES ONLY");
            if (pids.length === 0) {
                return;
            }
            node.sub = [];
            try {
                for (var pids_2 = __values(pids), pids_2_1 = pids_2.next(); !pids_2_1.done; pids_2_1 = pids_2.next()) {
                    var pid = pids_2_1.value;
                    var sub_node = { pid: pid };
                    node.sub.push(sub_node);
                    rec(sub_node);
                }
            }
            catch (e_8_1) { e_8 = { error: e_8_1 }; }
            finally {
                try {
                    if (pids_2_1 && !pids_2_1.done && (_a = pids_2.return)) _a.call(pids_2);
                }
                finally { if (e_8) throw e_8.error; }
            }
        };
        var tree = { "pid": process.pid };
        rec(tree);
        log(JSON.stringify(tree, null, 3));
    }
    stopProcessSync._printProcessTree = _printProcessTree;
    // No-op by default; callers may overwrite it to capture the logs.
    stopProcessSync.log = function () { };
})(stopProcessSync = exports.stopProcessSync || (exports.stopProcessSync = {}));
|
||
|
|
/**
|
||
|
|
*
|
||
|
|
* Function to create the entry point (main.js) of a node service that can:
|
||
|
|
* -Restart on crash (without relying on systemd to do so).
|
||
|
|
* -Execute as specific unix user but can perform tasks as root before start.
|
||
|
|
* -Be stopped gracefully by sending USR2 signal on the root process ( identified by pidfile ).
|
||
|
|
* -Be started via a shell and gracefully stopped with CTRL-C (INT signal).
|
||
|
|
* -Ensure only one instance of the service run at the same time.
|
||
|
|
* ( if at the time the main is called there is an other instance of the service
|
||
|
|
* running it is gracefully terminated )
|
||
|
|
* -Ensure that the process will terminate in at most [ stop_timeout ] ms after
|
||
|
|
* receiving INT or USR2 signal. (default 5second)
|
||
|
|
* -Forward daemon processes stdout to root process stdout.
|
||
|
|
* -Can fork multiple daemon process.
|
||
|
|
*
|
||
|
|
* The root process forward command line arguments and environnement variable to
|
||
|
|
* the daemon processes.
|
||
|
|
*
|
||
|
|
* => rootProcess function should return ( when not default ):
|
||
|
|
* -pidfile_path: where to store the pid of the root process.
|
||
|
|
* take to terminate after requested to exit gracefully.
|
||
|
|
* -srv_name: Name of the service to overwrite the process names. (Default: not overwriting)
|
||
|
|
* -stop_timeout: The maximum amount of time ( in ms ) the
|
||
|
|
* that beforeExitTask can take to complete before being killed by force by root process.
|
||
|
|
* After receiving USR2 signal or CTRL-C, the root process will be closed within [stop_timeout]+1000ms
|
||
|
|
* -assert_unix_user: enforce that the main be called by a specific user.
|
||
|
|
* -isQuiet?: set to true to disable process debug info logging on stdout. Prefixed by [ service ]. ( default false )
|
||
|
|
* -doForwardDaemonStdout?: set to true to forward everything the daemon
|
||
|
|
* process write to stdout to the root process stdout. ( default true )
|
||
|
|
* -daemon_unix_user?: User who should own the daemon process.
|
||
|
|
* -daemon_node_path?: Node.js executable that should be used to by the daemon process.
|
||
|
|
* -daemon_cwd?: working directory of the daemon process.
|
||
|
|
* -daemon_restart_after_crash_delay?: ( Default to 500ms. )Delay in ms before restarting the daemon
|
||
|
|
* after it terminate without being requested to. If set to a negative number the daemons
|
||
|
|
* will not be restarted after it terminate for the first time and :
|
||
|
|
* If all daemons process exited with 0 and there is no other daemon process the root process
|
||
|
|
* will end with a clean exit code.
|
||
|
|
* If any of the daemon exit with an unclean code the root process will be terminated with an error code
|
||
|
|
* even if there is some other daemon running.
|
||
|
|
* -daemon_count: Number of instance of daemon process that should be forked, default 1.
|
||
|
|
* -max_consecutive_restart: Number of time a daemon should be restarted after crashing right after start.
|
||
|
|
* (Default ~Infinity).
|
||
|
|
* -preForkTask: Task to perform before forking a daemon process.
|
||
|
|
* It is called just before forking the daemon process. ( called again on every restart. )
|
||
|
|
* If the function is async the daemon will not be forked until the returned promise resolve.
|
||
|
|
* If the function throw exception root process will exit with code 1.
|
||
|
|
* (pidfile will be deleted)
|
||
|
|
* If the function is async and if it need to spawn child processes then
|
||
|
|
* an implementation for terminateSubProcess ( passed as reference ) should be provided so that
|
||
|
|
* if when called it kill all the child processes then resolve once they are terminated.
|
||
|
|
* The value to which the promise resolves will be used as exit code for the root process.
|
||
|
|
* Note that terminateSubProcess should never be called, it is a OUT parameter.
|
||
|
|
* However if the implementation provided is just to send a SIGKILL to the forked processes
|
||
|
|
* then there is no need to provide an implementation as all the root process's sub processes tree
|
||
|
|
* will be killed before exiting anyway.
|
||
|
|
*
|
||
|
|
* => daemonProcess
|
||
|
|
* It should return:
|
||
|
|
* -launch: the function that the daemon process need to call to start the actual job that the service is meant to perform.
|
||
|
|
* -beforeExitTask: function that should be called before the daemon process exit. ( e.g. creating crash report ).
|
||
|
|
* If the daemon process is terminating due to an error the error will be passed as argument.
|
||
|
|
* There is two scenario that will led to this function NOT being called:
|
||
|
|
* 1)The daemon process receive KILL or other deadly signal that can't be overridden.
|
||
|
|
* 2)The root process terminate.
|
||
|
|
* daemon_number represent the instance index of the daemon among the total of [damon_count] process forked.
|
||
|
|
* It can be used, for example, to select a different logfile for each daemon process instance.
|
||
|
|
*
|
||
|
|
* NOTE: If the root process receive a deadly signal other than INT, USR2 or HUP
|
||
|
|
* ( e.g. KILL or STOP ) the root and daemon processes will immediately terminate without
|
||
|
|
* executing beforeExit tasks or removing pidfile.
|
||
|
|
*
|
||
|
|
* NOTE: because setting listener on "message" and "disconnect" process event prevent the
|
||
|
|
* thread from terminating naturally where is nothing more to do if you wish to manually
|
||
|
|
* terminate the daemon process without termination being requested from the parent you can:
|
||
|
|
* 1) emit "beforeExit" on process setting the desired exit code ( process.emit("beforeExit", process.exitCode= X);
|
||
|
|
* 2) throw an exception.
|
||
|
|
*
|
||
|
|
*/
|
||
|
|
function createService(params) {
|
||
|
|
var _this = this;
|
||
|
|
var log = (function () { });
|
||
|
|
var getLog = function (prefix) {
|
||
|
|
return (function () {
|
||
|
|
var args = [];
|
||
|
|
for (var _i = 0; _i < arguments.length; _i++) {
|
||
|
|
args[_i] = arguments[_i];
|
||
|
|
}
|
||
|
|
return process.stdout.write(Buffer.from("[service] ( " + prefix + " ) " + util.format.apply(util, args) + "\n", "utf8"));
|
||
|
|
});
|
||
|
|
};
|
||
|
|
var rootProcess = params.rootProcess, daemonProcess = params.daemonProcess;
|
||
|
|
var main_root = function (main_js_path) { return __awaiter(_this, void 0, void 0, function () {
|
||
|
|
var _a, pidfile_path, srv_name, _stop_timeout, assert_unix_user, isQuiet, _doForwardDaemonStdout, daemon_unix_user, daemon_node_path, daemon_cwd, _daemon_restart_after_crash_delay, max_consecutive_restart, preForkTask, _daemon_count, stop_timeout, doForwardDaemonStdout, daemon_restart_after_crash_delay, daemon_count, daemonContexts, isTerminating, args, _b, daemon_uid, daemon_gid, makeForkOptions, forkDaemon, daemon_number;
|
||
|
|
var _this = this;
|
||
|
|
return __generator(this, function (_c) {
|
||
|
|
switch (_c.label) {
|
||
|
|
case 0: return [4 /*yield*/, rootProcess()];
|
||
|
|
case 1:
|
||
|
|
_a = _c.sent(), pidfile_path = _a.pidfile_path, srv_name = _a.srv_name, _stop_timeout = _a.stop_timeout, assert_unix_user = _a.assert_unix_user, isQuiet = _a.isQuiet, _doForwardDaemonStdout = _a.doForwardDaemonStdout, daemon_unix_user = _a.daemon_unix_user, daemon_node_path = _a.daemon_node_path, daemon_cwd = _a.daemon_cwd, _daemon_restart_after_crash_delay = _a.daemon_restart_after_crash_delay, max_consecutive_restart = _a.max_consecutive_restart, preForkTask = _a.preForkTask, _daemon_count = _a.daemon_count;
|
||
|
|
if (srv_name !== undefined) {
|
||
|
|
process.title = srv_name + " root process";
|
||
|
|
}
|
||
|
|
stop_timeout = _stop_timeout !== undefined ?
|
||
|
|
_stop_timeout : 5000;
|
||
|
|
doForwardDaemonStdout = _doForwardDaemonStdout !== undefined ?
|
||
|
|
_doForwardDaemonStdout : true;
|
||
|
|
daemon_restart_after_crash_delay = _daemon_restart_after_crash_delay !== undefined ?
|
||
|
|
_daemon_restart_after_crash_delay : 500;
|
||
|
|
daemon_count = _daemon_count !== undefined ?
|
||
|
|
_daemon_count : 1;
|
||
|
|
if (assert_unix_user !== undefined && os.userInfo().username !== assert_unix_user) {
|
||
|
|
console.log(colorize("Must be run as " + assert_unix_user, "RED"));
|
||
|
|
process.exit(1);
|
||
|
|
return [2 /*return*/];
|
||
|
|
}
|
||
|
|
if (!isQuiet) {
|
||
|
|
log = getLog("root process");
|
||
|
|
}
|
||
|
|
stopProcessSync.log = log;
|
||
|
|
try {
|
||
|
|
stopProcessSync(pidfile_path);
|
||
|
|
}
|
||
|
|
catch (error) {
|
||
|
|
if (!(error instanceof stopProcessSync.PidMatchCurrentProcessError)) {
|
||
|
|
throw error;
|
||
|
|
}
|
||
|
|
error.cleanupRunfiles();
|
||
|
|
}
|
||
|
|
if (fs.existsSync(pidfile_path)) {
|
||
|
|
throw Error("Other instance launched simultaneously");
|
||
|
|
}
|
||
|
|
(function createPidfile() {
|
||
|
|
var pidfile_dir_path = path.dirname(pidfile_path);
|
||
|
|
if (!fs.existsSync(pidfile_dir_path)) {
|
||
|
|
execSyncNoCmdTrace("mkdir -p " + pidfile_dir_path);
|
||
|
|
}
|
||
|
|
fs.writeFileSync(pidfile_path, process.pid.toString());
|
||
|
|
})();
|
||
|
|
log("PID: " + process.pid);
|
||
|
|
daemonContexts = new Map((new Array(daemon_count))
|
||
|
|
.fill(null)
|
||
|
|
.map(function (_, index) {
|
||
|
|
var context = [
|
||
|
|
index + 1,
|
||
|
|
{
|
||
|
|
"daemonProcess": undefined,
|
||
|
|
"terminatePreForkChildProcesses": { "impl": function () { return Promise.resolve(); } },
|
||
|
|
"restart_attempt_remaining": max_consecutive_restart || NaN,
|
||
|
|
"reset_restart_attempt_timer": setTimeout(function () { }, 0)
|
||
|
|
}
|
||
|
|
];
|
||
|
|
return context;
|
||
|
|
}));
|
||
|
|
isTerminating = false;
|
||
|
|
setProcessExitHandler(function (exitCause) { return __awaiter(_this, void 0, void 0, function () {
|
||
|
|
var childProcessExitCode;
|
||
|
|
return __generator(this, function (_a) {
|
||
|
|
switch (_a.label) {
|
||
|
|
case 0:
|
||
|
|
isTerminating = true;
|
||
|
|
return [4 /*yield*/, (function terminateAllChildProcesses() {
|
||
|
|
return __awaiter(this, void 0, void 0, function () {
|
||
|
|
var terminateDaemonProcess, terminatePreForkChildProcessesSafeCall, tasks, _loop_3, _a, _b, _c, daemonProcess_1, terminatePreForkChildProcesses;
|
||
|
|
var e_9, _d;
|
||
|
|
var _this = this;
|
||
|
|
return __generator(this, function (_e) {
|
||
|
|
switch (_e.label) {
|
||
|
|
case 0:
|
||
|
|
terminateDaemonProcess = function (daemonProcess) { return __awaiter(_this, void 0, void 0, function () {
|
||
|
|
return __generator(this, function (_a) {
|
||
|
|
return [2 /*return*/, new Promise(function (resolve) {
|
||
|
|
log("Attempt to gracefully terminate daemon process PID: " + daemonProcess.pid + "...");
|
||
|
|
daemonProcess.send(null);
|
||
|
|
var timer = setTimeout(function () { return doStopAsap(); }, stop_timeout + 500);
|
||
|
|
daemonProcess.once("error", function () { return doStopAsap(); });
|
||
|
|
daemonProcess.once("close", function (childProcessExitCode) {
|
||
|
|
clearTimeout(timer);
|
||
|
|
log("Daemon process PID: " + daemonProcess.pid + " exited with code " + childProcessExitCode);
|
||
|
|
if (typeof childProcessExitCode !== "number" || isNaN(childProcessExitCode)) {
|
||
|
|
childProcessExitCode = 1;
|
||
|
|
}
|
||
|
|
resolve(childProcessExitCode);
|
||
|
|
});
|
||
|
|
var doStopAsap = function () {
|
||
|
|
log("Daemon process PID:" + daemonProcess.pid + " not responding, force kill...");
|
||
|
|
clearTimeout(timer);
|
||
|
|
daemonProcess.removeAllListeners("error");
|
||
|
|
daemonProcess.removeAllListeners("close");
|
||
|
|
stopProcessSync.stopProcessAsapSync(daemonProcess.pid);
|
||
|
|
resolve(1);
|
||
|
|
};
|
||
|
|
})];
|
||
|
|
});
|
||
|
|
}); };
|
||
|
|
terminatePreForkChildProcessesSafeCall = function (impl) {
|
||
|
|
var timer;
|
||
|
|
return Promise.race([
|
||
|
|
new Promise(function (resolve) { return timer = setTimeout(function () { return resolve("TIMEOUT"); }, stop_timeout + 500); }),
|
||
|
|
(function () { return __awaiter(_this, void 0, void 0, function () {
|
||
|
|
var result, _a;
|
||
|
|
return __generator(this, function (_b) {
|
||
|
|
switch (_b.label) {
|
||
|
|
case 0:
|
||
|
|
_b.trys.push([0, 2, , 3]);
|
||
|
|
return [4 /*yield*/, impl()];
|
||
|
|
case 1:
|
||
|
|
_b.sent();
|
||
|
|
result = "SUCCESS";
|
||
|
|
return [3 /*break*/, 3];
|
||
|
|
case 2:
|
||
|
|
_a = _b.sent();
|
||
|
|
result = "ERROR";
|
||
|
|
return [3 /*break*/, 3];
|
||
|
|
case 3:
|
||
|
|
clearTimeout(timer);
|
||
|
|
return [2 /*return*/, result];
|
||
|
|
}
|
||
|
|
});
|
||
|
|
}); })()
|
||
|
|
]);
|
||
|
|
};
|
||
|
|
tasks = [];
|
||
|
|
_loop_3 = function (daemonProcess_1, terminatePreForkChildProcesses) {
|
||
|
|
tasks[tasks.length] = !daemonProcess_1 ? (new Promise(function (resolve) { return terminatePreForkChildProcessesSafeCall(terminatePreForkChildProcesses.impl)
|
||
|
|
.then(function (result) { return result === "SUCCESS" ? resolve(0) : resolve(1); }); })) : terminateDaemonProcess(daemonProcess_1);
|
||
|
|
};
|
||
|
|
try {
|
||
|
|
for (_a = __values(daemonContexts.values()), _b = _a.next(); !_b.done; _b = _a.next()) {
|
||
|
|
_c = _b.value, daemonProcess_1 = _c.daemonProcess, terminatePreForkChildProcesses = _c.terminatePreForkChildProcesses;
|
||
|
|
_loop_3(daemonProcess_1, terminatePreForkChildProcesses);
|
||
|
|
}
|
||
|
|
}
|
||
|
|
catch (e_9_1) { e_9 = { error: e_9_1 }; }
|
||
|
|
finally {
|
||
|
|
try {
|
||
|
|
if (_b && !_b.done && (_d = _a.return)) _d.call(_a);
|
||
|
|
}
|
||
|
|
finally { if (e_9) throw e_9.error; }
|
||
|
|
}
|
||
|
|
return [4 /*yield*/, Promise.all(tasks)];
|
||
|
|
case 1: return [2 /*return*/, (_e.sent()).reduce(function (accumulator, currentValue) { return accumulator === 0 ? currentValue : accumulator; }, 0)];
|
||
|
|
}
|
||
|
|
});
|
||
|
|
});
|
||
|
|
})()];
|
||
|
|
case 1:
|
||
|
|
childProcessExitCode = _a.sent();
|
||
|
|
if (exitCause.type === "EXCEPTION") {
|
||
|
|
/*
|
||
|
|
preForkTask throw or daemonProcess emit error or
|
||
|
|
one of the daemon exited with a non 0 code and
|
||
|
|
restart_delay was set <0
|
||
|
|
*/
|
||
|
|
log("Root process exception message: " + exitCause.error.message);
|
||
|
|
process.exitCode = 1;
|
||
|
|
}
|
||
|
|
else {
|
||
|
|
process.exitCode = childProcessExitCode;
|
||
|
|
}
|
||
|
|
fs.unlinkSync(pidfile_path);
|
||
|
|
log("pidfile deleted");
|
||
|
|
return [2 /*return*/];
|
||
|
|
}
|
||
|
|
});
|
||
|
|
}); }, stop_timeout + 1000);
|
||
|
|
setProcessExitHandler.log = log;
|
||
|
|
args = (function () {
|
||
|
|
var out = __spread(process.argv);
|
||
|
|
out.shift();
|
||
|
|
out.shift();
|
||
|
|
return out;
|
||
|
|
})();
|
||
|
|
_b = __read((function () {
|
||
|
|
if (!!daemon_unix_user) {
|
||
|
|
return [get_uid(daemon_unix_user), get_gid(daemon_unix_user)];
|
||
|
|
}
|
||
|
|
else {
|
||
|
|
return [undefined, undefined];
|
||
|
|
}
|
||
|
|
})(), 2), daemon_uid = _b[0], daemon_gid = _b[1];
|
||
|
|
makeForkOptions = function (daemon_number) { return ({
|
||
|
|
"uid": daemon_uid,
|
||
|
|
"gid": daemon_gid,
|
||
|
|
"silent": true,
|
||
|
|
"cwd": daemon_cwd,
|
||
|
|
"execPath": daemon_node_path,
|
||
|
|
"env": __assign({}, process.env, { daemon_number: daemon_number,
|
||
|
|
daemon_count: daemon_count,
|
||
|
|
srv_name: srv_name,
|
||
|
|
stop_timeout: stop_timeout, "isQuiet": isQuiet ? "1" : "0" })
|
||
|
|
}); };
|
||
|
|
forkDaemon = function (daemon_number) { return __awaiter(_this, void 0, void 0, function () {
|
||
|
|
var context, error_5, daemonProcess;
|
||
|
|
return __generator(this, function (_a) {
|
||
|
|
switch (_a.label) {
|
||
|
|
case 0:
|
||
|
|
context = daemonContexts.get(daemon_number);
|
||
|
|
clearTimeout(context.reset_restart_attempt_timer);
|
||
|
|
if (!!!preForkTask) return [3 /*break*/, 5];
|
||
|
|
log("performing pre fork tasks for daemon number " + daemon_number + "...");
|
||
|
|
_a.label = 1;
|
||
|
|
case 1:
|
||
|
|
_a.trys.push([1, 3, , 4]);
|
||
|
|
return [4 /*yield*/, preForkTask(context.terminatePreForkChildProcesses, daemon_number)];
|
||
|
|
case 2:
|
||
|
|
_a.sent();
|
||
|
|
return [3 /*break*/, 4];
|
||
|
|
case 3:
|
||
|
|
error_5 = _a.sent();
|
||
|
|
log("PreFork tasks for daemon number " + daemon_number + " raised an exception");
|
||
|
|
throw error_5;
|
||
|
|
case 4:
|
||
|
|
context.terminatePreForkChildProcesses.impl = function () { return Promise.resolve(); };
|
||
|
|
_a.label = 5;
|
||
|
|
case 5:
|
||
|
|
if (isTerminating) {
|
||
|
|
log("Not forking daemon because root process is terminating");
|
||
|
|
return [2 /*return*/];
|
||
|
|
}
|
||
|
|
if (max_consecutive_restart !== undefined) {
|
||
|
|
context.reset_restart_attempt_timer = setTimeout(function () { return context.restart_attempt_remaining = max_consecutive_restart; }, 10000);
|
||
|
|
}
|
||
|
|
log("Forking daemon process number " + daemon_number + " now.");
|
||
|
|
daemonProcess = child_process.fork(main_js_path, args, makeForkOptions(daemon_number));
|
||
|
|
context.daemonProcess = daemonProcess;
|
||
|
|
if (doForwardDaemonStdout) {
|
||
|
|
daemonProcess.stdout.on("data", function (data) {
|
||
|
|
return process.stdout.write(data);
|
||
|
|
});
|
||
|
|
}
|
||
|
|
daemonProcess.once("error", function (error) {
|
||
|
|
if (isTerminating) {
|
||
|
|
return;
|
||
|
|
}
|
||
|
|
context.daemonProcess = undefined;
|
||
|
|
log([
|
||
|
|
"Error evt emitted by daemon process number " + daemon_number,
|
||
|
|
"Meaning that: ",
|
||
|
|
"The process could not be spawned, or",
|
||
|
|
"The process could not be killed, or",
|
||
|
|
"Sending a message to the child process failed."
|
||
|
|
].join("\n"));
|
||
|
|
throw error;
|
||
|
|
});
|
||
|
|
daemonProcess.once("close", function (childProcessExitCode) {
|
||
|
|
if (isTerminating) {
|
||
|
|
return;
|
||
|
|
}
|
||
|
|
context.daemonProcess = undefined;
|
||
|
|
log("Daemon process " + daemon_number + " exited without being requested to.");
|
||
|
|
if (daemon_restart_after_crash_delay < 0) {
|
||
|
|
if (childProcessExitCode === null) {
|
||
|
|
childProcessExitCode = 1;
|
||
|
|
}
|
||
|
|
log("Daemon number " + daemon_number + " will not be restarted.");
|
||
|
|
clearTimeout(context.reset_restart_attempt_timer);
|
||
|
|
if (childProcessExitCode !== 0) {
|
||
|
|
throw new Error("Daemon number " + daemon_number + ", crashed");
|
||
|
|
}
|
||
|
|
else if (!Array.from(daemonContexts.values()).find(function (_a) {
|
||
|
|
var daemonProcess = _a.daemonProcess;
|
||
|
|
return !!daemonProcess;
|
||
|
|
})) {
|
||
|
|
log("As last remaining daemon process terminated cleanly we stop end root process");
|
||
|
|
process.emit("beforeExit", NaN);
|
||
|
|
}
|
||
|
|
return;
|
||
|
|
}
|
||
|
|
if (max_consecutive_restart !== undefined) {
|
||
|
|
if (context.restart_attempt_remaining-- === 0) {
|
||
|
|
throw new Error("Daemon process " + daemon_number + " is crashing over and over");
|
||
|
|
}
|
||
|
|
else {
|
||
|
|
log("Restart remaining: " + context.restart_attempt_remaining);
|
||
|
|
}
|
||
|
|
}
|
||
|
|
log("Daemon process " + daemon_number + " will be restarted");
|
||
|
|
setTimeout(function () { return forkDaemon(daemon_number); }, daemon_restart_after_crash_delay);
|
||
|
|
});
|
||
|
|
return [2 /*return*/];
|
||
|
|
}
|
||
|
|
});
|
||
|
|
}); };
|
||
|
|
for (daemon_number = 1; daemon_number <= daemon_count; daemon_number++) {
|
||
|
|
forkDaemon(daemon_number);
|
||
|
|
}
|
||
|
|
return [2 /*return*/];
|
||
|
|
}
|
||
|
|
});
|
||
|
|
}); };
|
||
|
|
// Entry point run inside each forked daemon process (the child side of
// child_process.fork). It rehydrates the configuration the root process
// passed through environment variables, wires up IPC/exit handling, then
// calls the user-provided launch().
var main_daemon = function () { return __awaiter(_this, void 0, void 0, function () {
    var _a, daemon_number, daemon_count, stop_timeout, isQuiet, srv_name, _b, launch, beforeExitTask;
    var _this = this;
    return __generator(this, function (_c) {
        switch (_c.label) {
            case 0:
                // Numeric settings travel as env vars; read then scrub them so
                // they do not leak into the daemon's own children.
                _a = __read(["daemon_number", "daemon_count", "stop_timeout", "isQuiet"].map(function (key) {
                    var value = parseInt(process.env[key]);
                    delete process.env[key];
                    return value;
                }), 4), daemon_number = _a[0], daemon_count = _a[1], stop_timeout = _a[2], isQuiet = _a[3];
                srv_name = (function () {
                    var key = "srv_name";
                    var value = process.env[key];
                    delete process.env[key];
                    // The root process serializes `undefined` as the literal
                    // string "undefined"; map it back.
                    if (value === "" + undefined) {
                        return undefined;
                    }
                    else {
                        return value;
                    }
                })();
                if (!isQuiet) {
                    log = getLog("daemon process " + daemon_number + "/" + daemon_count + ", PID: " + process.pid);
                }
                if (srv_name !== undefined) {
                    // Only suffix the daemon number when more than one daemon runs.
                    process.title = srv_name + " daemon " + (daemon_count === 1 ? "" : daemon_number);
                }
                return [4 /*yield*/, daemonProcess(daemon_number, daemon_count)];
            case 1:
                _b = _c.sent(), launch = _b.launch, beforeExitTask = _b.beforeExitTask;
                // Any IPC message from the root process means "shut down cleanly".
                process.once("message", function () { return process.emit("beforeExit", process.exitCode = 0); });
                // Losing the IPC channel means the root process died: exit non-zero.
                process.once("disconnect", function () { return process.exit(1); });
                setProcessExitHandler(function (exitCause) { return __awaiter(_this, void 0, void 0, function () {
                    var error, prBeforeExitTask;
                    return __generator(this, function (_a) {
                        switch (_a.label) {
                            case 0:
                                error = exitCause.type === "EXCEPTION" ? exitCause.error : undefined;
                                if (!!!beforeExitTask) return [3 /*break*/, 2];
                                prBeforeExitTask = beforeExitTask(error);
                                if (!(prBeforeExitTask instanceof Promise)) return [3 /*break*/, 2];
                                // Give the user cleanup a bounded window (stop_timeout + 2s).
                                return [4 /*yield*/, safePr(prBeforeExitTask, stop_timeout + 2000).then(function (error) {
                                        if (error instanceof Error) {
                                            //NOTE: Throwing does not overwrite the exit code.
                                            if (error.message === safePr.timeoutErrorMessage) {
                                                //NOTE: Throwing string to not have the log of setProcessExitHandler
                                                //display the stack trace.
                                                throw "beforeExitTask took too much time to complete.";
                                            }
                                            else {
                                                throw error;
                                            }
                                        }
                                    })];
                            case 1:
                                _a.sent();
                                _a.label = 2;
                            case 2: return [2 /*return*/];
                        }
                    });
                // -1: no handler timeout; signals are handled by the root process.
                }); }, -1, function (exitCause) { return exitCause.type !== "SIGNAL"; });
                setProcessExitHandler.log = log;
                launch();
                return [2 /*return*/];
        }
    });
}); };
|
||
|
|
if (!process.send) {
|
||
|
|
main_root(get_caller_file_path_2.get_caller_file_path());
|
||
|
|
}
|
||
|
|
else {
|
||
|
|
main_daemon();
|
||
|
|
}
|
||
|
|
}
|
||
|
|
exports.createService = createService;
|
||
|
|
// Helpers to install/uninstall a systemd unit for a service created with
// createService(). Both functions shell out to systemctl synchronously.
var systemd;
(function (systemd) {
    // Path of the unit file for a given service name.
    var mkPath = function (srv_name) { return "/etc/systemd/system/" + srv_name + ".service"; };
    /**
     * Generate a systemd config file for a service created via "createService" function
     * enable by default, start by default.
     *
     * NOTE(review): srv_name/node_path/main_js_path are interpolated directly
     * into shell commands and the unit file without quoting — callers must
     * pass trusted values (paths containing spaces or shell metacharacters
     * would break or be unsafe). Confirm against callers.
     */
    function createConfigFile(srv_name, main_js_path, node_path, enable, start) {
        if (node_path === void 0) { node_path = process.argv[0]; }
        if (enable === void 0) { enable = "ENABLE"; }
        if (start === void 0) { start = "START"; }
        // KillSignal=SIGUSR2 + SendSIGKILL=no: the service's own exit handler
        // is expected to perform the shutdown (see setProcessExitHandler usage).
        fs.writeFileSync(mkPath(srv_name), Buffer.from([
            "[Unit]",
            "After=network.target",
            "",
            "[Service]",
            "ExecStart=" + node_path + " " + main_js_path,
            "StandardOutput=inherit",
            "KillMode=process",
            "KillSignal=SIGUSR2",
            "SendSIGKILL=no",
            "Environment=NODE_ENV=production",
            "",
            "[Install]",
            "WantedBy=multi-user.target",
            ""
        ].join("\n"), "utf8"));
        execSyncNoCmdTrace("systemctl daemon-reload");
        // enable/start are truthy flags ("ENABLE"/"START" by default).
        if (!!enable) {
            execSyncNoCmdTrace("systemctl enable " + srv_name, { "stdio": "pipe" });
        }
        if (!!start) {
            execSyncNoCmdTrace("systemctl start " + srv_name);
        }
    }
    systemd.createConfigFile = createConfigFile;
    /** Remove config file disable and reload daemon, never throw, stop is false by default */
    function deleteConfigFile(srv_name, stop) {
        if (stop === void 0) { stop = false; }
        if (!!stop) {
            // "|| true" keeps every step best-effort so this never throws.
            execSyncNoCmdTrace("systemctl stop " + srv_name + " || true", { "stdio": "pipe" });
        }
        execSyncNoCmdTrace("systemctl disable " + srv_name + " || true", { "stdio": "pipe" });
        try {
            fs.unlinkSync(mkPath(srv_name));
        }
        catch (_a) { }
        execSyncNoCmdTrace("systemctl daemon-reload || true", { "stdio": "pipe" });
    }
    systemd.deleteConfigFile = deleteConfigFile;
})(systemd = exports.systemd || (exports.systemd = {}));
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 427:
|
||
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";

// Resolve a libuv error code (negative errno) to its name, e.g. -2 -> ENOENT.
// Older versions of Node.js might not have `util.getSystemErrorName()`.
// In that case, fall back to the deprecated internal `process.binding('uv')`.
const util = __webpack_require__(669);

let uv;

if (typeof util.getSystemErrorName === 'function') {
    // Modern Node: delegate directly to the official API.
    module.exports = util.getSystemErrorName;
} else {
    try {
        uv = process.binding('uv');

        if (typeof uv.errname !== 'function') {
            throw new TypeError('uv.errname is not a function');
        }
    } catch (err) {
        // Binding unavailable (e.g. restricted environments): errname() below
        // will fall back to a generic "Unknown system error N" string.
        console.error('execa/lib/errname: unable to establish process.binding(\'uv\')', err);
        uv = null;
    }

    module.exports = code => errname(uv, code);
}

// Used for testing the fallback behavior
module.exports.__test__ = errname;
|
||
|
|
|
||
|
|
/**
 * Map a negative system error code to a name.
 * Uses the libuv binding when one is available; otherwise produces a
 * generic "Unknown system error N" string. Non-negative codes are invalid.
 */
function errname(uv, code) {
    if (uv) {
        return uv.errname(code);
    }

    if (!(code < 0)) {
        throw new Error('err >= 0');
    }

    return 'Unknown system error ' + code;
}
|
||
|
|
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 431:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
// TypeScript interop helper: `import * as X` for CommonJS modules.
// Real ES modules (marked __esModule) pass through unchanged; anything else
// gets its own enumerable properties copied onto a fresh namespace object
// whose `default` points at the original module.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    var result = {};
    if (mod != null) {
        Object.keys(mod).forEach(function (key) {
            result[key] = mod[key];
        });
    }
    result["default"] = mod;
    return result;
};
|
||
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
|
const os = __importStar(__webpack_require__(87));
|
||
|
|
const utils_1 = __webpack_require__(82);
|
||
|
|
/**
|
||
|
|
* Commands
|
||
|
|
*
|
||
|
|
* Command Format:
|
||
|
|
* ::name key=value,key=value::message
|
||
|
|
*
|
||
|
|
* Examples:
|
||
|
|
* ::warning::This is the message
|
||
|
|
* ::set-env name=MY_VAR::some value
|
||
|
|
*/
|
||
|
|
/**
 * Write one workflow command line ("::name key=value,...::message")
 * to stdout, terminated by the platform newline.
 */
function issueCommand(command, properties, message) {
    const formatted = new Command(command, properties, message).toString();
    process.stdout.write(formatted + os.EOL);
}
exports.issueCommand = issueCommand;
|
||
|
|
/** Emit a workflow command that carries no properties. */
function issue(name, message = '') {
    const noProperties = {};
    issueCommand(name, noProperties, message);
}
exports.issue = issue;
|
||
|
|
const CMD_STRING = '::';
/**
 * One workflow command line. Rendered by toString() as
 * "::command key=value,key=value::message", where falsy property values
 * are omitted and both keys' values and the message are escaped.
 */
class Command {
    constructor(command, properties, message) {
        // An empty command name would produce an unparseable line.
        this.command = command || 'missing.command';
        this.properties = properties;
        this.message = message;
    }
    toString() {
        let cmdStr = CMD_STRING + this.command;
        const props = this.properties;
        if (props && Object.keys(props).length > 0) {
            const pairs = Object.entries(props)
                .filter(([, val]) => Boolean(val))
                .map(([key, val]) => `${key}=${escapeProperty(val)}`);
            cmdStr += ' ' + pairs.join(',');
        }
        cmdStr += CMD_STRING + escapeData(this.message);
        return cmdStr;
    }
}
|
||
|
|
/** Escape %, CR and LF in a command message payload. */
function escapeData(s) {
    const value = utils_1.toCommandValue(s);
    return value
        .split('%').join('%25')
        .split('\r').join('%0D')
        .split('\n').join('%0A');
}
|
||
|
|
/** Escape %, CR, LF plus the command delimiters ':' and ',' in a property value. */
function escapeProperty(s) {
    const value = utils_1.toCommandValue(s);
    return value
        .split('%').join('%25')
        .split('\r').join('%0D')
        .split('\n').join('%0A')
        .split(':').join('%3A')
        .split(',').join('%2C');
}
|
||
|
|
//# sourceMappingURL=command.js.map
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 438:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
|
|
});
|
||
|
|
};
|
||
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
|
exports.getCommitAheadFactory = void 0;
|
||
|
|
const getCommonOrigin_1 = __webpack_require__(709);
|
||
|
|
const listCommit_1 = __webpack_require__(301);
|
||
|
|
/** Take two branch that have a common origin and list all the
|
||
|
|
* commit that have been made on the branch that is ahead since it
|
||
|
|
* has been forked from the branch that is behind.
|
||
|
|
* From the older to the newest.
|
||
|
|
* */
|
||
|
|
/**
 * Build a getCommitAhead(params) helper bound to an octokit client.
 * Given two branches with a common origin, it lists every commit made on
 * the ahead branch since it forked from the behind branch, oldest first.
 */
function getCommitAheadFactory(params) {
    const { octokit } = params;
    const { getCommonOrigin } = getCommonOrigin_1.getCommonOriginFactory({ octokit });
    const { listCommit } = listCommit_1.listCommitFactory({ octokit });
    async function getCommitAhead(params) {
        const { owner, repo, branchBehind, branchAhead } = params;
        // Find the fork point shared by the two branches...
        const { sha } = await getCommonOrigin({
            owner,
            repo,
            "branch1": branchBehind,
            "branch2": branchAhead
        });
        // ...then collect the ahead branch's commits back to that sha.
        const commits = await listCommit({
            owner,
            repo,
            "branch": branchAhead,
            sha
        });
        return { commits };
    }
    return { getCommitAhead };
}
exports.getCommitAheadFactory = getCommitAheadFactory;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 448:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
|
||
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||
|
|
|
||
|
|
var universalUserAgent = __webpack_require__(796);
|
||
|
|
var beforeAfterHook = __webpack_require__(523);
|
||
|
|
var request = __webpack_require__(753);
|
||
|
|
var graphql = __webpack_require__(898);
|
||
|
|
var authToken = __webpack_require__(813);
|
||
|
|
|
||
|
|
// Babel object-spread runtime helpers.

// Define `key` on `obj`. Existing keys go through defineProperty so the
// descriptor is normalized; new keys use plain assignment.
function _defineProperty(obj, key, value) {
    if (key in obj) {
        Object.defineProperty(obj, key, {
            value,
            enumerable: true,
            configurable: true,
            writable: true
        });
    } else {
        obj[key] = value;
    }
    return obj;
}

// Own string keys plus own symbol keys of `object`; when enumerableOnly is
// set, non-enumerable symbols are filtered out.
function ownKeys(object, enumerableOnly) {
    const keys = Object.keys(object);
    if (Object.getOwnPropertySymbols) {
        let symbols = Object.getOwnPropertySymbols(object);
        if (enumerableOnly) {
            symbols = symbols.filter((sym) => Object.getOwnPropertyDescriptor(object, sym).enumerable);
        }
        keys.push(...symbols);
    }
    return keys;
}

// Equivalent of `{ ...a, ...b }`: copy every source's properties onto
// `target`, later sources winning. (Uses `arguments`, so it must stay a
// function declaration.)
function _objectSpread2(target) {
    for (let i = 1; i < arguments.length; i++) {
        const source = arguments[i] != null ? arguments[i] : {};
        if (i % 2) {
            ownKeys(Object(source), true).forEach((key) => {
                _defineProperty(target, key, source[key]);
            });
        } else if (Object.getOwnPropertyDescriptors) {
            Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));
        } else {
            ownKeys(Object(source)).forEach((key) => {
                Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
            });
        }
    }
    return target;
}
|
||
|
|
|
||
|
|
const VERSION = "2.5.4";

/**
 * GitHub API client core: wires request defaults, the GraphQL client,
 * logging, before/after hooks, authentication, and a static plugin system.
 */
class Octokit {
  constructor(options = {}) {
    // Hook collection shared by every request this instance makes.
    const hook = new beforeAfterHook.Collection();
    const requestDefaults = {
      baseUrl: request.request.endpoint.DEFAULTS.baseUrl,
      headers: {},
      request: Object.assign({}, options.request, {
        hook: hook.bind(null, "request")
      }),
      mediaType: {
        previews: [],
        format: ""
      }
    }; // prepend default user agent with `options.userAgent` if set

    requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" ");

    if (options.baseUrl) {
      requestDefaults.baseUrl = options.baseUrl;
    }

    if (options.previews) {
      requestDefaults.mediaType.previews = options.previews;
    }

    if (options.timeZone) {
      requestDefaults.headers["time-zone"] = options.timeZone;
    }

    this.request = request.request.defaults(requestDefaults);
    // GraphQL endpoint lives under /api (not /api/v3) on Enterprise hosts,
    // hence the baseUrl rewrite.
    this.graphql = graphql.withCustomRequest(this.request).defaults(_objectSpread2(_objectSpread2({}, requestDefaults), {}, {
      baseUrl: requestDefaults.baseUrl.replace(/\/api\/v3$/, "/api")
    }));
    // debug/info default to no-ops; warn/error go to the console unless
    // overridden via options.log.
    this.log = Object.assign({
      debug: () => {},
      info: () => {},
      warn: console.warn.bind(console),
      error: console.error.bind(console)
    }, options.log);
    this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance
    // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.
    // (2) If only `options.auth` is set, use the default token authentication strategy.
    // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.
    // TODO: type `options.auth` based on `options.authStrategy`.

    if (!options.authStrategy) {
      if (!options.auth) {
        // (1)
        this.auth = async () => ({
          type: "unauthenticated"
        });
      } else {
        // (2)
        const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯

        hook.wrap("request", auth.hook);
        this.auth = auth;
      }
    } else {
      // (3)
      const auth = options.authStrategy(Object.assign({
        request: this.request
      }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯

      hook.wrap("request", auth.hook);
      this.auth = auth;
    } // apply plugins
    // https://stackoverflow.com/a/16345172


    // Plugins registered on the (possibly subclassed) constructor get to
    // mix additional members onto this instance.
    const classConstructor = this.constructor;
    classConstructor.plugins.forEach(plugin => {
      Object.assign(this, plugin(this, options));
    });
  }

  /**
   * Create a subclass with baked-in default options. Per-instance options
   * override the defaults; user-agent strings are concatenated.
   */
  static defaults(defaults) {
    const OctokitWithDefaults = class extends this {
      constructor(...args) {
        const options = args[0] || {};
        super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? {
          userAgent: `${options.userAgent} ${defaults.userAgent}`
        } : null));
      }

    };
    return OctokitWithDefaults;
  }
  /**
   * Attach a plugin (or many) to your Octokit instance.
   *
   * @example
   * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)
   */


  static plugin(p1, ...p2) {
    var _a;

    if (p1 instanceof Array) {
      console.warn(["Passing an array of plugins to Octokit.plugin() has been deprecated.", "Instead of:", " Octokit.plugin([plugin1, plugin2, ...])", "Use:", " Octokit.plugin(plugin1, plugin2, ...)"].join("\n"));
    }

    const currentPlugins = this.plugins;
    let newPlugins = [...(p1 instanceof Array ? p1 : [p1]), ...p2];
    // New subclass accumulates the parent's plugins plus any not already present.
    const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);
    return NewOctokit;
  }

}
Octokit.VERSION = VERSION;
Octokit.plugins = [];
|
||
|
|
|
||
|
|
exports.Octokit = Octokit;
|
||
|
|
//# sourceMappingURL=index.js.map
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 453:
|
||
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
|
||
|
|
var once = __webpack_require__(969)
var eos = __webpack_require__(9)
var fs = __webpack_require__(747) // we only need fs to get the ReadStream and WriteStream prototypes

var noop = function () {}
// True on ancient 0.x Node versions, which needed special fs-stream handling.
var ancient = /^v?\.0/.test(process.version)

// Is `fn` callable?
var isFn = function (fn) {
  return typeof fn === 'function'
}

// Is `stream` an fs read/write stream that must be closed via .close()
// (only relevant on ancient Node; always false elsewhere / in browsers)?
var isFS = function (stream) {
  if (!ancient) return false // newer node version do not need to care about fs is a special way
  if (!fs) return false // browser
  return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close)
}

// Duck-type check for an http ClientRequest, which is torn down via .abort().
var isRequest = function (stream) {
  return stream.setHeader && isFn(stream.abort)
}
|
||
|
|
|
||
|
|
// Watch `stream` for end-of-stream and return a function that destroys it.
// `reading`/`writing` describe which side(s) of the stream are in use;
// `callback(err?)` fires exactly once when the stream finishes or errors.
var destroyer = function (stream, reading, writing, callback) {
  callback = once(callback)

  var closed = false
  stream.on('close', function () {
    closed = true
  })

  eos(stream, {readable: reading, writable: writing}, function (err) {
    if (err) return callback(err)
    closed = true
    callback()
  })

  var destroyed = false
  // The returned destroy function is idempotent and a no-op once the
  // stream has already closed on its own.
  return function (err) {
    if (closed) return
    if (destroyed) return
    destroyed = true

    if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks
    if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want

    if (isFn(stream.destroy)) return stream.destroy()

    // No way to tear the stream down: surface it as an error instead.
    callback(err || new Error('stream was destroyed'))
  }
}
|
||
|
|
|
||
|
|
// Invoke a zero-argument function; used as a forEach callback below.
var call = function (fn) { fn() }

// Chain two streams together, returning the result of .pipe().
var pipe = function (from, to) { return from.pipe(to) }
|
||
|
|
|
||
|
|
// pump(stream1, stream2, ..., [callback]) — pipe a series of streams
// together and tear ALL of them down if any one of them errors or closes.
// The optional trailing callback receives the first error (if any) once
// the final stream has finished.
var pump = function () {
  var streams = Array.prototype.slice.call(arguments)
  // The last argument is the callback when it is a function; otherwise noop.
  var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop

  // Also accept pump([s1, s2, ...], cb).
  if (Array.isArray(streams[0])) streams = streams[0]
  if (streams.length < 2) throw new Error('pump requires two streams per minimum')

  var error
  var destroys = streams.map(function (stream, i) {
    // Every stream but the last is read from; every stream but the first
    // is written to.
    var reading = i < streams.length - 1
    var writing = i > 0
    return destroyer(stream, reading, writing, function (err) {
      if (!error) error = err
      // On error, destroy the whole pipeline immediately.
      if (err) destroys.forEach(call)
      // Only the final (non-reading) stream's completion settles the callback.
      if (reading) return
      destroys.forEach(call)
      callback(error)
    })
  })

  return streams.reduce(pipe)
}

module.exports = pump
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 454:
|
||
|
|
/***/ (function(module, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
|
||
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||
|
|
|
||
|
|
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
|
||
|
|
|
||
|
|
var Stream = _interopDefault(__webpack_require__(413));
|
||
|
|
var http = _interopDefault(__webpack_require__(605));
|
||
|
|
var Url = _interopDefault(__webpack_require__(835));
|
||
|
|
var https = _interopDefault(__webpack_require__(211));
|
||
|
|
var zlib = _interopDefault(__webpack_require__(761));
|
||
|
|
|
||
|
|
// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js
|
||
|
|
|
||
|
|
// fix for "Readable" isn't a named export issue
|
||
|
|
const Readable = Stream.Readable;
|
||
|
|
|
||
|
|
const BUFFER = Symbol('buffer');
|
||
|
|
const TYPE = Symbol('type');
|
||
|
|
|
||
|
|
// In-memory Blob backed by a single Node.js Buffer — the subset of the
// WHATWG Blob interface that node-fetch needs.
class Blob {
	constructor() {
		this[TYPE] = '';

		const blobParts = arguments[0];
		const options = arguments[1];

		const buffers = [];
		let size = 0;

		if (blobParts) {
			const a = blobParts;
			const length = Number(a.length);
			for (let i = 0; i < length; i++) {
				const element = a[i];
				let buffer;
				// Normalize every supported part type to a Buffer.
				if (element instanceof Buffer) {
					buffer = element;
				} else if (ArrayBuffer.isView(element)) {
					buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);
				} else if (element instanceof ArrayBuffer) {
					buffer = Buffer.from(element);
				} else if (element instanceof Blob) {
					buffer = element[BUFFER];
				} else {
					// Anything else is stringified.
					buffer = Buffer.from(typeof element === 'string' ? element : String(element));
				}
				size += buffer.length;
				buffers.push(buffer);
			}
		}

		this[BUFFER] = Buffer.concat(buffers);

		// Only printable-ASCII MIME types are accepted; others leave type ''.
		let type = options && options.type !== undefined && String(options.type).toLowerCase();
		if (type && !/[^\u0020-\u007E]/.test(type)) {
			this[TYPE] = type;
		}
	}
	// Total byte length of the backing buffer.
	get size() {
		return this[BUFFER].length;
	}
	// Lower-cased MIME type ('' when unset or rejected).
	get type() {
		return this[TYPE];
	}
	// Resolve with the contents decoded as a UTF-8 string.
	text() {
		return Promise.resolve(this[BUFFER].toString());
	}
	// Resolve with a copy of the contents as an ArrayBuffer.
	arrayBuffer() {
		const buf = this[BUFFER];
		const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
		return Promise.resolve(ab);
	}
	// Expose the contents as a Readable stream: one push, then EOF.
	stream() {
		const readable = new Readable();
		readable._read = function () {};
		readable.push(this[BUFFER]);
		readable.push(null);
		return readable;
	}
	toString() {
		return '[object Blob]';
	}
	// Blob#slice(start, end, contentType) with spec-style negative-index
	// and clamping semantics.
	slice() {
		const size = this.size;

		const start = arguments[0];
		const end = arguments[1];
		let relativeStart, relativeEnd;
		if (start === undefined) {
			relativeStart = 0;
		} else if (start < 0) {
			relativeStart = Math.max(size + start, 0);
		} else {
			relativeStart = Math.min(start, size);
		}
		if (end === undefined) {
			relativeEnd = size;
		} else if (end < 0) {
			relativeEnd = Math.max(size + end, 0);
		} else {
			relativeEnd = Math.min(end, size);
		}
		const span = Math.max(relativeEnd - relativeStart, 0);

		const buffer = this[BUFFER];
		const slicedBuffer = buffer.slice(relativeStart, relativeStart + span);
		// Build an empty Blob and graft the sliced buffer in, avoiding a copy.
		const blob = new Blob([], { type: arguments[2] });
		blob[BUFFER] = slicedBuffer;
		return blob;
	}
}
|
||
|
|
|
||
|
|
// Make size/type/slice enumerable, mirroring the browser Blob interface.
Object.defineProperties(Blob.prototype, {
	size: { enumerable: true },
	type: { enumerable: true },
	slice: { enumerable: true }
});

// Object.prototype.toString.call(blob) -> "[object Blob]".
Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
	value: 'Blob',
	writable: false,
	enumerable: false,
	configurable: true
});
|
||
|
|
|
||
|
|
/**
|
||
|
|
* fetch-error.js
|
||
|
|
*
|
||
|
|
* FetchError interface for operational errors
|
||
|
|
*/
|
||
|
|
|
||
|
|
/**
|
||
|
|
* Create FetchError instance
|
||
|
|
*
|
||
|
|
* @param String message Error message for human
|
||
|
|
* @param String type Error type for machine
|
||
|
|
* @param String systemError For Node.js system error
|
||
|
|
* @return FetchError
|
||
|
|
*/
|
||
|
|
/**
 * Create FetchError instance — the operational-error type used by
 * node-fetch.
 *
 * @param String message Error message for humans
 * @param String type Error type for machines (e.g. 'system', 'body-timeout')
 * @param String systemError Underlying Node.js system error, if any
 * @return FetchError
 */
function FetchError(message, type, systemError) {
  Error.call(this, message);

  this.message = message;
  this.type = type;

  // A Node.js "system" error carries its errno code through on both
  // `code` and `errno`.
  if (systemError) {
    this.errno = systemError.code;
    this.code = this.errno;
  }

  // Trim this constructor frame from the captured stack so end-users do
  // not see implementation details.
  Error.captureStackTrace(this, this.constructor);
}

FetchError.prototype = Object.create(Error.prototype);
FetchError.prototype.constructor = FetchError;
FetchError.prototype.name = 'FetchError';
|
||
|
|
|
||
|
|
// Optional charset conversion (provided by the `encoding` package via
// webpack module 18). Deliberately best-effort: when the dependency is
// absent, `convert` stays undefined and callers skip conversion.
let convert;
try {
	convert = __webpack_require__(18).convert;
} catch (e) {}

// Private key for per-Body internal state (body, disturbed flag, error).
const INTERNALS = Symbol('Body internals');

// fix an issue where "PassThrough" isn't a named export for node <10
const PassThrough = Stream.PassThrough;
|
||
|
|
|
||
|
|
/**
|
||
|
|
* Body mixin
|
||
|
|
*
|
||
|
|
* Ref: https://fetch.spec.whatwg.org/#body
|
||
|
|
*
|
||
|
|
* @param Stream body Readable stream
|
||
|
|
* @param Object opts Response options
|
||
|
|
* @return Void
|
||
|
|
*/
|
||
|
|
/**
 * Body mixin (node-fetch): normalizes the many accepted body types down to
 * null, a Buffer, a Blob, or a Stream, and records size/timeout options.
 *
 * Ref: https://fetch.spec.whatwg.org/#body
 *
 * @param Stream body Readable stream (or any other supported body type)
 * @param Object opts { size, timeout }
 * @return Void
 */
function Body(body) {
	var _this = this;

	// Transpiled destructuring of the optional second argument:
	// { size = 0, timeout = 0 } = arguments[1] || {}
	var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
	    _ref$size = _ref.size;

	let size = _ref$size === undefined ? 0 : _ref$size;
	var _ref$timeout = _ref.timeout;
	let timeout = _ref$timeout === undefined ? 0 : _ref$timeout;

	// Order matters below: each branch either converts `body` to a Buffer
	// or (for Blob/Buffer/Stream — the empty-statement branches) keeps it
	// as-is.
	if (body == null) {
		// body is undefined or null
		body = null;
	} else if (isURLSearchParams(body)) {
		// body is a URLSearchParams
		body = Buffer.from(body.toString());
	} else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
		// body is ArrayBuffer
		body = Buffer.from(body);
	} else if (ArrayBuffer.isView(body)) {
		// body is ArrayBufferView
		body = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
	} else if (body instanceof Stream) ; else {
		// none of the above
		// coerce to string then buffer
		body = Buffer.from(String(body));
	}
	this[INTERNALS] = {
		body,
		disturbed: false,
		error: null
	};
	this.size = size;
	this.timeout = timeout;

	if (body instanceof Stream) {
		// Record stream errors so consumeBody can reject with them later;
		// abort errors pass through unwrapped.
		body.on('error', function (err) {
			const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);
			_this[INTERNALS].error = error;
		});
	}
}
|
||
|
|
|
||
|
|
Body.prototype = {
	// The normalized body payload (null | Buffer | Blob | Stream).
	get body() {
		return this[INTERNALS].body;
	},

	// Whether the body has already been consumed (spec: "disturbed").
	get bodyUsed() {
		return this[INTERNALS].disturbed;
	},

	/**
	 * Decode response as ArrayBuffer
	 *
	 * @return Promise
	 */
	arrayBuffer() {
		return consumeBody.call(this).then(function (buf) {
			// slice out exactly the Buffer's view of its backing ArrayBuffer
			return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
		});
	},

	/**
	 * Return raw response as Blob
	 *
	 * @return Promise
	 */
	blob() {
		let ct = this.headers && this.headers.get('content-type') || '';
		return consumeBody.call(this).then(function (buf) {
			return Object.assign(
			// Prevent copying
			new Blob([], {
				type: ct.toLowerCase()
			}), {
				[BUFFER]: buf
			});
		});
	},

	/**
	 * Decode response as json
	 *
	 * @return Promise Rejects with FetchError('invalid-json') on parse failure
	 */
	json() {
		var _this2 = this;

		return consumeBody.call(this).then(function (buffer) {
			try {
				return JSON.parse(buffer.toString());
			} catch (err) {
				return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));
			}
		});
	},

	/**
	 * Decode response as text
	 *
	 * @return Promise
	 */
	text() {
		return consumeBody.call(this).then(function (buffer) {
			return buffer.toString();
		});
	},

	/**
	 * Decode response as buffer (non-spec api)
	 *
	 * @return Promise
	 */
	buffer() {
		return consumeBody.call(this);
	},

	/**
	 * Decode response as text, while automatically detecting the encoding and
	 * trying to decode to UTF-8 (non-spec api)
	 *
	 * @return Promise
	 */
	textConverted() {
		var _this3 = this;

		return consumeBody.call(this).then(function (buffer) {
			return convertBody(buffer, _this3.headers);
		});
	}
};
|
||
|
|
|
||
|
|
// In browsers, all properties are enumerable.
Object.defineProperties(Body.prototype, {
	body: { enumerable: true },
	bodyUsed: { enumerable: true },
	arrayBuffer: { enumerable: true },
	blob: { enumerable: true },
	json: { enumerable: true },
	text: { enumerable: true }
});
|
||
|
|
|
||
|
|
/**
 * Copy Body's prototype members onto another prototype (mixin pattern).
 * Properties already present on the target are left alone; descriptors are
 * copied wholesale so getters such as `body`/`bodyUsed` keep working.
 *
 * @param Object proto Target prototype to augment
 */
Body.mixIn = function (proto) {
	Object.getOwnPropertyNames(Body.prototype)
		// istanbul ignore else: future proof
		.filter(function (prop) {
			return !(prop in proto);
		})
		.forEach(function (prop) {
			const descriptor = Object.getOwnPropertyDescriptor(Body.prototype, prop);
			Object.defineProperty(proto, prop, descriptor);
		});
};
|
||
|
|
|
||
|
|
/**
 * Consume and convert an entire Body to a Buffer.
 *
 * Marks the body as disturbed, then drains it: null -> empty Buffer,
 * Blob -> its stream, Buffer -> resolved as-is, Stream -> chunks are
 * accumulated subject to the instance's `size` limit and `timeout`.
 *
 * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body
 *
 * @return Promise Resolves with a Buffer; rejects with TypeError (reuse) or
 *                 FetchError ('body-timeout' | 'max-size' | 'system')
 */
function consumeBody() {
	var _this4 = this;

	if (this[INTERNALS].disturbed) {
		return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));
	}

	this[INTERNALS].disturbed = true;

	// propagate a stream error recorded earlier by the Body constructor
	if (this[INTERNALS].error) {
		return Body.Promise.reject(this[INTERNALS].error);
	}

	let body = this.body;

	// body is null
	if (body === null) {
		return Body.Promise.resolve(Buffer.alloc(0));
	}

	// body is blob
	if (isBlob(body)) {
		body = body.stream();
	}

	// body is buffer
	if (Buffer.isBuffer(body)) {
		return Body.Promise.resolve(body);
	}

	// istanbul ignore if: should never happen
	if (!(body instanceof Stream)) {
		return Body.Promise.resolve(Buffer.alloc(0));
	}

	// body is stream
	// get ready to actually consume the body
	let accum = [];
	let accumBytes = 0;
	let abort = false;

	return new Body.Promise(function (resolve, reject) {
		let resTimeout;

		// allow timeout on slow response body
		if (_this4.timeout) {
			resTimeout = setTimeout(function () {
				abort = true;
				reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));
			}, _this4.timeout);
		}

		// handle stream errors
		body.on('error', function (err) {
			if (err.name === 'AbortError') {
				// if the request was aborted, reject with this Error
				abort = true;
				reject(err);
			} else {
				// other errors, such as incorrect content-encoding
				reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));
			}
		});

		body.on('data', function (chunk) {
			// after abort, keep draining but discard chunks
			if (abort || chunk === null) {
				return;
			}

			if (_this4.size && accumBytes + chunk.length > _this4.size) {
				abort = true;
				reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));
				return;
			}

			accumBytes += chunk.length;
			accum.push(chunk);
		});

		body.on('end', function () {
			if (abort) {
				return;
			}

			clearTimeout(resTimeout);

			try {
				resolve(Buffer.concat(accum, accumBytes));
			} catch (err) {
				// handle streams that have accumulated too much data (issue #414)
				reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));
			}
		});
	});
}
|
||
|
|
|
||
|
|
/**
 * Detect buffer encoding and convert to target encoding
 * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
 *
 * Charset is taken from the Content-Type header if present, otherwise
 * sniffed from the first 1024 bytes of the body (HTML5 meta charset,
 * HTML4 http-equiv, XML declaration), defaulting to utf-8.
 *
 * @param Buffer buffer Incoming buffer
 * @param Headers headers Response headers (content-type is consulted)
 * @return String Body decoded to UTF-8
 * @throws Error when the optional `encoding` package is not installed
 */
function convertBody(buffer, headers) {
	if (typeof convert !== 'function') {
		throw new Error('The package `encoding` must be installed to use the textConverted() function');
	}

	const ct = headers.get('content-type');
	let charset = 'utf-8';
	let res, str;

	// header
	if (ct) {
		res = /charset=([^;]*)/i.exec(ct);
	}

	// no charset in content type, peek at response body for at most 1024 bytes
	str = buffer.slice(0, 1024).toString();

	// html5
	if (!res && str) {
		res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str);
	}

	// html4
	if (!res && str) {
		res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str);
		if (!res) {
			res = /<meta[\s]+?content=(['"])(.+?)\1[\s]+?http-equiv=(['"])content-type\3/i.exec(str);
			if (res) {
				res.pop(); // drop last quote
			}
		}

		if (res) {
			// the matched content attribute still contains "...; charset=X"
			res = /charset=(.*)/i.exec(res.pop());
		}
	}

	// xml
	if (!res && str) {
		res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str);
	}

	// found charset
	if (res) {
		charset = res.pop();

		// prevent decode issues when sites use incorrect encoding
		// ref: https://hsivonen.fi/encoding-menu/
		if (charset === 'gb2312' || charset === 'gbk') {
			charset = 'gb18030';
		}
	}

	// turn raw buffers into a single utf-8 buffer
	return convert(buffer, 'UTF-8', charset).toString();
}
|
||
|
|
|
||
|
|
/**
 * Detect a URLSearchParams object
 * ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143
 *
 * @param Object obj Object to detect by type or brand
 * @return Boolean
 */
function isURLSearchParams(obj) {
	// Duck-typing as a necessary condition: every spec method must be present.
	if (typeof obj !== 'object') {
		return false;
	}
	const specMethods = ['append', 'delete', 'get', 'getAll', 'has', 'set'];
	for (const specMethod of specMethods) {
		if (typeof obj[specMethod] !== 'function') {
			return false;
		}
	}

	// Brand-checking and more duck-typing as optional condition.
	return obj.constructor.name === 'URLSearchParams' || Object.prototype.toString.call(obj) === '[object URLSearchParams]' || typeof obj.sort === 'function';
}
|
||
|
|
|
||
|
|
/**
 * Check if `obj` is a W3C `Blob` object (which `File` inherits from)
 * @param {*} obj
 * @return {boolean}
 */
function isBlob(obj) {
	if (typeof obj !== 'object') {
		return false;
	}
	// Duck-type the Blob surface first, then brand-check both the
	// constructor name and the Symbol.toStringTag.
	const hasBlobShape = typeof obj.arrayBuffer === 'function' && typeof obj.type === 'string' && typeof obj.stream === 'function' && typeof obj.constructor === 'function' && typeof obj.constructor.name === 'string';
	return hasBlobShape && /^(Blob|File)$/.test(obj.constructor.name) && /^(Blob|File)$/.test(obj[Symbol.toStringTag]);
}
|
||
|
|
|
||
|
|
/**
 * Clone body given Res/Req instance
 *
 * Non-stream bodies (null/Buffer/Blob) are shared as-is. A stream body is
 * teed through two PassThrough streams so both the original instance and
 * the clone can be consumed independently.
 *
 * @param Mixed instance Response or Request instance
 * @return Mixed Body value suitable for constructing the clone
 * @throws Error if the body was already consumed
 */
function clone(instance) {
	let p1, p2;
	let body = instance.body;

	// don't allow cloning a used body
	if (instance.bodyUsed) {
		throw new Error('cannot clone body after it is used');
	}

	// check that body is a stream and not form-data object
	// note: we can't clone the form-data object without having it as a dependency
	if (body instanceof Stream && typeof body.getBoundary !== 'function') {
		// tee instance body
		p1 = new PassThrough();
		p2 = new PassThrough();
		body.pipe(p1);
		body.pipe(p2);
		// set instance body to teed body and return the other teed body
		instance[INTERNALS].body = p1;
		body = p2;
	}

	return body;
}
|
||
|
|
|
||
|
|
/**
 * Performs the operation "extract a `Content-Type` value from |object|" as
 * specified in the specification:
 * https://fetch.spec.whatwg.org/#concept-bodyinit-extract
 *
 * This function assumes that instance.body is present.
 * Branch order mirrors the Body constructor's normalization order.
 *
 * @param Mixed body Any options.body input
 * @return String|null MIME type, or null when none can be determined
 */
function extractContentType(body) {
	if (body === null) {
		// body is null
		return null;
	} else if (typeof body === 'string') {
		// body is string
		return 'text/plain;charset=UTF-8';
	} else if (isURLSearchParams(body)) {
		// body is a URLSearchParams
		return 'application/x-www-form-urlencoded;charset=UTF-8';
	} else if (isBlob(body)) {
		// body is blob
		return body.type || null;
	} else if (Buffer.isBuffer(body)) {
		// body is buffer
		return null;
	} else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
		// body is ArrayBuffer
		return null;
	} else if (ArrayBuffer.isView(body)) {
		// body is ArrayBufferView
		return null;
	} else if (typeof body.getBoundary === 'function') {
		// detect form data input from form-data module
		return `multipart/form-data;boundary=${body.getBoundary()}`;
	} else if (body instanceof Stream) {
		// body is stream
		// can't really do much about this
		return null;
	} else {
		// Body constructor defaults other things to string
		return 'text/plain;charset=UTF-8';
	}
}
|
||
|
|
|
||
|
|
/**
 * The Fetch Standard treats this as if "total bytes" is a property on the body.
 * For us, we have to explicitly get it with a function.
 *
 * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes
 *
 * @param Body instance Instance of Body
 * @return Number? Number of bytes, or null if not possible
 */
function getTotalBytes(instance) {
	const body = instance.body;

	if (body === null) {
		// body is null
		return 0;
	} else if (isBlob(body)) {
		return body.size;
	} else if (Buffer.isBuffer(body)) {
		// body is buffer
		return body.length;
	} else if (body && typeof body.getLengthSync === 'function') {
		// detect form data input from form-data module
		// (length is only known synchronously when no async retrievers are pending)
		if (body._lengthRetrievers && body._lengthRetrievers.length == 0 || // 1.x
		body.hasKnownLength && body.hasKnownLength()) {
			// 2.x
			return body.getLengthSync();
		}
		return null;
	} else {
		// body is stream
		return null;
	}
}
|
||
|
|
|
||
|
|
/**
 * Write a Body to a Node.js WritableStream (e.g. http.Request) object.
 *
 * @param WritableStream dest Destination stream
 * @param Body instance Instance of Body
 * @return Void
 */
function writeToStream(dest, instance) {
	const body = instance.body;

	if (body === null) {
		// body is null
		dest.end();
	} else if (isBlob(body)) {
		body.stream().pipe(dest);
	} else if (Buffer.isBuffer(body)) {
		// body is buffer
		dest.write(body);
		dest.end();
	} else {
		// body is stream
		body.pipe(dest);
	}
}
|
||
|
|
|
||
|
|
// expose Promise so callers can substitute their own implementation
Body.Promise = global.Promise;

/**
 * headers.js
 *
 * Headers class offers convenient helpers
 */

// Matches any character NOT allowed in an HTTP header name (token chars).
const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/;
// Matches any character NOT allowed in an HTTP header value.
const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/;
|
||
|
|
|
||
|
|
/**
 * Validate an HTTP header name, throwing on illegal characters.
 *
 * @param String name Header name (coerced to string first)
 * @throws TypeError when the name is empty or contains non-token characters
 */
function validateName(name) {
	const headerName = `${name}`;
	if (headerName === '' || invalidTokenRegex.test(headerName)) {
		throw new TypeError(`${headerName} is not a legal HTTP header name`);
	}
}
|
||
|
|
|
||
|
|
/**
 * Validate an HTTP header value, throwing on illegal characters.
 *
 * @param String value Header value (coerced to string first)
 * @throws TypeError when the value contains characters outside the legal set
 */
function validateValue(value) {
	const headerValue = `${value}`;
	if (invalidHeaderCharRegex.test(headerValue)) {
		throw new TypeError(`${headerValue} is not a legal HTTP header value`);
	}
}
|
||
|
|
|
||
|
|
/**
 * Find the key in the map object given a header name.
 *
 * Comparison is case-insensitive; the key is returned in its original case.
 * Returns undefined if not found.
 *
 * @param Object map Backing header map
 * @param String name Header name
 * @return String|Undefined
 */
function find(map, name) {
	const target = name.toLowerCase();
	let found = undefined;
	for (const key in map) {
		if (key.toLowerCase() === target) {
			found = key;
			break;
		}
	}
	return found;
}
|
||
|
|
|
||
|
|
// Private backing-store key: maps original-case header name -> array of values.
const MAP = Symbol('map');
class Headers {
	/**
	 * Headers class
	 *
	 * Accepts another Headers instance, an iterable of [name, value] pairs,
	 * or a plain record of name -> value.
	 *
	 * @param Object headers Response headers
	 * @return Void
	 */
	constructor() {
		let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined;

		// prototype-less object so header names can never collide with
		// Object.prototype members
		this[MAP] = Object.create(null);

		if (init instanceof Headers) {
			const rawHeaders = init.raw();
			const headerNames = Object.keys(rawHeaders);

			for (const headerName of headerNames) {
				for (const value of rawHeaders[headerName]) {
					this.append(headerName, value);
				}
			}

			return;
		}

		// We don't worry about converting prop to ByteString here as append()
		// will handle it.
		if (init == null) ; else if (typeof init === 'object') {
			const method = init[Symbol.iterator];
			if (method != null) {
				if (typeof method !== 'function') {
					throw new TypeError('Header pairs must be iterable');
				}

				// sequence<sequence<ByteString>>
				// Note: per spec we have to first exhaust the lists then process them
				const pairs = [];
				for (const pair of init) {
					if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
						throw new TypeError('Each header pair must be iterable');
					}
					pairs.push(Array.from(pair));
				}

				for (const pair of pairs) {
					if (pair.length !== 2) {
						throw new TypeError('Each header pair must be a name/value tuple');
					}
					this.append(pair[0], pair[1]);
				}
			} else {
				// record<ByteString, ByteString>
				for (const key of Object.keys(init)) {
					const value = init[key];
					this.append(key, value);
				}
			}
		} else {
			throw new TypeError('Provided initializer must be an object');
		}
	}

	/**
	 * Return combined header value given name
	 *
	 * @param String name Header name
	 * @return Mixed Comma-joined values, or null when absent
	 */
	get(name) {
		name = `${name}`;
		validateName(name);
		const key = find(this[MAP], name);
		if (key === undefined) {
			return null;
		}

		return this[MAP][key].join(', ');
	}

	/**
	 * Iterate over all headers
	 *
	 * @param Function callback Executed for each item with parameters (value, name, thisArg)
	 * @param Boolean thisArg `this` context for callback function
	 * @return Void
	 */
	forEach(callback) {
		let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;

		let pairs = getHeaders(this);
		let i = 0;
		while (i < pairs.length) {
			var _pairs$i = pairs[i];
			const name = _pairs$i[0],
			      value = _pairs$i[1];

			callback.call(thisArg, value, name, this);
			// re-read the pairs so mutation inside the callback is observed
			pairs = getHeaders(this);
			i++;
		}
	}

	/**
	 * Overwrite header values given name
	 *
	 * @param String name Header name
	 * @param String value Header value
	 * @return Void
	 */
	set(name, value) {
		name = `${name}`;
		value = `${value}`;
		validateName(name);
		validateValue(value);
		const key = find(this[MAP], name);
		// preserve the original casing of an existing key
		this[MAP][key !== undefined ? key : name] = [value];
	}

	/**
	 * Append a value onto existing header
	 *
	 * @param String name Header name
	 * @param String value Header value
	 * @return Void
	 */
	append(name, value) {
		name = `${name}`;
		value = `${value}`;
		validateName(name);
		validateValue(value);
		const key = find(this[MAP], name);
		if (key !== undefined) {
			this[MAP][key].push(value);
		} else {
			this[MAP][name] = [value];
		}
	}

	/**
	 * Check for header name existence
	 *
	 * @param String name Header name
	 * @return Boolean
	 */
	has(name) {
		name = `${name}`;
		validateName(name);
		return find(this[MAP], name) !== undefined;
	}

	/**
	 * Delete all header values given name
	 *
	 * @param String name Header name
	 * @return Void
	 */
	delete(name) {
		name = `${name}`;
		validateName(name);
		const key = find(this[MAP], name);
		if (key !== undefined) {
			delete this[MAP][key];
		}
	}

	/**
	 * Return raw headers (non-spec api)
	 *
	 * @return Object
	 */
	raw() {
		return this[MAP];
	}

	/**
	 * Get an iterator on keys.
	 *
	 * @return Iterator
	 */
	keys() {
		return createHeadersIterator(this, 'key');
	}

	/**
	 * Get an iterator on values.
	 *
	 * @return Iterator
	 */
	values() {
		return createHeadersIterator(this, 'value');
	}

	/**
	 * Get an iterator on entries.
	 *
	 * This is the default iterator of the Headers object.
	 *
	 * @return Iterator
	 */
	[Symbol.iterator]() {
		return createHeadersIterator(this, 'key+value');
	}
}
|
||
|
|
// entries() is an alias for the default iterator, per the Fetch spec.
Headers.prototype.entries = Headers.prototype[Symbol.iterator];

Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
	value: 'Headers',
	writable: false,
	enumerable: false,
	configurable: true
});

// Spec methods are enumerable on the prototype, mirroring browsers.
Object.defineProperties(Headers.prototype, {
	get: { enumerable: true },
	forEach: { enumerable: true },
	set: { enumerable: true },
	append: { enumerable: true },
	has: { enumerable: true },
	delete: { enumerable: true },
	keys: { enumerable: true },
	values: { enumerable: true },
	entries: { enumerable: true }
});
|
||
|
|
|
||
|
|
/**
 * Snapshot a Headers object as a sorted array for iteration.
 *
 * @param Headers headers Headers instance to read
 * @param String kind 'key' | 'value' | 'key+value' (default) — shape of each element
 * @return Array Lowercased names, joined values, or [name, value] pairs
 */
function getHeaders(headers) {
	let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';

	const sortedKeys = Object.keys(headers[MAP]).sort();

	let transform;
	if (kind === 'key') {
		transform = (k) => k.toLowerCase();
	} else if (kind === 'value') {
		transform = (k) => headers[MAP][k].join(', ');
	} else {
		transform = (k) => [k.toLowerCase(), headers[MAP][k].join(', ')];
	}
	return sortedKeys.map(transform);
}
|
||
|
|
|
||
|
|
// Private state key for header iterators (target, kind, index).
const INTERNAL = Symbol('internal');

/**
 * Create a Headers iterator of the given kind.
 *
 * @param Headers target Headers instance to iterate
 * @param String kind 'key' | 'value' | 'key+value'
 * @return Iterator
 */
function createHeadersIterator(target, kind) {
	const iterator = Object.create(HeadersIteratorPrototype);
	iterator[INTERNAL] = {
		target,
		kind,
		index: 0
	};
	return iterator;
}

// Rooted at the same %IteratorPrototype% as built-in array iterators so the
// result behaves like a native iterator object.
const HeadersIteratorPrototype = Object.setPrototypeOf({
	next() {
		// istanbul ignore if
		if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
			throw new TypeError('Value of `this` is not a HeadersIterator');
		}

		var _INTERNAL = this[INTERNAL];
		const target = _INTERNAL.target,
		      kind = _INTERNAL.kind,
		      index = _INTERNAL.index;

		// re-snapshot the headers each step so concurrent mutation is observed
		const values = getHeaders(target, kind);
		const len = values.length;
		if (index >= len) {
			return {
				value: undefined,
				done: true
			};
		}

		this[INTERNAL].index = index + 1;

		return {
			value: values[index],
			done: false
		};
	}
}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));

Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
	value: 'HeadersIterator',
	writable: false,
	enumerable: false,
	configurable: true
});
|
||
|
|
|
||
|
|
/**
 * Export the Headers object in a form that Node.js can consume.
 *
 * @param Headers headers
 * @return Object Prototype-less map of name -> [values] (Host flattened to a string)
 */
function exportNodeCompatibleHeaders(headers) {
	const obj = Object.assign({ __proto__: null }, headers[MAP]);

	// http.request() only supports string as Host header. This hack makes
	// specifying custom Host header possible.
	const hostHeaderKey = find(headers[MAP], 'Host');
	if (hostHeaderKey !== undefined) {
		obj[hostHeaderKey] = obj[hostHeaderKey][0];
	}

	return obj;
}
|
||
|
|
|
||
|
|
/**
 * Create a Headers object from an object of headers, ignoring those that do
 * not conform to HTTP grammar productions.
 *
 * Writes straight into the backing MAP, bypassing validateName/validateValue,
 * so malformed entries are silently dropped instead of throwing.
 *
 * @param Object obj Object of headers
 * @return Headers
 */
function createHeadersLenient(obj) {
	const headers = new Headers();
	for (const name of Object.keys(obj)) {
		// skip names with illegal characters
		if (invalidTokenRegex.test(name)) {
			continue;
		}
		if (Array.isArray(obj[name])) {
			for (const val of obj[name]) {
				// skip values with illegal characters
				if (invalidHeaderCharRegex.test(val)) {
					continue;
				}
				if (headers[MAP][name] === undefined) {
					headers[MAP][name] = [val];
				} else {
					headers[MAP][name].push(val);
				}
			}
		} else if (!invalidHeaderCharRegex.test(obj[name])) {
			headers[MAP][name] = [obj[name]];
		}
	}
	return headers;
}
|
||
|
|
|
||
|
|
// Private state key for Response instances (url, status, statusText, headers, counter).
const INTERNALS$1 = Symbol('Response internals');

// fix an issue where "STATUS_CODES" aren't a named export for node <10
const STATUS_CODES = http.STATUS_CODES;
|
||
|
|
|
||
|
|
/**
 * Response class
 *
 * @param Stream body Readable stream
 * @param Object opts Response options (status, statusText, headers, url, counter)
 * @return Void
 */
class Response {
	constructor() {
		let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
		let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};

		// initialize the Body mixin state (size/timeout limits, body normalization)
		Body.call(this, body, opts);

		const status = opts.status || 200;
		const headers = new Headers(opts.headers);

		// infer a Content-Type from the body when none was provided
		if (body != null && !headers.has('Content-Type')) {
			const contentType = extractContentType(body);
			if (contentType) {
				headers.append('Content-Type', contentType);
			}
		}

		this[INTERNALS$1] = {
			url: opts.url,
			status,
			statusText: opts.statusText || STATUS_CODES[status],
			headers,
			counter: opts.counter
		};
	}

	get url() {
		return this[INTERNALS$1].url || '';
	}

	get status() {
		return this[INTERNALS$1].status;
	}

	/**
	 * Convenience property representing if the request ended normally
	 */
	get ok() {
		return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;
	}

	// true when at least one redirect was followed (counter > 0)
	get redirected() {
		return this[INTERNALS$1].counter > 0;
	}

	get statusText() {
		return this[INTERNALS$1].statusText;
	}

	get headers() {
		return this[INTERNALS$1].headers;
	}

	/**
	 * Clone this response
	 *
	 * @return Response
	 */
	clone() {
		return new Response(clone(this), {
			url: this.url,
			status: this.status,
			statusText: this.statusText,
			headers: this.headers,
			ok: this.ok,
			redirected: this.redirected
		});
	}
}
|
||
|
|
|
||
|
|
// Give Response the Body consumption API (json/text/buffer/...).
Body.mixIn(Response.prototype);

// Spec accessors are enumerable, mirroring browsers.
Object.defineProperties(Response.prototype, {
	url: { enumerable: true },
	status: { enumerable: true },
	ok: { enumerable: true },
	redirected: { enumerable: true },
	statusText: { enumerable: true },
	headers: { enumerable: true },
	clone: { enumerable: true }
});

Object.defineProperty(Response.prototype, Symbol.toStringTag, {
	value: 'Response',
	writable: false,
	enumerable: false,
	configurable: true
});
|
||
|
|
|
||
|
|
// Private state key for Request instances (method, redirect, headers, parsedURL, signal).
const INTERNALS$2 = Symbol('Request internals');

// fix an issue where "format", "parse" aren't a named export for node <10
const parse_url = Url.parse;
const format_url = Url.format;

// feature-detect Stream.Readable#destroy (absent on very old Node versions)
const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;
|
||
|
|
|
||
|
|
/**
 * Check if a value is an instance of Request.
 *
 * Brand-checked via the private INTERNALS$2 symbol rather than instanceof,
 * so it works across realms/copies of this module.
 *
 * @param Mixed input
 * @return Boolean
 */
function isRequest(input) {
	if (typeof input !== 'object') {
		return false;
	}
	return typeof input[INTERNALS$2] === 'object';
}
|
||
|
|
|
||
|
|
/**
 * Check whether a value looks like an AbortSignal, by brand-checking the
 * constructor name on its immediate prototype.
 *
 * @param Mixed signal
 * @return Boolean
 */
function isAbortSignal(signal) {
	if (!signal || typeof signal !== 'object') {
		return false;
	}
	const proto = Object.getPrototypeOf(signal);
	return Boolean(proto && proto.constructor.name === 'AbortSignal');
}
|
||
|
|
|
||
|
|
/**
 * Request class
 *
 * @param Mixed input Url or Request instance
 * @param Object init Custom options
 * @return Void
 */
class Request {
	constructor(input) {
		let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};

		let parsedURL;

		// normalize input
		if (!isRequest(input)) {
			if (input && input.href) {
				// in order to support Node.js' Url objects; though WHATWG's URL objects
				// will fall into this branch also (since their `toString()` will return
				// `href` property anyway)
				parsedURL = parse_url(input.href);
			} else {
				// coerce input to a string before attempting to parse
				parsedURL = parse_url(`${input}`);
			}
			input = {};
		} else {
			parsedURL = parse_url(input.url);
		}

		let method = init.method || input.method || 'GET';
		method = method.toUpperCase();

		// per spec, GET/HEAD requests must not carry a body
		if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {
			throw new TypeError('Request with GET/HEAD method cannot have body');
		}

		// init.body wins; otherwise tee the source Request's body via clone()
		let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;

		Body.call(this, inputBody, {
			timeout: init.timeout || input.timeout || 0,
			size: init.size || input.size || 0
		});

		const headers = new Headers(init.headers || input.headers || {});

		// infer a Content-Type from the body when none was provided
		if (inputBody != null && !headers.has('Content-Type')) {
			const contentType = extractContentType(inputBody);
			if (contentType) {
				headers.append('Content-Type', contentType);
			}
		}

		let signal = isRequest(input) ? input.signal : null;
		if ('signal' in init) signal = init.signal;

		if (signal != null && !isAbortSignal(signal)) {
			throw new TypeError('Expected signal to be an instanceof AbortSignal');
		}

		this[INTERNALS$2] = {
			method,
			redirect: init.redirect || input.redirect || 'follow',
			headers,
			parsedURL,
			signal
		};

		// node-fetch-only options
		this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;
		this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true;
		this.counter = init.counter || input.counter || 0;
		this.agent = init.agent || input.agent;
	}

	get method() {
		return this[INTERNALS$2].method;
	}

	get url() {
		return format_url(this[INTERNALS$2].parsedURL);
	}

	get headers() {
		return this[INTERNALS$2].headers;
	}

	get redirect() {
		return this[INTERNALS$2].redirect;
	}

	get signal() {
		return this[INTERNALS$2].signal;
	}

	/**
	 * Clone this request
	 *
	 * @return Request
	 */
	clone() {
		return new Request(this);
	}
}
|
||
|
|
|
||
|
|
// Give Request instances the shared Body methods (json, text, buffer, …).
Body.mixIn(Request.prototype);

// Brand the class so Object.prototype.toString reports "[object Request]".
Object.defineProperty(Request.prototype, Symbol.toStringTag, {
	value: 'Request',
	writable: false,
	enumerable: false,
	configurable: true
});

// Mark the public accessors enumerable, matching the WHATWG interface shape.
Object.defineProperties(Request.prototype, {
	method: { enumerable: true },
	url: { enumerable: true },
	headers: { enumerable: true },
	redirect: { enumerable: true },
	clone: { enumerable: true },
	signal: { enumerable: true }
});
|
||
|
|
|
||
|
|
/**
 * Convert a Request to Node.js http request options.
 *
 * @param   Request  request  A Request instance
 * @return  Object   The options object to be passed to http.request
 */
function getNodeRequestOptions(request) {
	const parsedURL = request[INTERNALS$2].parsedURL;
	// Copy so the caller's header object is never mutated.
	const headers = new Headers(request[INTERNALS$2].headers);

	// fetch step 1.3
	if (!headers.has('Accept')) {
		headers.set('Accept', '*/*');
	}

	// Basic fetch
	if (!parsedURL.protocol || !parsedURL.hostname) {
		throw new TypeError('Only absolute URLs are supported');
	}
	if (!/^https?:$/.test(parsedURL.protocol)) {
		throw new TypeError('Only HTTP(S) protocols are supported');
	}

	if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {
		throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');
	}

	// HTTP-network-or-cache fetch steps 2.4-2.7:
	// explicit '0' for body-less POST/PUT, otherwise the computed byte count
	// (when it is knowable; streaming bodies yield no Content-Length).
	let contentLengthValue = null;
	if (request.body == null && /^(POST|PUT)$/i.test(request.method)) {
		contentLengthValue = '0';
	} else if (request.body != null) {
		const totalBytes = getTotalBytes(request);
		if (typeof totalBytes === 'number') {
			contentLengthValue = String(totalBytes);
		}
	}
	if (contentLengthValue) {
		headers.set('Content-Length', contentLengthValue);
	}

	// HTTP-network-or-cache fetch step 2.11
	if (!headers.has('User-Agent')) {
		headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');
	}

	// HTTP-network-or-cache fetch step 2.15
	if (request.compress && !headers.has('Accept-Encoding')) {
		headers.set('Accept-Encoding', 'gzip,deflate');
	}

	// An agent may be a factory function resolved against the target URL.
	let agent = request.agent;
	if (typeof agent === 'function') {
		agent = agent(parsedURL);
	}

	if (!headers.has('Connection') && !agent) {
		headers.set('Connection', 'close');
	}

	// HTTP-network fetch step 4.2
	// chunked encoding is handled by Node.js

	return Object.assign({}, parsedURL, {
		method: request.method,
		headers: exportNodeCompatibleHeaders(headers),
		agent
	});
}
|
||
|
|
|
||
|
|
/**
 * abort-error.js
 *
 * AbortError interface for cancelled requests
 */

/**
 * Create AbortError instance
 *
 * @param   String  message  Error message for human
 * @return  AbortError
 */
function AbortError(message) {
	Error.call(this, message);

	// Machine-readable kind plus the human message, mirroring FetchError.
	this.type = 'aborted';
	this.message = message;

	// hide custom error implementation details from end-users
	Error.captureStackTrace(this, this.constructor);
}

// Classic prototypal inheritance from Error (pre-`class` style).
AbortError.prototype = Object.create(Error.prototype);
Object.assign(AbortError.prototype, {
	constructor: AbortError,
	name: 'AbortError'
});
|
||
|
|
|
||
|
|
// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
// (read them off the namespace objects instead of destructuring the import)
const PassThrough$1 = Stream.PassThrough;
const resolve_url = Url.resolve;
|
||
|
|
|
||
|
|
/**
 * Fetch function
 *
 * Wraps Node's http/https.request into a WHATWG-fetch-shaped Promise API,
 * handling aborts, timeouts, redirects and content decoding.
 *
 * @param   Mixed    url   Absolute url or Request instance
 * @param   Object   opts  Fetch options
 * @return  Promise
 */
function fetch(url, opts) {

	// allow custom promise
	if (!fetch.Promise) {
		throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
	}

	// Body helpers must settle with the same Promise implementation.
	Body.Promise = fetch.Promise;

	// wrap http.request into fetch
	return new fetch.Promise(function (resolve, reject) {
		// build request object
		const request = new Request(url, opts);
		const options = getNodeRequestOptions(request);

		const send = (options.protocol === 'https:' ? https : http).request;
		const signal = request.signal;

		let response = null;

		// Reject with AbortError and tear down both request and response streams.
		const abort = function abort() {
			let error = new AbortError('The user aborted a request.');
			reject(error);
			if (request.body && request.body instanceof Stream.Readable) {
				request.body.destroy(error);
			}
			if (!response || !response.body) return;
			response.body.emit('error', error);
		};

		// Already-aborted signal: fail before any I/O happens.
		if (signal && signal.aborted) {
			abort();
			return;
		}

		const abortAndFinalize = function abortAndFinalize() {
			abort();
			finalize();
		};

		// send request
		const req = send(options);
		let reqTimeout;

		if (signal) {
			signal.addEventListener('abort', abortAndFinalize);
		}

		// Common cleanup: kill the socket, unhook the abort listener, stop the timer.
		function finalize() {
			req.abort();
			if (signal) signal.removeEventListener('abort', abortAndFinalize);
			clearTimeout(reqTimeout);
		}

		// The timeout clock starts when a socket is assigned, not at call time.
		if (request.timeout) {
			req.once('socket', function (socket) {
				reqTimeout = setTimeout(function () {
					reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
					finalize();
				}, request.timeout);
			});
		}

		req.on('error', function (err) {
			reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
			finalize();
		});

		req.on('response', function (res) {
			clearTimeout(reqTimeout);

			const headers = createHeadersLenient(res.headers);

			// HTTP fetch step 5
			if (fetch.isRedirect(res.statusCode)) {
				// HTTP fetch step 5.2
				const location = headers.get('Location');

				// HTTP fetch step 5.3
				const locationURL = location === null ? null : resolve_url(request.url, location);

				// HTTP fetch step 5.5
				switch (request.redirect) {
					case 'error':
						reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));
						finalize();
						return;
					case 'manual':
						// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
						if (locationURL !== null) {
							// handle corrupted header
							try {
								headers.set('Location', locationURL);
							} catch (err) {
								// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request
								reject(err);
							}
						}
						break;
					case 'follow':
						// HTTP-redirect fetch step 2
						if (locationURL === null) {
							break;
						}

						// HTTP-redirect fetch step 5
						if (request.counter >= request.follow) {
							reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
							finalize();
							return;
						}

						// HTTP-redirect fetch step 6 (counter increment)
						// Create a new Request object.
						const requestOpts = {
							headers: new Headers(request.headers),
							follow: request.follow,
							counter: request.counter + 1,
							agent: request.agent,
							compress: request.compress,
							method: request.method,
							body: request.body,
							signal: request.signal,
							timeout: request.timeout,
							size: request.size
						};

						// HTTP-redirect fetch step 9: a stream body (unknown length)
						// cannot be replayed on the redirected request.
						if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
							reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
							finalize();
							return;
						}

						// HTTP-redirect fetch step 11: 303 (and POST on 301/302)
						// downgrades to a body-less GET.
						if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
							requestOpts.method = 'GET';
							requestOpts.body = undefined;
							requestOpts.headers.delete('content-length');
						}

						// HTTP-redirect fetch step 15: recurse into fetch.
						resolve(fetch(new Request(locationURL, requestOpts)));
						finalize();
						return;
				}
			}

			// prepare response
			res.once('end', function () {
				if (signal) signal.removeEventListener('abort', abortAndFinalize);
			});
			let body = res.pipe(new PassThrough$1());

			const response_options = {
				url: request.url,
				status: res.statusCode,
				statusText: res.statusMessage,
				headers: headers,
				size: request.size,
				timeout: request.timeout,
				counter: request.counter
			};

			// HTTP-network fetch step 12.1.1.3
			const codings = headers.get('Content-Encoding');

			// HTTP-network fetch step 12.1.1.4: handle content codings

			// in following scenarios we ignore compression support
			// 1. compression support is disabled
			// 2. HEAD request
			// 3. no Content-Encoding header
			// 4. no content response (204)
			// 5. content not modified response (304)
			if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
				response = new Response(body, response_options);
				resolve(response);
				return;
			}

			// For Node v6+
			// Be less strict when decoding compressed responses, since sometimes
			// servers send slightly invalid responses that are still accepted
			// by common browsers.
			// Always using Z_SYNC_FLUSH is what cURL does.
			const zlibOptions = {
				flush: zlib.Z_SYNC_FLUSH,
				finishFlush: zlib.Z_SYNC_FLUSH
			};

			// for gzip
			if (codings == 'gzip' || codings == 'x-gzip') {
				body = body.pipe(zlib.createGunzip(zlibOptions));
				response = new Response(body, response_options);
				resolve(response);
				return;
			}

			// for deflate
			if (codings == 'deflate' || codings == 'x-deflate') {
				// handle the infamous raw deflate response from old servers
				// a hack for old IIS and Apache servers
				// (sniff the first chunk: 0x?8 low nibble means a zlib header)
				const raw = res.pipe(new PassThrough$1());
				raw.once('data', function (chunk) {
					// see http://stackoverflow.com/questions/37519828
					if ((chunk[0] & 0x0F) === 0x08) {
						body = body.pipe(zlib.createInflate());
					} else {
						body = body.pipe(zlib.createInflateRaw());
					}
					response = new Response(body, response_options);
					resolve(response);
				});
				return;
			}

			// for br
			if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
				body = body.pipe(zlib.createBrotliDecompress());
				response = new Response(body, response_options);
				resolve(response);
				return;
			}

			// otherwise, use response as-is
			response = new Response(body, response_options);
			resolve(response);
		});

		// Pump the request body (if any) into the outgoing stream.
		writeToStream(req, request);
	});
}
|
||
|
|
/**
 * Redirect code matching
 *
 * @param   Number   code  Status code
 * @return  Boolean
 */
fetch.isRedirect = function (code) {
	// 301/302/303 plus the method-preserving 307/308.
	switch (code) {
		case 301:
		case 302:
		case 303:
		case 307:
		case 308:
			return true;
		default:
			return false;
	}
};
|
||
|
|
|
||
|
|
// expose Promise
fetch.Promise = global.Promise;

// CommonJS/ESM interop: the callable `fetch` IS the module, with the
// original named exports attached as properties.
module.exports = exports = fetch;
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = exports;
exports.Headers = Headers;
exports.Request = Request;
exports.Response = Response;
exports.FetchError = FetchError;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 462:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
|
||
|
|
// See http://www.robvanderwoude.com/escapechars.php
const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g;

/**
 * Escape a command name for cmd.exe by caret-escaping every meta character.
 */
function escapeCommand(arg) {
	// Escape meta chars
	return arg.replace(metaCharsRegExp, '^$1');
}

/**
 * Quote and escape a single argument for cmd.exe.
 * With `doubleEscapeMetaChars`, meta chars are caret-escaped a second time
 * (for strings that cmd.exe will interpret twice).
 */
function escapeArgument(arg, doubleEscapeMetaChars) {
	// Convert to string
	let escaped = `${arg}`;

	// Algorithm below is based on https://qntm.org/cmd

	// Sequence of backslashes followed by a double quote:
	// double up all the backslashes and escape the double quote
	escaped = escaped.replace(/(\\*)"/g, '$1$1\\"');

	// Sequence of backslashes followed by the end of the string
	// (which will become a double quote later):
	// double up all the backslashes
	escaped = escaped.replace(/(\\*)$/, '$1$1');

	// All other backslashes occur literally

	// Quote the whole thing, then escape meta chars
	escaped = `"${escaped}"`.replace(metaCharsRegExp, '^$1');

	// Double escape meta chars if necessary
	if (doubleEscapeMetaChars) {
		escaped = escaped.replace(metaCharsRegExp, '^$1');
	}

	return escaped;
}
|
||
|
|
|
||
|
|
// Public API: escape a command name, or quote/escape one argument, for cmd.exe.
module.exports.command = escapeCommand;
module.exports.argument = escapeArgument;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 463:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
|
||
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||
|
|
|
||
|
|
// Unwrap a transpiled ES module namespace: prefer its `default` export when
// present, otherwise use the value itself (plain CommonJS export).
function _interopDefault (ex) {
	if (ex && typeof ex === 'object' && 'default' in ex) {
		return ex['default'];
	}
	return ex;
}
|
||
|
|
|
||
|
|
var deprecation = __webpack_require__(692);
var once = _interopDefault(__webpack_require__(969));

// Emit a deprecation warning at most once per process.
const logOnce = once(deprecation => console.warn(deprecation));
|
||
|
|
/**
 * Error with extra properties to help with debugging
 */
class RequestError extends Error {
	constructor(message, statusCode, options) {
		super(message);

		// Maintains proper stack trace (only available on V8)
		/* istanbul ignore next */
		if (Error.captureStackTrace) {
			Error.captureStackTrace(this, this.constructor);
		}

		this.name = "HttpError";
		this.status = statusCode;

		// `code` is deprecated in favour of `status`; warn once on first access.
		Object.defineProperty(this, "code", {
			get() {
				logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`."));
				return statusCode;
			}
		});

		this.headers = options.headers || {};

		// redact request credentials without mutating original request options
		const requestCopy = Object.assign({}, options.request);
		if (options.request.headers.authorization) {
			requestCopy.headers = Object.assign({}, options.request.headers, {
				authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]")
			});
		}

		requestCopy.url = requestCopy.url
			// client_id & client_secret can be passed as URL query parameters to increase rate limit
			// see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications
			.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]")
			// OAuth tokens can be passed as URL query parameters, although it is not recommended
			// see https://developer.github.com/v3/#oauth2-token-sent-in-a-header
			.replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
		this.request = requestCopy;
	}
}
|
||
|
|
|
||
|
|
// Sole export of @octokit/request-error.
exports.RequestError = RequestError;
//# sourceMappingURL=index.js.map
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 470:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
// TypeScript-emitted helper: drive a generator-based coroutine, resolving the
// returned promise with its final value (downlevel `async/await`).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// TypeScript-emitted helper: copy a CommonJS module's own properties onto a
// fresh namespace object and expose the module itself as `default`.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
|
||
|
|
Object.defineProperty(exports, "__esModule", { value: true });
// @actions/core: workflow-command helpers (inputs, outputs, logging, state).
const command_1 = __webpack_require__(431);
const file_command_1 = __webpack_require__(102);
const utils_1 = __webpack_require__(82);
const os = __importStar(__webpack_require__(87));
const path = __importStar(__webpack_require__(622));
|
||
|
|
/**
 * The code to exit an action
 */
var ExitCode;
// TypeScript enum emit: builds both name->value and value->name mappings.
(function (ExitCode) {
    /**
     * A code indicating that the action was successful
     */
    ExitCode[ExitCode["Success"] = 0] = "Success";
    /**
     * A code indicating that the action was a failure
     */
    ExitCode[ExitCode["Failure"] = 1] = "Failure";
})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
|
||
|
|
//-----------------------------------------------------------------------
// Variables
//-----------------------------------------------------------------------
/**
 * Sets env variable for this action and future actions in the job
 * @param name the name of the variable to set
 * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function exportVariable(name, val) {
    const convertedVal = utils_1.toCommandValue(val);
    process.env[name] = convertedVal;
    // Newer runners expose GITHUB_ENV: append a heredoc-style entry there.
    // Older runners fall back to the deprecated `set-env` workflow command.
    const filePath = process.env['GITHUB_ENV'] || '';
    if (filePath) {
        const delimiter = '_GitHubActionsFileCommandDelimeter_';
        const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;
        file_command_1.issueCommand('ENV', commandValue);
    }
    else {
        command_1.issueCommand('set-env', { name }, convertedVal);
    }
}
exports.exportVariable = exportVariable;
/**
 * Registers a secret which will get masked from logs
 * @param secret value of the secret
 */
function setSecret(secret) {
    command_1.issueCommand('add-mask', {}, secret);
}
exports.setSecret = setSecret;
/**
 * Prepends inputPath to the PATH (for this action and future actions)
 * @param inputPath the path to prepend
 */
function addPath(inputPath) {
    // Prefer the GITHUB_PATH file command; fall back to deprecated `add-path`.
    const filePath = process.env['GITHUB_PATH'] || '';
    if (filePath) {
        file_command_1.issueCommand('PATH', inputPath);
    }
    else {
        command_1.issueCommand('add-path', {}, inputPath);
    }
    // Also update PATH for the current process so the change is effective now.
    process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
}
exports.addPath = addPath;
|
||
|
|
/**
 * Gets the value of an input. The value is also trimmed.
 *
 * @param     name     name of the input to get
 * @param     options  optional. See InputOptions.
 * @returns   string
 */
function getInput(name, options) {
    // The runner exposes inputs as INPUT_<NAME> env vars; spaces become '_'.
    const envKey = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
    const val = process.env[envKey] || '';
    if (options && options.required && !val) {
        throw new Error(`Input required and not supplied: ${name}`);
    }
    return val.trim();
}
|
||
|
|
exports.getInput = getInput;
/**
 * Sets the value of an output.
 *
 * @param     name     name of the output to set
 * @param     value    value to store. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput(name, value) {
    command_1.issueCommand('set-output', { name }, value);
}
exports.setOutput = setOutput;
/**
 * Enables or disables the echoing of commands into stdout for the rest of the step.
 * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
 *
 */
function setCommandEcho(enabled) {
    command_1.issue('echo', enabled ? 'on' : 'off');
}
exports.setCommandEcho = setCommandEcho;
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
/**
 * Sets the action status to failed.
 * When the action exits it will be with an exit code of 1
 * @param message add error issue message
 */
function setFailed(message) {
    // Only sets the exit code; it does NOT terminate the process itself.
    process.exitCode = ExitCode.Failure;
    error(message);
}
exports.setFailed = setFailed;
//-----------------------------------------------------------------------
// Logging Commands
//-----------------------------------------------------------------------
|
||
|
|
/**
 * Gets whether Actions Step Debug is on or not
 */
function isDebug() {
    // The runner sets RUNNER_DEBUG=1 when step debug logging is enabled.
    const flag = process.env['RUNNER_DEBUG'];
    return flag === '1';
}
|
||
|
|
exports.isDebug = isDebug;
/**
 * Writes debug message to user log
 * @param message debug message
 */
function debug(message) {
    command_1.issueCommand('debug', {}, message);
}
exports.debug = debug;
/**
 * Adds an error issue
 * @param message error issue message. Errors will be converted to string via toString()
 */
function error(message) {
    command_1.issue('error', message instanceof Error ? message.toString() : message);
}
exports.error = error;
/**
 * Adds an warning issue
 * @param message warning issue message. Errors will be converted to string via toString()
 */
function warning(message) {
    command_1.issue('warning', message instanceof Error ? message.toString() : message);
}
exports.warning = warning;
/**
 * Writes info to log with console.log.
 * @param message info message
 */
function info(message) {
    process.stdout.write(message + os.EOL);
}
exports.info = info;
/**
 * Begin an output group.
 *
 * Output until the next `groupEnd` will be foldable in this group
 *
 * @param name The name of the output group
 */
function startGroup(name) {
    command_1.issue('group', name);
}
exports.startGroup = startGroup;
/**
 * End an output group.
 */
function endGroup() {
    command_1.issue('endgroup');
}
exports.endGroup = endGroup;
/**
 * Wrap an asynchronous function call in a group.
 *
 * Returns the same type as the function itself.
 *
 * @param name The name of the group
 * @param fn The function to wrap in the group
 */
function group(name, fn) {
    return __awaiter(this, void 0, void 0, function* () {
        startGroup(name);
        let result;
        try {
            result = yield fn();
        }
        finally {
            // Always close the group, even if fn throws/rejects.
            endGroup();
        }
        return result;
    });
}
exports.group = group;
//-----------------------------------------------------------------------
// Wrapper action state
//-----------------------------------------------------------------------
/**
 * Saves state for current action, the state can only be retrieved by this action's post job execution.
 *
 * @param     name     name of the state to store
 * @param     value    value to store. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function saveState(name, value) {
    command_1.issueCommand('save-state', { name }, value);
}
exports.saveState = saveState;
|
||
|
|
/**
 * Gets the value of an state set by this action's main execution.
 *
 * @param     name     name of the state to get
 * @returns   string
 */
function getState(name) {
    // State is round-tripped through STATE_<name> environment variables.
    const value = process.env[`STATE_${name}`];
    return value || '';
}
|
||
|
|
// Final export of @actions/core's visible section.
exports.getState = getState;
//# sourceMappingURL=core.js.map
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 472:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
// TypeScript-emitted helper: drive a generator-based coroutine, resolving the
// returned promise with its final value (downlevel `async/await`).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// TypeScript-emitted helper for downlevel `for await`: wrap a sync or async
// iterable so each `next`/`throw`/`return` result comes back as a promise.
var __asyncValues = (this && this.__asyncValues) || function (o) {
    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
    var m = o[Symbol.asyncIterator], i;
    return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
    function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
    function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
};
|
||
|
|
Object.defineProperty(exports, "__esModule", { value: true });
exports.getLatestSemVersionedTagFactory = void 0;
// Project modules: tag listing and semver parsing/comparison.
const listTags_1 = __webpack_require__(500);
const NpmModuleVersion_1 = __webpack_require__(395);
|
||
|
|
// Factory binding an octokit client; returns { getLatestSemVersionedTag }.
function getLatestSemVersionedTagFactory(params) {
    const { octokit } = params;
    // Scan every tag of owner/repo and return the one with the highest
    // semantic version (tags shaped `X.Y.Z` or `vX.Y.Z`), or undefined
    // when no tag matches.
    function getLatestSemVersionedTag(params) {
        var e_1, _a;
        return __awaiter(this, void 0, void 0, function* () {
            const { owner, repo } = params;
            const semVersionedTags = [];
            const { listTags } = listTags_1.listTagsFactory({ octokit });
            try {
                // Downlevel `for await ... of listTags(...)` (TypeScript emit).
                for (var _b = __asyncValues(listTags({ owner, repo })), _c; _c = yield _b.next(), !_c.done;) {
                    const tag = _c.value;
                    // Accept only plain semver tags, with an optional leading "v".
                    const match = tag.match(/^v?([0-9]+\.[0-9]+\.[0-9]+)$/);
                    if (!match) {
                        continue;
                    }
                    semVersionedTags.push({
                        tag,
                        "version": NpmModuleVersion_1.NpmModuleVersion.parse(match[1])
                    });
                }
            }
            catch (e_1_1) { e_1 = { error: e_1_1 }; }
            finally {
                try {
                    // Close the async iterator if the loop exited early.
                    if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b);
                }
                finally { if (e_1) throw e_1.error; }
            }
            // Sort descending by version; the first entry is the latest.
            return semVersionedTags
                .sort(({ version: vX }, { version: vY }) => NpmModuleVersion_1.NpmModuleVersion.compare(vY, vX))[0];
        });
    }
    ;
    return { getLatestSemVersionedTag };
}
exports.getLatestSemVersionedTagFactory = getLatestSemVersionedTagFactory;
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 489:
|
||
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
|
||
|
|
const path = __webpack_require__(622);
|
||
|
|
const which = __webpack_require__(814);
|
||
|
|
const pathKey = __webpack_require__(39)();
|
||
|
|
|
||
|
|
// Attempts to resolve `parsed.command` to an absolute executable path using
// `which.sync`. Returns undefined when the command cannot be found (lookup
// errors are deliberately swallowed). The process cwd is temporarily switched
// when a custom cwd is given and always restored in the `finally`.
function resolveCommandAttempt(parsed, withoutPathExt) {
    const cwd = process.cwd();
    const hasCustomCwd = parsed.options.cwd != null;

    // If a custom `cwd` was specified, we need to change the process cwd
    // because `which` will do stat calls but does not support a custom cwd
    if (hasCustomCwd) {
        try {
            process.chdir(parsed.options.cwd);
        } catch (err) {
            /* Empty */
        }
    }

    let resolved;

    try {
        resolved = which.sync(parsed.command, {
            // Honor a caller-supplied env, falling back to the process env.
            path: (parsed.options.env || process.env)[pathKey],
            pathExt: withoutPathExt ? path.delimiter : undefined,
        });
    } catch (e) {
        /* Empty */
    } finally {
        // Always restore the original working directory.
        process.chdir(cwd);
    }

    // If we successfully resolved, ensure that an absolute path is returned
    // Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it
    if (resolved) {
        resolved = path.resolve(hasCustomCwd ? parsed.options.cwd : '', resolved);
    }

    return resolved;
}
|
||
|
|
|
||
|
|
/**
 * Resolves a parsed command to an absolute path: first honoring PATHEXT,
 * then retrying without it when the first attempt fails.
 */
function resolveCommand(parsed) {
    const firstAttempt = resolveCommandAttempt(parsed);
    if (firstAttempt) {
        return firstAttempt;
    }
    return resolveCommandAttempt(parsed, true);
}
|
||
|
|
|
||
|
|
module.exports = resolveCommand;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 500:
|
||
|
|
/***/ (function(__unusedmodule, exports) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
// TypeScript-emitted helper: runs a generator-based coroutine as a Promise.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// TypeScript-emitted helper: marker wrapper distinguishing awaited values
// from yielded values inside compiled async generators.
var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); }
// TypeScript-emitted helper: drives a sync generator as an async generator,
// queueing concurrent next/throw/return calls so they settle in order.
var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) {
    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
    var g = generator.apply(thisArg, _arguments || []), i, q = [];
    return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
    function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
    function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
    function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
    function fulfill(value) { resume("next", value); }
    function reject(value) { resume("throw", value); }
    function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
};
|
||
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
|
exports.listTagsFactory = void 0;
|
||
|
|
// Page size requested from the GitHub "list tags" endpoint.
const per_page = 99;
/**
 * Builds tag-listing helpers bound to the given octokit client:
 * - listTags: async generator yielding every tag name, fetching pages lazily
 * - getLatestTag: the first tag returned by the API (see note below)
 */
function listTagsFactory(params) {
    const { octokit } = params;
    // Thin async wrapper around the octokit endpoint.
    const octokit_repo_listTags = ((params) => __awaiter(this, void 0, void 0, function* () {
        return octokit.repos.listTags(params);
    }));
    /** Async-iterates every tag name of `owner/repo`, page by page. */
    function listTags(params) {
        return __asyncGenerator(this, arguments, function* listTags_1() {
            const { owner, repo } = params;
            let page = 1;
            while (true) {
                const resp = yield __await(octokit_repo_listTags({
                    owner,
                    repo,
                    per_page,
                    "page": page++
                }));
                for (const branch of resp.data.map(({ name }) => name)) {
                    // Compiled form of `yield branch` in an async generator.
                    yield yield __await(branch);
                }
                // BUGFIX (consistency): compare against `per_page` instead of a
                // duplicated magic 99, so a short page — i.e. the last page —
                // still ends iteration if per_page is ever changed.
                if (resp.data.length < per_page) {
                    break;
                }
            }
        });
    }
    /** Returns the same "latest" tag as deno.land/x, not actually the latest though */
    function getLatestTag(params) {
        return __awaiter(this, void 0, void 0, function* () {
            const { owner, repo } = params;
            const itRes = yield listTags({ owner, repo }).next();
            if (itRes.done) {
                // Repository has no tags at all.
                return undefined;
            }
            return itRes.value;
        });
    }
    return { listTags, getLatestTag };
}
|
||
|
|
exports.listTagsFactory = listTagsFactory;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 503:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
// TypeScript-emitted helper: re-export a property binding from another module.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// TypeScript-emitted helper: attach a CommonJS module as the "default" export.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// TypeScript-emitted helper: emulate `import * as ns` over a CommonJS module.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
// TypeScript-emitted helper: runs a generator-based coroutine as a Promise.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
||
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
|
exports.gitCommit = void 0;
|
||
|
|
const st = __importStar(__webpack_require__(425));
|
||
|
|
/**
 * Clones `owner/repo`, runs `performChanges` with the checkout as the working
 * directory, and — when the callback returns `{ commit: true, ... }` —
 * configures the git author, commits and pushes using the provided token.
 * The clone directory is always removed afterwards; if `performChanges`
 * threw, its error is re-thrown only after cleanup.
 *
 * NOTE(review): owner/repo/commitAuthorEmail/message are interpolated into
 * shell commands unescaped — presumably trusted action inputs; confirm.
 */
function gitCommit(params) {
    return __awaiter(this, void 0, void 0, function* () {
        const { owner, repo, commitAuthorEmail, performChanges, github_token } = params;
        yield st.exec(`git clone https://github.com/${owner}/${repo}`);
        const cwd = process.cwd();
        process.chdir(repo);
        // Capture a thrown error as the result instead of throwing, so the
        // cleanup below always runs.
        const changesResult = yield (() => __awaiter(this, void 0, void 0, function* () {
            try {
                return yield performChanges();
            }
            catch (error) {
                return error;
            }
        }))();
        if (!(changesResult instanceof Error) && changesResult.commit) {
            yield st.exec(`git config --local user.email "${commitAuthorEmail}"`);
            // The commit author name is the local part of the email address.
            yield st.exec(`git config --local user.name "${commitAuthorEmail.split("@")[0]}"`);
            if (changesResult.addAll) {
                yield st.exec(`git add -A`);
            }
            yield st.exec(`git commit -am "${changesResult.message}"`);
            // Push over HTTPS with the token embedded in the remote URL.
            yield st.exec(`git push "https://${owner}:${github_token}@github.com/${owner}/${repo}.git"`);
        }
        // Restore cwd and delete the clone before reporting any error.
        process.chdir(cwd);
        yield st.exec(`rm -r ${repo}`);
        if (changesResult instanceof Error) {
            throw changesResult;
        }
    });
}
exports.gitCommit = gitCommit;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 510:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
module.exports = addHook;
|
||
|
|
|
||
|
|
/**
 * Registers `hook` under `state.registry[name]`, wrapping it according to
 * `kind`:
 * - "before": runs the hook, then the method (both receive `options`)
 * - "after":  runs the method, passes its result to the hook, returns the result
 * - "error":  runs the method and lets the hook handle a rejection
 * - any other kind (e.g. "wrap"): stored unchanged
 * Both the wrapped function and the original are kept in the registry entry.
 */
function addHook(state, kind, name, hook) {
    const orig = hook;
    const registry = state.registry;

    if (!registry[name]) {
        registry[name] = [];
    }

    let wrapped = hook;

    if (kind === "before") {
        wrapped = (method, options) =>
            Promise.resolve()
                .then(orig.bind(null, options))
                .then(method.bind(null, options));
    } else if (kind === "after") {
        wrapped = (method, options) => {
            let result;
            return Promise.resolve()
                .then(method.bind(null, options))
                .then((result_) => {
                    result = result_;
                    return orig(result, options);
                })
                .then(() => result);
        };
    } else if (kind === "error") {
        wrapped = (method, options) =>
            Promise.resolve()
                .then(method.bind(null, options))
                .catch((error) => orig(error, options));
    }

    registry[name].push({
        hook: wrapped,
        orig,
    });
}
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 518:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
// TypeScript-emitted helper: runs a generator-based coroutine as a Promise.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
||
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
|
exports.action = exports.setOutput = exports.getActionParams = void 0;
|
||
|
|
const outputHelper_1 = __webpack_require__(762);
|
||
|
|
const inputHelper_1 = __webpack_require__(649);
|
||
|
|
const createOctokit_1 = __webpack_require__(906);
|
||
|
|
// Typed input reader restricted to the inputs this sub-action consumes.
exports.getActionParams = inputHelper_1.getActionParamsFactory({
    "inputNameSubset": [
        "owner",
        "repo",
        "should_webhook_be_enabled",
        "github_token"
    ]
}).getActionParams;
exports.setOutput = outputHelper_1.setOutputFactory().setOutput;
/**
 * setup_repo_webhook_for_deno_land_publishing action: registers the
 * deno.land/x publish webhook on the repository (active or not, per
 * `should_webhook_be_enabled`). Returns
 * { was_hook_created: "true" | "false" } — any octokit failure (for
 * instance when the hook already exists) is reported as "false", not thrown.
 */
function action(_actionName, params, core) {
    return __awaiter(this, void 0, void 0, function* () {
        const { owner, repo, should_webhook_be_enabled, github_token } = params;
        const octokit = createOctokit_1.createOctokit({ github_token });
        try {
            yield octokit.repos.createWebhook({
                owner,
                repo,
                "active": should_webhook_be_enabled === "true",
                "events": ["create"],
                "config": {
                    // NOTE(review): subdir is double-encoded ("%252F") —
                    // presumably what the deno.land endpoint expects; confirm
                    // before changing.
                    "url": `https://api.deno.land/webhook/gh/${repo}?subdir=deno_dist%252F`,
                    "content_type": "json"
                }
            });
        }
        catch (_a) {
            return { "was_hook_created": "false" };
        }
        return { "was_hook_created": "true" };
    });
}
exports.action = action;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 522:
|
||
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
|
||
|
|
// Matches "@scope/name" (capturing scope and name) or a bare "name".
var scopedPackagePattern = new RegExp('^(?:@([^/]+?)[/])?([^/]+?)$')
var builtins = __webpack_require__(745)
// Names that are outright rejected as package names.
var blacklist = [
  'node_modules',
  'favicon.ico'
]
|
||
|
|
|
||
|
|
// Validates an npm package name. Returns an object with
// `validForNewPackages` / `validForOldPackages` flags plus optional
// `warnings` (rules that only block newly published packages) and `errors`
// (rules that apply to every package).
var validate = module.exports = function (name) {
  var warnings = []
  var errors = []

  if (name === null) {
    errors.push('name cannot be null')
    return done(warnings, errors)
  }

  if (name === undefined) {
    errors.push('name cannot be undefined')
    return done(warnings, errors)
  }

  if (typeof name !== 'string') {
    errors.push('name must be a string')
    return done(warnings, errors)
  }

  if (!name.length) {
    errors.push('name length must be greater than zero')
  }

  if (name.match(/^\./)) {
    errors.push('name cannot start with a period')
  }

  if (name.match(/^_/)) {
    errors.push('name cannot start with an underscore')
  }

  if (name.trim() !== name) {
    errors.push('name cannot contain leading or trailing spaces')
  }

  // No funny business
  blacklist.forEach(function (blacklistedName) {
    if (name.toLowerCase() === blacklistedName) {
      errors.push(blacklistedName + ' is a blacklisted name')
    }
  })

  // Generate warnings for stuff that used to be allowed

  // core module names like http, events, util, etc
  builtins.forEach(function (builtin) {
    if (name.toLowerCase() === builtin) {
      warnings.push(builtin + ' is a core module name')
    }
  })

  // really-long-package-names-------------------------------such--length-----many---wow
  // the thisisareallyreallylongpackagenameitshouldpublishdowenowhavealimittothelengthofpackagenames-poch.
  if (name.length > 214) {
    warnings.push('name can no longer contain more than 214 characters')
  }

  // mIxeD CaSe nAMEs
  if (name.toLowerCase() !== name) {
    warnings.push('name can no longer contain capital letters')
  }

  // Special characters are only checked on the final path segment, so scoped
  // names ("@scope/pkg") are judged on the package part alone.
  if (/[~'!()*]/.test(name.split('/').slice(-1)[0])) {
    warnings.push('name can no longer contain special characters ("~\'!()*")')
  }

  if (encodeURIComponent(name) !== name) {
    // Maybe it's a scoped package name, like @user/package
    var nameMatch = name.match(scopedPackagePattern)
    if (nameMatch) {
      var user = nameMatch[1]
      var pkg = nameMatch[2]
      if (encodeURIComponent(user) === user && encodeURIComponent(pkg) === pkg) {
        return done(warnings, errors)
      }
    }

    errors.push('name can only contain URL-friendly characters')
  }

  return done(warnings, errors)
}

validate.scopedPackagePattern = scopedPackagePattern
|
||
|
|
|
||
|
|
// Packs the collected warnings/errors into the result object; the
// warnings/errors keys are omitted entirely when their lists are empty.
var done = function (warnings, errors) {
  var hasErrors = errors.length > 0
  var hasWarnings = warnings.length > 0
  var result = {
    validForNewPackages: !hasErrors && !hasWarnings,
    validForOldPackages: !hasErrors,
    warnings: warnings,
    errors: errors
  }
  if (!hasWarnings) delete result.warnings
  if (!hasErrors) delete result.errors
  return result
}
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 523:
|
||
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
|
||
|
|
var register = __webpack_require__(363)
|
||
|
|
var addHook = __webpack_require__(510)
|
||
|
|
var removeHook = __webpack_require__(763)
|
||
|
|
|
||
|
|
// bind with array of arguments: https://stackoverflow.com/a/21792913
var bind = Function.bind
var bindable = bind.bind(bind)

// Attaches the hook API (remove / before / error / after / wrap) both
// directly on `hook` and under `hook.api`.
function bindApi (hook, state, name) {
  var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])
  hook.api = { remove: removeHookRef }
  hook.remove = removeHookRef

  ;['before', 'error', 'after', 'wrap'].forEach(function (kind) {
    var args = name ? [state, kind, name] : [state, kind]
    hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)
  })
}

// A hook bound to one fixed, implicit hook name ('h').
function HookSingular () {
  var singularHookName = 'h'
  var singularHookState = {
    registry: {}
  }
  var singularHook = register.bind(null, singularHookState, singularHookName)
  bindApi(singularHook, singularHookState, singularHookName)
  return singularHook
}

// A collection of named hooks sharing a single registry.
function HookCollection () {
  var state = {
    registry: {}
  }

  var hook = register.bind(null, state)
  bindApi(hook, state)

  return hook
}

var collectionHookDeprecationMessageDisplayed = false
// Deprecated alias of HookCollection; warns once per process.
function Hook () {
  if (!collectionHookDeprecationMessageDisplayed) {
    console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4')
    collectionHookDeprecationMessageDisplayed = true
  }
  return HookCollection()
}

Hook.Singular = HookSingular.bind()
Hook.Collection = HookCollection.bind()

module.exports = Hook
// expose constructors as a named property for TypeScript
module.exports.Hook = Hook
module.exports.Singular = Hook.Singular
module.exports.Collection = Hook.Collection
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 562:
|
||
|
|
/***/ (function(__unusedmodule, exports) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
|
||
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||
|
|
|
||
|
|
/**
 * Builds a user-agent string for the current runtime: the browser's
 * navigator.userAgent when available, otherwise a "Node.js/<version>
 * (<platform>; <arch>)" string, or a placeholder when neither exists.
 */
function getUserAgent() {
    const hasNavigator = typeof navigator === "object" && "userAgent" in navigator;
    if (hasNavigator) {
        return navigator.userAgent;
    }

    const hasProcess = typeof process === "object" && "version" in process;
    if (!hasProcess) {
        return "<environment undetectable>";
    }

    const version = process.version.substr(1);
    return `Node.js/${version} (${process.platform}; ${process.arch})`;
}
|
||
|
|
|
||
|
|
exports.getUserAgent = getUserAgent;
|
||
|
|
//# sourceMappingURL=index.js.map
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 568:
|
||
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
|
||
|
|
const path = __webpack_require__(622);
|
||
|
|
const niceTry = __webpack_require__(948);
|
||
|
|
const resolveCommand = __webpack_require__(489);
|
||
|
|
const escape = __webpack_require__(462);
|
||
|
|
const readShebang = __webpack_require__(389);
|
||
|
|
const semver = __webpack_require__(280);
|
||
|
|
|
||
|
|
const isWin = process.platform === 'win32';
// File extensions Windows can execute directly, without cmd.exe.
const isExecutableRegExp = /\.(?:com|exe)$/i;
// cmd-shims npm creates under node_modules/.bin on Windows.
const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i;

// `options.shell` is supported in Node ^4.8.0, ^5.7.0 and >= 6.0.0
const supportsShellOption = niceTry(() => semver.satisfies(process.version, '^4.8.0 || ^5.7.0 || >= 6.0.0', true)) || false;

// Resolves the command; when the resolved file starts with a shebang,
// rewrites `parsed` so the interpreter becomes the command and the script
// its first argument. Returns the resolved file path (or undefined).
function detectShebang(parsed) {
    parsed.file = resolveCommand(parsed);

    const shebang = parsed.file && readShebang(parsed.file);

    if (shebang) {
        parsed.args.unshift(parsed.file);
        parsed.command = shebang;

        return resolveCommand(parsed);
    }

    return parsed.file;
}

// Windows-only massaging: route non-executables through cmd.exe with proper
// metachar escaping. On other platforms `parsed` is returned untouched.
function parseNonShell(parsed) {
    if (!isWin) {
        return parsed;
    }

    // Detect & add support for shebangs
    const commandFile = detectShebang(parsed);

    // We don't need a shell if the command filename is an executable
    const needsShell = !isExecutableRegExp.test(commandFile);

    // If a shell is required, use cmd.exe and take care of escaping everything correctly
    // Note that `forceShell` is an hidden option used only in tests
    if (parsed.options.forceShell || needsShell) {
        // Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/`
        // The cmd-shim simply calls execute the package bin file with NodeJS, proxying any argument
        // Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called,
        // we need to double escape them
        const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile);

        // Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar)
        // This is necessary otherwise it will always fail with ENOENT in those cases
        parsed.command = path.normalize(parsed.command);

        // Escape command & arguments
        parsed.command = escape.command(parsed.command);
        parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars));

        const shellCommand = [parsed.command].concat(parsed.args).join(' ');

        parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`];
        parsed.command = process.env.comspec || 'cmd.exe';
        parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped
    }

    return parsed;
}

// Emulates `options.shell` for Node versions that lack native support.
function parseShell(parsed) {
    // If node supports the shell option, there's no need to mimic its behavior
    if (supportsShellOption) {
        return parsed;
    }

    // Mimic node shell option
    // See https://github.com/nodejs/node/blob/b9f6a2dc059a1062776133f3d4fd848c4da7d150/lib/child_process.js#L335
    const shellCommand = [parsed.command].concat(parsed.args).join(' ');

    if (isWin) {
        parsed.command = typeof parsed.options.shell === 'string' ? parsed.options.shell : process.env.comspec || 'cmd.exe';
        parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`];
        parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped
    } else {
        if (typeof parsed.options.shell === 'string') {
            parsed.command = parsed.options.shell;
        } else if (process.platform === 'android') {
            parsed.command = '/system/bin/sh';
        } else {
            parsed.command = '/bin/sh';
        }

        parsed.args = ['-c', shellCommand];
    }

    return parsed;
}
|
||
|
|
|
||
|
|
/**
 * Normalizes (command, args, options) into a `parsed` descriptor and hands it
 * to the shell / non-shell specific parser.
 */
function parse(command, args, options) {
    // Normalize arguments, similar to nodejs: allow parse(cmd, options).
    if (args && !Array.isArray(args)) {
        options = args;
        args = null;
    }

    // Clone caller-owned values so later mutation stays local.
    const argsCopy = args ? args.slice(0) : [];
    const optionsCopy = Object.assign({}, options);

    // Build our parsed object; `original.args` shares the clone, matching the
    // behavior callers rely on.
    const parsed = {
        command,
        args: argsCopy,
        options: optionsCopy,
        file: undefined,
        original: {
            command,
            args: argsCopy,
        },
    };

    // Delegate further parsing to shell or non-shell
    return optionsCopy.shell ? parseShell(parsed) : parseNonShell(parsed);
}
|
||
|
|
|
||
|
|
module.exports = parse;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 599:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
// TypeScript-emitted helper: runs a generator-based coroutine as a Promise.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
||
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
|
exports.action = exports.setOutput = exports.getActionParams = void 0;
|
||
|
|
const outputHelper_1 = __webpack_require__(762);
|
||
|
|
const inputHelper_1 = __webpack_require__(649);
|
||
|
|
exports.getActionParams = inputHelper_1.getActionParamsFactory({
|
||
|
|
"inputNameSubset": [
|
||
|
|
"input_string",
|
||
|
|
"search_value",
|
||
|
|
"replace_value"
|
||
|
|
]
|
||
|
|
}).getActionParams;
|
||
|
|
exports.setOutput = outputHelper_1.setOutputFactory().setOutput;
|
||
|
|
/**
 * string_replace action: replaces every occurrence of `search_value`
 * (interpreted as a RegExp source) in `input_string` with `replace_value`
 * and returns it as the `replace_result` output.
 */
function action(_actionName, params, core) {
    return __awaiter(this, void 0, void 0, function* () {
        core.debug(JSON.stringify(params));
        const { input_string, search_value, replace_value } = params;
        const replace_result = input_string.replace(new RegExp(search_value, "g"), replace_value);
        return { "replace_result": replace_result };
    });
}
exports.action = action;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 605:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
module.exports = require("http");
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 614:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
module.exports = require("events");
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 621:
|
||
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
const path = __webpack_require__(622);
|
||
|
|
const pathKey = __webpack_require__(39);
|
||
|
|
|
||
|
|
// Returns a PATH string that prepends every node_modules/.bin directory from
// `opts.cwd` up to the filesystem root, plus the directory of the running
// node binary, ahead of the existing `opts.path`.
module.exports = opts => {
    opts = Object.assign({
        cwd: process.cwd(),
        path: process.env[pathKey()]
    }, opts);

    let prev;
    let pth = path.resolve(opts.cwd);
    const ret = [];

    // Walk up until path.resolve(pth, '..') stops changing (filesystem root).
    while (prev !== pth) {
        ret.push(path.join(pth, 'node_modules/.bin'));
        prev = pth;
        pth = path.resolve(pth, '..');
    }

    // ensure the running `node` binary is used
    ret.push(path.dirname(process.execPath));

    return ret.concat(opts.path).join(path.delimiter);
};
|
||
|
|
|
||
|
|
// Like the default export, but returns a copy of `opts.env` whose PATH entry
// has been augmented with the local node_modules/.bin directories.
module.exports.env = opts => {
    const options = Object.assign({
        env: process.env
    }, opts);

    const env = Object.assign({}, options.env);
    const key = pathKey({ env });

    options.path = env[key];
    env[key] = module.exports(options);

    return env;
};
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 622:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
module.exports = require("path");
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 649:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
// TypeScript-emitted helper: re-export a property binding from another module.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// TypeScript-emitted helper: attach a CommonJS module as the "default" export.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// TypeScript-emitted helper: emulate `import * as ns` over a CommonJS module.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
|
||
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
|
exports.getActionName = exports.getActionParamsFactory = exports.getInputDefault = exports.getInputDescription = exports.availableActions = exports.inputNames = void 0;
|
||
|
|
const core = __importStar(__webpack_require__(470));
|
||
|
|
// All recognized input names; getInput() refuses any name outside this list.
exports.inputNames = [
    "action_name",
    "owner",
    "repo",
    "event_type",
    "client_payload_json",
    "branch",
    "exclude_commit_from_author_names_json",
    "module_name",
    "compare_to_version",
    "input_string",
    "search_value",
    "replace_value",
    "should_webhook_be_enabled",
    "github_token"
];
// The sub-actions selectable through the "action_name" input.
exports.availableActions = [
    "get_package_json_version",
    "dispatch_event",
    "update_changelog",
    "sync_package_and_package_lock_version",
    "setup_repo_webhook_for_deno_land_publishing",
    "is_well_formed_and_available_module_name",
    "string_replace",
    "tell_if_project_uses_npm_or_yarn",
    "is_package_json_version_upgraded"
];
|
||
|
|
/**
 * Returns the human-readable description text for a given input name
 * (undefined for names without a description, e.g. none currently).
 */
function getInputDescription(inputName) {
    switch (inputName) {
        case "action_name": return [
            `Action to run, one of: `,
            exports.availableActions.map(s => `"${s}"`).join(", ")
        ].join("");
        case "owner": return [
            "Repository owner, example: 'garronej',",
            "github.repository_owner"
        ].join("");
        case "repo": return [
            "Repository name, example: ",
            "'evt', github.event.repository.name"
        ].join("");
        case "event_type": return [
            "see: https://developer.github.com/v3/",
            "repos/#create-a-repository-dispatch-event"
        ].join("");
        case "client_payload_json": return [
            "Example '{\"p\":\"foo\"}' see: https://developer.github.com/v3/",
            "repos/#create-a-repository-dispatch-event"
        ].join("");
        case "branch": return "Example: default ( can also be a sha )";
        case "exclude_commit_from_author_names_json": return [
            "For update_changelog, do not includes commit from user ",
            `certain committer in the CHANGELOG.md, ex: '["denoify_ci"]'`
        ].join("");
        case "module_name": return [
            `A candidate module name, Example: lodash`
        ].join("");
        case "compare_to_version": return [
            `For get_package_json_version, a version against which comparing the result`,
            `if found version more recent than compare_to_version compare_result is 1`,
            `if found version is equal to compare_to_version compare_result is 0`,
            `if found version is older to compare_to_version compare_result -1`,
            `Example: 0.1.3`
        ].join(" ");
        case "input_string": return `For string_replace, the string to replace`;
        case "search_value": return `For string_replace, Example '-' ( Will be used as arg for RegExp constructor )`;
        case "replace_value": return `For string_replace, Example '_'`;
        case "should_webhook_be_enabled": return `true|false, Should the create webhook be enabled, with setup_repo_webhook_for_deno_land_publishing`;
        case "github_token": return "GitHub Personal access token";
    }
}
exports.getInputDescription = getInputDescription;
|
||
|
|
/**
 * Default value for a GitHub Action input, or undefined when the
 * input has no default. Values are workflow expressions / literals
 * copied verbatim into action.yaml.
 */
function getInputDefault(inputName) {
    // Map avoids Object.prototype key collisions and returns
    // undefined for unknown names, matching the original switch.
    const defaults = new Map([
        ["owner", "${{github.repository_owner}}"],
        ["repo", "${{github.event.repository.name}}"],
        ["branch", "${{ github.sha }}"],
        ["github_token", "${{ github.token }}"],
        ["exclude_commit_from_author_names_json", '["actions"]'],
        ["should_webhook_be_enabled", "true"]
    ]);
    return defaults.get(inputName);
}
|
||
|
|
exports.getInputDefault = getInputDefault;
|
||
|
|
/**
 * Read a declared action input via @actions/core.
 * Throws when inputName is not one of the declared input names,
 * so typos fail loudly instead of silently returning "".
 */
const getInput = (inputName) => {
    const isDeclared = exports.inputNames.includes(inputName);
    if (!isDeclared) {
        throw new Error(`${inputName} expected`);
    }
    return core.getInput(inputName);
};
|
||
|
|
/**
 * Build a getActionParams() bound to a subset of the declared inputs.
 * The returned function reads each input of the subset and collects
 * them into a plain name -> value record.
 */
function getActionParamsFactory(params) {
    const { inputNameSubset } = params;
    function getActionParams() {
        const collected = {};
        for (const inputName of inputNameSubset) {
            collected[inputName] = getInput(inputName);
        }
        return collected;
    }
    return { getActionParams };
}
|
||
|
|
exports.getActionParamsFactory = getActionParamsFactory;
|
||
|
|
/** Name of the sub-action to run, read from the "action_name" input. */
function getActionName() {
    // The dispatcher selects which sub-action to execute from this input.
    const actionNameInput = "action_name";
    return getInput(actionNameInput);
}
|
||
|
|
exports.getActionName = getActionName;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 654:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
// This is not the set of all possible signals.
//
// It IS, however, the set of all signals that trigger
// an exit on either Linux or BSD systems. Linux is a
// superset of the signal names supported on BSD, and
// the unknown signals just fail to register, so we can
// catch that easily enough.
//
// Don't bother with SIGKILL. It's uncatchable, which
// means that we can't fire any callbacks anyway.
//
// If a user does happen to register a handler on a non-
// fatal signal like SIGWINCH or something, and then
// exit, it'll end up firing `process.emit('exit')`, so
// the handler will be fired anyway.
//
// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised
// artificially, inherently leave the process in a
// state from which it is not safe to try and enter JS
// listeners.

// Signals available on every supported platform.
module.exports = [
  'SIGABRT',
  'SIGALRM',
  'SIGHUP',
  'SIGINT',
  'SIGTERM'
]

// These signals do not exist on Windows; registering them there
// would fail, so they are only added on POSIX platforms.
if (process.platform !== 'win32') {
  module.exports.push(
    'SIGVTALRM',
    'SIGXCPU',
    'SIGXFSZ',
    'SIGUSR2',
    'SIGTRAP',
    'SIGSYS',
    'SIGQUIT',
    'SIGIOT'
    // should detect profiler and enable/disable accordingly.
    // see #21
    // 'SIGPROF'
  )
}

// Linux-only signal names (superset of the BSD set, see note above).
if (process.platform === 'linux') {
  module.exports.push(
    'SIGIO',
    'SIGPOLL',
    'SIGPWR',
    'SIGSTKFLT',
    'SIGUNUSED'
  )
}
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 669:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
module.exports = require("util");
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 683:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
(function (name, context, definition) {
|
||
|
|
if ( true && module.exports) module.exports = definition();
|
||
|
|
else if (typeof define === 'function' && define.amd) define(definition);
|
||
|
|
else context[name] = definition();
|
||
|
|
})('urljoin', this, function () {
|
||
|
|
|
||
|
|
/**
 * Join URL parts into a single normalized URL string (url-join core).
 * NOTE: may mutate `strArray` in place (the bare-protocol merge uses
 * shift/assignment), matching the original library behavior.
 */
function normalize (strArray) {
  if (strArray.length === 0) { return ''; }

  if (typeof strArray[0] !== 'string') {
    throw new TypeError('Url must be a string. Received ' + strArray[0]);
  }

  // A bare protocol part ("http:", "file://") is glued onto the next part.
  if (strArray[0].match(/^[^/:]+:\/*$/) && strArray.length > 1) {
    var protocolPart = strArray.shift();
    strArray[0] = protocolPart + strArray[0];
  }

  // The file protocol keeps up to three slashes, everything else exactly two.
  if (strArray[0].match(/^file:\/\/\//)) {
    strArray[0] = strArray[0].replace(/^([^/:]+):\/*/, '$1:///');
  } else {
    strArray[0] = strArray[0].replace(/^([^/:]+):\/*/, '$1://');
  }

  var resultArray = [];
  for (var i = 0; i < strArray.length; i++) {
    var component = strArray[i];

    if (typeof component !== 'string') {
      throw new TypeError('Url must be a string. Received ' + component);
    }

    if (component === '') { continue; }

    if (i > 0) {
      // Strip leading slashes on every component after the first.
      component = component.replace(/^[\/]+/, '');
    }

    // Strip trailing slashes; the last component keeps a single one.
    component = component.replace(/[\/]+$/, i < strArray.length - 1 ? '' : '/');

    resultArray.push(component);
  }

  var joined = resultArray.join('/');
  // Each input component is now separated by a single slash except the
  // possible first plain protocol part.

  // Drop a slash that directly precedes '?', '&' or '#' (but not '#!').
  joined = joined.replace(/\/(\?|&|#[^!])/g, '$1');

  // Only the first '?' starts the query string; later ones become '&'.
  var parts = joined.split('?');
  joined = parts.shift() + (parts.length > 0 ? '?' : '') + parts.join('&');

  return joined;
}
|
||
|
|
|
||
|
|
// Public entry point: accepts either a single array of URL parts or the
// parts spread over the argument list (classic url-join call styles).
return function () {
    var input;

    // NOTE(review): any object (not only arrays) is passed straight
    // through here; normalize() then validates element types.
    if (typeof arguments[0] === 'object') {
      input = arguments[0];
    } else {
      input = [].slice.call(arguments);
    }

    return normalize(input);
  };

});
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 692:
|
||
|
|
/***/ (function(__unusedmodule, exports) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
|
||
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||
|
|
|
||
|
|
/**
 * Error subclass used to flag use of a deprecated API.
 * Instances behave like regular Errors with `name === 'Deprecation'`.
 */
class Deprecation extends Error {
  constructor(message) {
    super(message);

    // Trim the constructor frame from the stack trace (V8-only API).
    /* istanbul ignore next */
    if (typeof Error.captureStackTrace === 'function') {
      Error.captureStackTrace(this, this.constructor);
    }

    this.name = 'Deprecation';
  }
}
|
||
|
|
|
||
|
|
exports.Deprecation = Deprecation;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 697:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
module.exports = (promise, onFinally) => {
|
||
|
|
onFinally = onFinally || (() => {});
|
||
|
|
|
||
|
|
return promise.then(
|
||
|
|
val => new Promise(resolve => {
|
||
|
|
resolve(onFinally());
|
||
|
|
}).then(() => val),
|
||
|
|
err => new Promise(resolve => {
|
||
|
|
resolve(onFinally());
|
||
|
|
}).then(() => {
|
||
|
|
throw err;
|
||
|
|
})
|
||
|
|
);
|
||
|
|
};
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 702:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||
|
|
if (k2 === undefined) k2 = k;
|
||
|
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||
|
|
}) : (function(o, m, k, k2) {
|
||
|
|
if (k2 === undefined) k2 = k;
|
||
|
|
o[k2] = m[k];
|
||
|
|
}));
|
||
|
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||
|
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||
|
|
}) : function(o, v) {
|
||
|
|
o["default"] = v;
|
||
|
|
});
|
||
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||
|
|
if (mod && mod.__esModule) return mod;
|
||
|
|
var result = {};
|
||
|
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||
|
|
__setModuleDefault(result, mod);
|
||
|
|
return result;
|
||
|
|
};
|
||
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
|
|
});
|
||
|
|
};
|
||
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
|
exports.action = exports.getActionParams = void 0;
|
||
|
|
const inputHelper_1 = __webpack_require__(649);
|
||
|
|
const st = __importStar(__webpack_require__(425));
|
||
|
|
const getCommitAhead_1 = __webpack_require__(438);
|
||
|
|
const get_package_json_version = __importStar(__webpack_require__(43));
|
||
|
|
const fs = __importStar(__webpack_require__(747));
|
||
|
|
const NpmModuleVersion_1 = __webpack_require__(395);
|
||
|
|
const gitCommit_1 = __webpack_require__(503);
|
||
|
|
const getLatestSemVersionedTag_1 = __webpack_require__(472);
|
||
|
|
const createOctokit_1 = __webpack_require__(906);
|
||
|
|
exports.getActionParams = inputHelper_1.getActionParamsFactory({
|
||
|
|
"inputNameSubset": [
|
||
|
|
"owner",
|
||
|
|
"repo",
|
||
|
|
"branch",
|
||
|
|
"exclude_commit_from_author_names_json",
|
||
|
|
"github_token"
|
||
|
|
]
|
||
|
|
}).getActionParams;
|
||
|
|
/**
 * "update_changelog" action: prepend the commits merged since the latest
 * sem-versioned tag to CHANGELOG.md and commit the result.
 * Skips (with a warning) when: there is no previous tag, the tag ref
 * cannot be resolved, or the package.json version did not change.
 * NOTE(review): `core` is the @actions/core-like logger object passed in
 * by the dispatcher — confirm against the caller.
 */
function action(_actionName, params, core) {
    var _a;
    return __awaiter(this, void 0, void 0, function* () {
        const { owner, repo, github_token } = params;
        // "refs/heads/main" -> "main": keep only the last path segment.
        const branch = params.branch.split("/").reverse()[0];
        core.debug(`params: ${JSON.stringify(params)}`);
        // JSON array of author names whose commits are excluded from the log.
        const exclude_commit_from_author_names = JSON.parse(params.exclude_commit_from_author_names_json);
        const octokit = createOctokit_1.createOctokit({ github_token });
        const { getCommitAhead } = getCommitAhead_1.getCommitAheadFactory({ octokit });
        const { getLatestSemVersionedTag } = getLatestSemVersionedTag_1.getLatestSemVersionedTagFactory({ octokit });
        // Latest semver tag of the repo; undefined when never tagged.
        const { tag: branchBehind } = (_a = (yield getLatestSemVersionedTag({ owner, repo }))) !== null && _a !== void 0 ? _a : {};
        if (branchBehind === undefined) {
            core.warning(`It's the first release, not editing the CHANGELOG.md`);
            return;
        }
        // Commits present on `branch` but not on the latest tag.
        // Failure to diff (e.g. unresolvable ref) degrades to undefined.
        const { commits } = yield getCommitAhead({
            owner,
            repo,
            branchBehind,
            "branchAhead": branch
        }).catch(() => ({ "commits": undefined }));
        if (commits === undefined) {
            core.warning(`${branchBehind} probably does not exist`);
            return;
        }
        // package.json version at the tag vs at the branch head.
        const [branchBehindVersion, branchAheadVersion] = yield Promise.all([branchBehind, branch]
            .map(branch => get_package_json_version.action("get_package_json_version", {
            owner,
            repo,
            branch,
            "compare_to_version": "0.0.0"
        }, core).then(({ version }) => version)));
        // MAJOR / MINOR / PATCH / SAME — drives the changelog heading depth.
        const bumpType = NpmModuleVersion_1.NpmModuleVersion.bumpType({
            "versionAheadStr": branchAheadVersion,
            "versionBehindStr": branchBehindVersion || "0.0.0"
        });
        if (bumpType === "SAME") {
            core.warning(`Version on ${branch} and ${branchBehind} are the same, not editing CHANGELOG.md`);
            return;
        }
        yield gitCommit_1.gitCommit({
            owner,
            repo,
            github_token,
            "commitAuthorEmail": "actions@github.com",
            "performChanges": () => __awaiter(this, void 0, void 0, function* () {
                yield st.exec(`git checkout ${branch}`);
                const { changelogRaw } = updateChangelog({
                    // Start from the existing file, or an empty changelog.
                    "changelogRaw": fs.existsSync("CHANGELOG.md") ?
                        fs.readFileSync("CHANGELOG.md")
                            .toString("utf8")
                        : "",
                    "version": branchAheadVersion,
                    bumpType,
                    // Oldest commit first; drop excluded authors, previous
                    // changelog commits, merge commits and GitBook commits.
                    "body": commits
                        .reverse()
                        .filter(({ commit }) => !exclude_commit_from_author_names.includes(commit.author.name))
                        .map(({ commit }) => commit.message)
                        .filter(message => !/changelog/i.test(message))
                        .filter(message => !/^Merge branch /.test(message))
                        .filter(message => !/^GitBook: /.test(message))
                        .map(message => `- ${message} `)
                        .join("\n")
                });
                core.debug(`CHANGELOG.md: ${changelogRaw}`);
                fs.writeFileSync("CHANGELOG.md", Buffer.from(changelogRaw, "utf8"));
                return {
                    "commit": true,
                    "addAll": true,
                    "message": `Update changelog v${branchAheadVersion}`
                };
            })
        });
    });
}
|
||
|
|
exports.action = action;
|
||
|
|
/**
 * Prepend a new release section to the raw CHANGELOG.md content.
 * The markdown heading depth encodes the bump type:
 * MAJOR -> "#", MINOR -> "##", anything else -> "###".
 * Returns { changelogRaw }: new section followed by the previous content.
 */
function updateChangelog(params) {
    const { body, version, bumpType } = params;

    // Local-timezone YYYY-MM-DD stamp (local time shifted so that
    // toISOString() renders the local date rather than the UTC one).
    const now = new Date();
    const localAsUtc = new Date(now.getTime() - now.getTimezoneOffset() * 60000);
    const dateString = localAsUtc.toISOString().split("T")[0];

    let heading;
    if (bumpType === "MAJOR") {
        heading = "#";
    } else if (bumpType === "MINOR") {
        heading = "##";
    } else {
        heading = "###";
    }

    const changelogRaw =
        heading +
        ` **${version}** (${dateString}) \n \n` +
        `${body} \n \n` +
        params.changelogRaw;

    return { changelogRaw };
}
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 709:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
|
|
});
|
||
|
|
};
|
||
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
|
exports.getCommonOriginFactory = void 0;
|
||
|
|
const getCommitAsyncIterable_1 = __webpack_require__(32);
|
||
|
|
/** Return the sha of the first common commit between two branches */
|
||
|
|
/** Return the sha of the first common commit between two branches */
function getCommonOriginFactory(params) {
    const { octokit } = params;
    const { getCommitAsyncIterable } = getCommitAsyncIterable_1.getCommitAsyncIterableFactory({ octokit });
    /**
     * Walk both branch histories in lockstep, one commit per side per pass,
     * and return { sha } for the first commit seen on both sides.
     * Throws Error("No common origin") when both histories are exhausted
     * without a match.
     */
    function getCommonOrigin(params) {
        return __awaiter(this, void 0, void 0, function* () {
            const { owner, repo, branch1, branch2 } = params;
            const [commitAsyncIterable1, commitAsyncIterable2] = [branch1, branch2]
                .map(branch => getCommitAsyncIterable({ owner, repo, branch }));
            // FIX: obtain each async iterator ONCE, before the loop.
            // The previous code called [Symbol.asyncIterator]().next() inside
            // the loop body, which restarts iteration on every pass for any
            // iterable that hands out a fresh iterator per call (the walk
            // would then never advance past the branch head).
            const [iterator1, iterator2] = [commitAsyncIterable1, commitAsyncIterable2]
                .map(commitAsyncIterable => commitAsyncIterable[Symbol.asyncIterator]());
            let shas1 = [];
            let shas2 = [];
            while (true) {
                // Advance both sides in parallel. Calling next() on an
                // already-done iterator keeps returning { done: true }.
                const [itRes1, itRes2] = yield Promise.all([iterator1, iterator2]
                    .map(iterator => iterator.next()));
                let sha1 = undefined;
                if (!itRes1.done) {
                    sha1 = itRes1.value.sha;
                    shas1.push(sha1);
                }
                let sha2 = undefined;
                if (!itRes2.done) {
                    sha2 = itRes2.value.sha;
                    shas2.push(sha2);
                }
                // A sha seen on both walks is the common origin.
                if (!!sha1 && shas2.includes(sha1)) {
                    return { "sha": sha1 };
                }
                if (!!sha2 && shas1.includes(sha2)) {
                    return { "sha": sha2 };
                }
                if (itRes1.done && itRes2.done) {
                    throw new Error("No common origin");
                }
            }
        });
    }
    return { getCommonOrigin };
}
|
||
|
|
exports.getCommonOriginFactory = getCommonOriginFactory;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 742:
|
||
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
|
||
|
|
var fs = __webpack_require__(747)
|
||
|
|
var core
|
||
|
|
if (process.platform === 'win32' || global.TESTING_WINDOWS) {
|
||
|
|
core = __webpack_require__(818)
|
||
|
|
} else {
|
||
|
|
core = __webpack_require__(197)
|
||
|
|
}
|
||
|
|
|
||
|
|
module.exports = isexe
|
||
|
|
isexe.sync = sync
|
||
|
|
|
||
|
|
/**
 * Async executability check for `path`.
 * Callable as isexe(path, cb), isexe(path, options, cb), or with no
 * callback at all — in that case a Promise<boolean> is returned.
 * `core` is the platform-specific implementation (win32 or POSIX).
 */
function isexe (path, options, cb) {
  // Two-argument form: isexe(path, cb).
  if (typeof options === 'function') {
    cb = options
    options = {}
  }

  // No callback: promisify by recursing with a node-style callback.
  if (!cb) {
    if (typeof Promise !== 'function') {
      throw new TypeError('callback not provided')
    }

    return new Promise(function (resolve, reject) {
      isexe(path, options || {}, function (er, is) {
        if (er) {
          reject(er)
        } else {
          resolve(is)
        }
      })
    })
  }

  core(path, options || {}, function (er, is) {
    // ignore EACCES because that just means we aren't allowed to run it
    if (er) {
      if (er.code === 'EACCES' || options && options.ignoreErrors) {
        er = null
        is = false
      }
    }
    cb(er, is)
  })
}
|
||
|
|
|
||
|
|
/**
 * Synchronous executability check. EACCES ("not allowed to run it")
 * is reported as `false`; with options.ignoreErrors every error is
 * reported as `false`. Anything else is rethrown.
 */
function sync (path, options) {
  try {
    return core.sync(path, options || {})
  } catch (er) {
    const ignoreAll = Boolean(options && options.ignoreErrors)
    // De Morgan of the original: rethrow only when neither the
    // blanket ignore flag nor the EACCES special case applies.
    if (!ignoreAll && er.code !== 'EACCES') {
      throw er
    }
    return false
  }
}
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 745:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
module.exports = ["assert","buffer","child_process","cluster","console","constants","crypto","dgram","dns","domain","events","fs","http","https","module","net","os","path","process","punycode","querystring","readline","repl","stream","string_decoder","timers","tls","tty","url","util","v8","vm","zlib"];
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 747:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
module.exports = require("fs");
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 753:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
|
||
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||
|
|
|
||
|
|
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
|
||
|
|
|
||
|
|
var endpoint = __webpack_require__(385);
|
||
|
|
var universalUserAgent = __webpack_require__(392);
|
||
|
|
var isPlainObject = __webpack_require__(356);
|
||
|
|
var nodeFetch = _interopDefault(__webpack_require__(454));
|
||
|
|
var requestError = __webpack_require__(463);
|
||
|
|
|
||
|
|
const VERSION = "5.4.14";
|
||
|
|
|
||
|
|
function getBufferResponse(response) {
|
||
|
|
return response.arrayBuffer();
|
||
|
|
}
|
||
|
|
|
||
|
|
/**
 * Perform the HTTP request described by `requestOptions` (an
 * @octokit/endpoint options object) and normalize the outcome to
 * { status, url, headers, data }. HTTP errors surface as RequestError;
 * network-level failures are wrapped in a RequestError with status 500.
 */
function fetchWrapper(requestOptions) {
  // Serialize object/array bodies; strings and streams pass through.
  if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {
    requestOptions.body = JSON.stringify(requestOptions.body);
  }

  // Captured from the response so the final .then()/.catch() can use them.
  let headers = {};
  let status;
  let url;
  // A caller-supplied fetch (request.fetch) wins over node-fetch.
  const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;
  return fetch(requestOptions.url, Object.assign({
    method: requestOptions.method,
    body: requestOptions.body,
    headers: requestOptions.headers,
    redirect: requestOptions.redirect
  }, requestOptions.request)).then(response => {
    url = response.url;
    status = response.status;

    for (const keyAndValue of response.headers) {
      headers[keyAndValue[0]] = keyAndValue[1];
    }

    // No Content / Reset Content: nothing to parse.
    if (status === 204 || status === 205) {
      return;
    } // GitHub API returns 200 for HEAD requests


    if (requestOptions.method === "HEAD") {
      if (status < 400) {
        return;
      }

      throw new requestError.RequestError(response.statusText, status, {
        headers,
        request: requestOptions
      });
    }

    if (status === 304) {
      throw new requestError.RequestError("Not modified", status, {
        headers,
        request: requestOptions
      });
    }

    if (status >= 400) {
      // Read the error body and, when it is JSON, merge its fields
      // (and any `errors` array) into the thrown RequestError.
      return response.text().then(message => {
        const error = new requestError.RequestError(message, status, {
          headers,
          request: requestOptions
        });

        try {
          let responseBody = JSON.parse(error.message);
          Object.assign(error, responseBody);
          let errors = responseBody.errors; // Assumption `errors` would always be in Array format

          error.message = error.message + ": " + errors.map(JSON.stringify).join(", ");
        } catch (e) {// ignore, see octokit/rest.js#684
        }

        throw error;
      });
    }

    // Success: pick the decoding from the content type.
    const contentType = response.headers.get("content-type");

    if (/application\/json/.test(contentType)) {
      return response.json();
    }

    if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) {
      return response.text();
    }

    // Anything else (binary) is returned as an ArrayBuffer.
    return getBufferResponse(response);
  }).then(data => {
    return {
      status,
      url,
      headers,
      data
    };
  }).catch(error => {
    // RequestErrors thrown above pass through untouched.
    if (error instanceof requestError.RequestError) {
      throw error;
    }

    throw new requestError.RequestError(error.message, 500, {
      headers,
      request: requestOptions
    });
  });
}
|
||
|
|
|
||
|
|
/**
 * Create a request function with `newDefaults` merged into `oldEndpoint`.
 * The returned function carries `.endpoint` and `.defaults()` for further
 * chaining; when the merged options declare a `request.hook`, the actual
 * fetch is routed through that hook instead of being performed directly.
 */
function withDefaults(oldEndpoint, newDefaults) {
  const endpoint = oldEndpoint.defaults(newDefaults);

  const newApi = function (route, parameters) {
    const endpointOptions = endpoint.merge(route, parameters);

    // Fast path: no hook registered, perform the request directly.
    if (!endpointOptions.request || !endpointOptions.request.hook) {
      return fetchWrapper(endpoint.parse(endpointOptions));
    }

    // Hook path: hand the hook a plain request function so it can
    // wrap/modify the call (used e.g. for authentication).
    const request = (route, parameters) => {
      return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));
    };

    Object.assign(request, {
      endpoint,
      defaults: withDefaults.bind(null, endpoint)
    });
    return endpointOptions.request.hook(request, endpointOptions);
  };

  return Object.assign(newApi, {
    endpoint,
    defaults: withDefaults.bind(null, endpoint)
  });
}
|
||
|
|
|
||
|
|
const request = withDefaults(endpoint.endpoint, {
|
||
|
|
headers: {
|
||
|
|
"user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`
|
||
|
|
}
|
||
|
|
});
|
||
|
|
|
||
|
|
exports.request = request;
|
||
|
|
//# sourceMappingURL=index.js.map
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 761:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
module.exports = require("zlib");
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 762:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||
|
|
if (k2 === undefined) k2 = k;
|
||
|
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||
|
|
}) : (function(o, m, k, k2) {
|
||
|
|
if (k2 === undefined) k2 = k;
|
||
|
|
o[k2] = m[k];
|
||
|
|
}));
|
||
|
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||
|
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||
|
|
}) : function(o, v) {
|
||
|
|
o["default"] = v;
|
||
|
|
});
|
||
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||
|
|
if (mod && mod.__esModule) return mod;
|
||
|
|
var result = {};
|
||
|
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||
|
|
__setModuleDefault(result, mod);
|
||
|
|
return result;
|
||
|
|
};
|
||
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
|
exports.setOutputFactory = exports.getOutputDescription = exports.outputNames = void 0;
|
||
|
|
const core = __importStar(__webpack_require__(470));
|
||
|
|
const objectKeys_1 = __webpack_require__(40);
|
||
|
|
exports.outputNames = [
|
||
|
|
"version",
|
||
|
|
"is_valid_node_module_name",
|
||
|
|
"is_valid_deno_module_name",
|
||
|
|
"is_available_on_npm",
|
||
|
|
"is_available_on_deno_land",
|
||
|
|
"was_already_published",
|
||
|
|
"compare_result",
|
||
|
|
"replace_result",
|
||
|
|
"was_hook_created",
|
||
|
|
"npm_or_yarn",
|
||
|
|
"from_version",
|
||
|
|
"to_version",
|
||
|
|
"is_upgraded_version"
|
||
|
|
];
|
||
|
|
/**
 * Human-readable description for a declared action output
 * (used when generating action.yaml), or undefined for unknown names.
 */
function getOutputDescription(inputName) {
    const descriptions = new Map([
        ["version", "Output of get_package_json_version"],
        ["is_valid_node_module_name", "true|false"],
        ["is_valid_deno_module_name", "true|false"],
        ["is_available_on_npm", "true|false"],
        ["is_available_on_deno_land", "true|false"],
        ["was_already_published", "true|false"],
        ["compare_result", "1|0|-1"],
        ["replace_result", "Output of string_replace"],
        ["was_hook_created", "true|false"],
        ["npm_or_yarn", "npm|yarn"],
        ["from_version", "Output of is_package_json_version_upgraded, string"],
        ["to_version", "Output of is_package_json_version_upgraded, string"],
        ["is_upgraded_version", "Output of is_package_json_version_upgraded, true|false"]
    ]);
    return descriptions.get(inputName);
}
|
||
|
|
exports.getOutputDescription = getOutputDescription;
|
||
|
|
/**
 * Build a setOutput() that forwards every key/value of an outputs
 * record to @actions/core's setOutput.
 */
function setOutputFactory() {
    function setOutput(outputs) {
        for (const outputName of objectKeys_1.objectKeys(outputs)) {
            core.setOutput(outputName, outputs[outputName]);
        }
    }
    return { setOutput };
}
|
||
|
|
exports.setOutputFactory = setOutputFactory;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 763:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
module.exports = removeHook;
|
||
|
|
|
||
|
|
/**
 * Unregister `method` from the named hook list, matching on the
 * originally registered function (`.orig`). No-op when the hook name
 * or the method is not present.
 */
function removeHook(state, name, method) {
  const registered = state.registry[name]
  if (!registered) {
    return
  }

  const index = registered.findIndex(function (entry) {
    return entry.orig === method
  })

  if (index === -1) {
    return
  }

  registered.splice(index, 1)
}
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 768:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
module.exports = function (x) {
|
||
|
|
var lf = typeof x === 'string' ? '\n' : '\n'.charCodeAt();
|
||
|
|
var cr = typeof x === 'string' ? '\r' : '\r'.charCodeAt();
|
||
|
|
|
||
|
|
if (x[x.length - 1] === lf) {
|
||
|
|
x = x.slice(0, x.length - 1);
|
||
|
|
}
|
||
|
|
|
||
|
|
if (x[x.length - 1] === cr) {
|
||
|
|
x = x.slice(0, x.length - 1);
|
||
|
|
}
|
||
|
|
|
||
|
|
return x;
|
||
|
|
};
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 794:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
|
|
});
|
||
|
|
};
|
||
|
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||
|
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||
|
|
};
|
||
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
|
exports.action = exports.setOutput = exports.getActionParams = void 0;
|
||
|
|
const outputHelper_1 = __webpack_require__(762);
|
||
|
|
const inputHelper_1 = __webpack_require__(649);
|
||
|
|
const is404_1 = __webpack_require__(854);
|
||
|
|
const validate_npm_package_name_1 = __importDefault(__webpack_require__(522));
|
||
|
|
exports.getActionParams = inputHelper_1.getActionParamsFactory({
|
||
|
|
"inputNameSubset": [
|
||
|
|
"module_name"
|
||
|
|
]
|
||
|
|
}).getActionParams;
|
||
|
|
exports.setOutput = outputHelper_1.setOutputFactory().setOutput;
|
||
|
|
/**
 * Check whether `module_name` is a valid npm / deno module name and,
 * when valid, whether it is still unclaimed on npmjs.com / deno.land
 * (a 404 on the registry page means the name is available).
 * All outputs are the strings "true" / "false".
 */
function action(_actionName, params, core) {
    return __awaiter(this, void 0, void 0, function* () {
        const { module_name } = params;
        const { validForNewPackages } = validate_npm_package_name_1.default(module_name);
        // deno.land additionally forbids dashes in module names.
        const validForDenoPackages = validForNewPackages && !module_name.includes("-");
        const asFlag = value => (value ? "true" : "false");
        // Only probe the registries for names that are valid in the first
        // place; invalid names are reported as unavailable (npm check runs
        // before the deno.land check, as before).
        let availableOnNpm = false;
        if (validForNewPackages) {
            availableOnNpm = yield is404_1.is404(`https://www.npmjs.com/package/${module_name}`);
        }
        let availableOnDenoLand = false;
        if (validForDenoPackages) {
            availableOnDenoLand = yield is404_1.is404(`https://deno.land/x/${module_name}/`);
        }
        return {
            "is_valid_node_module_name": asFlag(validForNewPackages),
            "is_available_on_npm": asFlag(availableOnNpm),
            "is_valid_deno_module_name": asFlag(validForDenoPackages),
            "is_available_on_deno_land": asFlag(availableOnDenoLand)
        };
    });
}
|
||
|
|
exports.action = action;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 796:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
|
||
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||
|
|
|
||
|
|
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
|
||
|
|
|
||
|
|
var osName = _interopDefault(__webpack_require__(2));
|
||
|
|
|
||
|
|
/**
 * Build a User-Agent string of the form
 * "Node.js/<version> (<os name>; <arch>)", falling back to a
 * placeholder when the OS name cannot be determined.
 */
function getUserAgent() {
  try {
    const nodeVersion = process.version.substr(1);
    return `Node.js/${nodeVersion} (${osName()}; ${process.arch})`;
  } catch (error) {
    // os-name shells out to `wmic os get Caption` on Windows; when
    // that specific command fails we still know the platform.
    const isWmicFailure = /wmic os get Caption/.test(error.message);
    return isWmicFailure ? "Windows <version undetectable>" : "<environment undetectable>";
  }
}
|
||
|
|
|
||
|
|
exports.getUserAgent = getUserAgent;
|
||
|
|
//# sourceMappingURL=index.js.map
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 813:
|
||
|
|
/***/ (function(__unusedmodule, exports) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
|
||
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||
|
|
|
||
|
|
/**
 * Resolve a raw token string into an octokit authentication object.
 * A JSON Web Token (three dot-separated segments) is an "app" token,
 * tokens shaped like "v1.xxxx" are "installation" tokens, and anything
 * else is treated as an "oauth" token.
 * @param {string} token
 * @returns {Promise<{type: "token", token: string, tokenType: string}>}
 */
async function auth(token) {
  const isJsonWebToken = token.split(/\./).length === 3;
  const tokenType = isJsonWebToken
    ? "app"
    : /^v\d+\./.test(token)
      ? "installation"
      : "oauth";
  return {
    type: "token",
    token,
    tokenType
  };
}
|
||
|
|
|
||
|
|
/**
 * Prefix token for usage in the Authorization header
 *
 * @param token OAuth token or JSON Web Token
 */
function withAuthorizationPrefix(token) {
  // JWTs have exactly three dot-separated segments and use the
  // "bearer" scheme; plain tokens use the "token" scheme.
  const segmentCount = token.split(/\./).length;
  return segmentCount === 3 ? `bearer ${token}` : `token ${token}`;
}
|
||
|
|
|
||
|
|
/**
 * Request hook: merge route/parameters into an endpoint, attach the
 * Authorization header for the given token, and perform the request.
 */
async function hook(token, request, route, parameters) {
  const options = request.endpoint.merge(route, parameters);
  options.headers.authorization = withAuthorizationPrefix(token);
  return request(options);
}
|
||
|
|
|
||
|
|
/**
 * Create a token-based authentication strategy.
 * Validates the token, strips any leading "token " / "bearer " scheme
 * prefix, and returns an auth function that also carries the request
 * `hook` used by @octokit/request.
 * @throws {Error} when the token is missing or not a string.
 */
const createTokenAuth = function createTokenAuth(token) {
  if (!token) {
    throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
  }
  if (typeof token !== "string") {
    throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string");
  }
  const normalized = token.replace(/^(token|bearer) +/i, "");
  const authenticate = auth.bind(null, normalized);
  authenticate.hook = hook.bind(null, normalized);
  return authenticate;
};
|
||
|
|
|
||
|
|
exports.createTokenAuth = createTokenAuth;
|
||
|
|
//# sourceMappingURL=index.js.map
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 814:
|
||
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
|
||
|
|
module.exports = which
|
||
|
|
which.sync = whichSync
|
||
|
|
|
||
|
|
var isWindows = process.platform === 'win32' ||
|
||
|
|
process.env.OSTYPE === 'cygwin' ||
|
||
|
|
process.env.OSTYPE === 'msys'
|
||
|
|
|
||
|
|
var path = __webpack_require__(622)
|
||
|
|
var COLON = isWindows ? ';' : ':'
|
||
|
|
var isexe = __webpack_require__(742)
|
||
|
|
|
||
|
|
/** Build the ENOENT error reported when `cmd` cannot be located on the PATH. */
function getNotFoundError (cmd) {
  const error = new Error(`not found: ${cmd}`)
  error.code = 'ENOENT'
  return error
}
|
||
|
|
|
||
|
|
/**
 * Compute the directories (`env`) and candidate extensions (`ext`) to
 * search when resolving `cmd`, plus the raw PATHEXT string (`extExe`)
 * that isexe uses on Windows.
 */
function getPathInfo (cmd, opt) {
  const colon = opt.colon || COLON
  let pathEnv = (opt.path || process.env.PATH || '').split(colon)
  let pathExt = ['']
  let pathExtExe = ''

  if (isWindows) {
    // On Windows the current directory is always searched first.
    pathEnv.unshift(process.cwd())
    pathExtExe = opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM'
    pathExt = pathExtExe.split(colon)

    // Always test the cmd itself first. isexe will check to make sure
    // it's found in the pathExt set.
    if (cmd.indexOf('.') !== -1 && pathExt[0] !== '')
      pathExt.unshift('')
  }

  // If cmd contains a path separator, don't search the PATH at all —
  // just check the file itself.
  if (cmd.match(/\//) || (isWindows && cmd.match(/\\/)))
    pathEnv = ['']

  return {
    env: pathEnv,
    ext: pathExt,
    extExe: pathExtExe
  }
}
|
||
|
|
|
||
|
|
// Asynchronously locate `cmd` on the PATH.
// cb(er, found): `found` is the first match, or an array of every match
// when opt.all is set; an ENOENT error is passed when nothing matches.
function which (cmd, opt, cb) {
  // Allow which(cmd, cb) — options are optional.
  if (typeof opt === 'function') {
    cb = opt
    opt = {}
  }

  var info = getPathInfo(cmd, opt)
  var pathEnv = info.env
  var pathExt = info.ext
  var pathExtExe = info.extExe
  var found = []

  // F: outer loop over each PATH directory, written in continuation-passing
  // style so each isexe() callback drives the next iteration.
  ;(function F (i, l) {
    if (i === l) {
      // Exhausted every directory.
      if (opt.all && found.length)
        return cb(null, found)
      else
        return cb(getNotFoundError(cmd))
    }

    var pathPart = pathEnv[i]
    // Strip surrounding double quotes (Windows-style quoted PATH entries).
    if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"')
      pathPart = pathPart.slice(1, -1)

    var p = path.join(pathPart, cmd)
    // Preserve an explicit "./" or ".\" prefix on relative commands
    // (path.join would otherwise drop it).
    if (!pathPart && (/^\.[\\\/]/).test(cmd)) {
      p = cmd.slice(0, 2) + p
    }
    // E: inner loop over each candidate extension for this directory.
    ;(function E (ii, ll) {
      if (ii === ll) return F(i + 1, l)
      var ext = pathExt[ii]
      isexe(p + ext, { pathExt: pathExtExe }, function (er, is) {
        if (!er && is) {
          if (opt.all)
            found.push(p + ext)
          else
            return cb(null, p + ext)
        }
        return E(ii + 1, ll)
      })
    })(0, pathExt.length)
  })(0, pathEnv.length)
}
|
||
|
|
|
||
|
|
/**
 * Synchronous variant of which(): returns the first match, or an array of
 * every match when opt.all is set. With opt.nothrow it returns null when
 * nothing matches; otherwise an ENOENT error is thrown.
 */
function whichSync (cmd, opt) {
  opt = opt || {}

  var info = getPathInfo(cmd, opt)
  var matches = []

  for (var i = 0; i < info.env.length; i++) {
    var dir = info.env[i]
    // Strip surrounding double quotes (Windows-style quoted PATH entries).
    if (dir.charAt(0) === '"' && dir.slice(-1) === '"')
      dir = dir.slice(1, -1)

    var candidate = path.join(dir, cmd)
    // Preserve an explicit "./" or ".\" prefix on relative commands.
    if (!dir && /^\.[\\\/]/.test(cmd))
      candidate = cmd.slice(0, 2) + candidate

    for (var j = 0; j < info.ext.length; j++) {
      var withExt = candidate + info.ext[j]
      try {
        if (isexe.sync(withExt, { pathExt: info.extExe })) {
          if (!opt.all)
            return withExt
          matches.push(withExt)
        }
      } catch (ex) {
        // Not stat-able / not executable: move on to the next candidate.
      }
    }
  }

  if (opt.all && matches.length)
    return matches

  if (opt.nothrow)
    return null

  throw getNotFoundError(cmd)
}
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 816:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
// Matches a shebang line (e.g. "#!/usr/bin/env node") at the start of a string.
module.exports = /^#!.*/;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 818:
|
||
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
|
||
|
|
module.exports = isexe
|
||
|
|
isexe.sync = sync
|
||
|
|
|
||
|
|
var fs = __webpack_require__(747)
|
||
|
|
|
||
|
|
/**
 * Check whether `path` ends with one of the semicolon-separated
 * extensions in options.pathExt (falling back to the PATHEXT
 * environment variable). An unset/empty extension list, or an empty
 * entry within the list, accepts any path.
 */
function checkPathExt (path, options) {
  var exts = options.pathExt !== undefined ?
    options.pathExt : process.env.PATHEXT

  // No extension restriction configured: everything passes.
  if (!exts) {
    return true
  }

  var extList = exts.split(';')
  // An empty entry means "no extension required".
  if (extList.indexOf('') !== -1) {
    return true
  }

  for (var i = 0; i < extList.length; i++) {
    var ext = extList[i].toLowerCase()
    // Case-insensitive suffix match.
    if (ext && path.substr(-ext.length).toLowerCase() === ext) {
      return true
    }
  }
  return false
}
|
||
|
|
|
||
|
|
/**
 * Decide whether a stat result represents an executable candidate:
 * it must be a regular file or a symlink, and the path must satisfy
 * the Windows pathExt check.
 */
function checkStat (stat, path, options) {
  var isFileLike = stat.isSymbolicLink() || stat.isFile()
  if (!isFileLike) {
    return false
  }
  return checkPathExt(path, options)
}
|
||
|
|
|
||
|
|
/**
 * Asynchronously test whether `path` is an executable file.
 * cb(er, isExecutable) — stat failures are reported as `false` with the
 * error passed through as the first argument.
 */
function isexe (path, options, cb) {
  fs.stat(path, function (er, stat) {
    var ok = er ? false : checkStat(stat, path, options)
    cb(er, ok)
  })
}
|
||
|
|
|
||
|
|
/** Synchronous variant of isexe(); throws when the path cannot be stat'd. */
function sync (path, options) {
  var stat = fs.statSync(path)
  return checkStat(stat, path, options)
}
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 830:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
// TypeScript-emitted helper: re-export binding `k` of module `m` as `k2` on `o`.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// TypeScript-emitted helper: attach a CommonJS module as the `default` export.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// TypeScript-emitted helper: emulate `import * as ns` for CommonJS modules.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
// TypeScript-emitted helper: drive a generator-based async function,
// adopting each yielded value through the Promise constructor `P`.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
||
|
|
Object.defineProperty(exports, "__esModule", { value: true });
exports.action = exports.getActionParams = void 0;
// Module-local requires resolved through the webpack module map.
const inputHelper_1 = __webpack_require__(649);
const st = __importStar(__webpack_require__(425));
const fs = __importStar(__webpack_require__(747));
const gitCommit_1 = __webpack_require__(503);
// Reader for the subset of GitHub Action inputs this action consumes.
exports.getActionParams = inputHelper_1.getActionParamsFactory({
    "inputNameSubset": [
        "owner",
        "repo",
        "branch",
        "github_token"
    ]
}).getActionParams;
|
||
|
|
// Keep package-lock.json's "version" field in sync with package.json's,
// committing the change back to the given branch when they differ.
// Returns nothing; all effects happen through gitCommit's performChanges.
function action(_actionName, params, core) {
    return __awaiter(this, void 0, void 0, function* () {
        core.debug(JSON.stringify(params));
        const { owner, repo, branch, github_token } = params;
        yield gitCommit_1.gitCommit({
            owner,
            repo,
            github_token,
            "commitAuthorEmail": "actions@github.com",
            "performChanges": () => __awaiter(this, void 0, void 0, function* () {
                yield st.exec(`git checkout ${branch}`);
                const { version } = JSON.parse(fs.readFileSync("package.json")
                    .toString("utf8"));
                // Nothing to sync when the repo doesn't track a lockfile.
                if (!fs.existsSync("package-lock.json")) {
                    core.debug(`No package-lock.json tracked by ${owner}/${repo}#${branch}`);
                    return { "commit": false };
                }
                const packageLockJsonRaw = fs.readFileSync("package-lock.json")
                    .toString("utf8");
                const packageLockJsonParsed = JSON.parse(packageLockJsonRaw);
                if (packageLockJsonParsed.version === version) {
                    core.debug("Nothing to do, version in package.json and package-lock.json are the same");
                    return { "commit": false };
                }
                // Patch both version fields (top-level and the root entry of
                // "packages"), then re-serialize preserving the lockfile's
                // original indentation — inferred from the whitespace before
                // its "version" line — and its original trailing newline(s).
                // NOTE(review): assumes packageLockJsonParsed.packages[""]
                // exists (lockfileVersion >= 2) — confirm for v1 lockfiles.
                fs.writeFileSync("package-lock.json", Buffer.from(JSON.stringify((() => {
                    packageLockJsonParsed.version = version;
                    packageLockJsonParsed.packages[""].version = version;
                    return packageLockJsonParsed;
                })(), null, packageLockJsonRaw
                    .replace(/\t/g, "    ")
                    .match(/^(\s*)\"version\"/m)[1].length) + packageLockJsonRaw.match(/}([\r\n]*)$/)[1], "utf8"));
                return {
                    "commit": true,
                    "addAll": false,
                    "message": "Sync package.json and package.lock version"
                };
            })
        });
    });
}
exports.action = action;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 835:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
// Node.js built-in "url" module, exposed through the webpack module map.
module.exports = require("url");
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 842:
|
||
|
|
/***/ (function(__unusedmodule, exports) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
|
||
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||
|
|
|
||
|
|
const Endpoints = {
|
||
|
|
actions: {
|
||
|
|
addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"],
|
||
|
|
cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"],
|
||
|
|
createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"],
|
||
|
|
createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, {
|
||
|
|
renamedParameters: {
|
||
|
|
name: "secret_name"
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
createOrUpdateSecretForRepo: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, {
|
||
|
|
renamed: ["actions", "createOrUpdateRepoSecret"],
|
||
|
|
renamedParameters: {
|
||
|
|
name: "secret_name"
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
createRegistrationToken: ["POST /repos/{owner}/{repo}/actions/runners/registration-token", {}, {
|
||
|
|
renamed: ["actions", "createRegistrationTokenForRepo"]
|
||
|
|
}],
|
||
|
|
createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"],
|
||
|
|
createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"],
|
||
|
|
createRemoveToken: ["POST /repos/{owner}/{repo}/actions/runners/remove-token", {}, {
|
||
|
|
renamed: ["actions", "createRemoveTokenForRepo"]
|
||
|
|
}],
|
||
|
|
createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"],
|
||
|
|
createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"],
|
||
|
|
deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"],
|
||
|
|
deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"],
|
||
|
|
deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, {
|
||
|
|
renamedParameters: {
|
||
|
|
name: "secret_name"
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
deleteSecretFromRepo: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, {
|
||
|
|
renamed: ["actions", "deleteRepoSecret"],
|
||
|
|
renamedParameters: {
|
||
|
|
name: "secret_name"
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"],
|
||
|
|
deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"],
|
||
|
|
deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"],
|
||
|
|
downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"],
|
||
|
|
downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"],
|
||
|
|
downloadWorkflowJobLogs: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs", {}, {
|
||
|
|
renamed: ["actions", "downloadJobLogsForWorkflowRun"]
|
||
|
|
}],
|
||
|
|
downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"],
|
||
|
|
getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"],
|
||
|
|
getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"],
|
||
|
|
getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"],
|
||
|
|
getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"],
|
||
|
|
getPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key", {}, {
|
||
|
|
renamed: ["actions", "getRepoPublicKey"]
|
||
|
|
}],
|
||
|
|
getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"],
|
||
|
|
getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, {
|
||
|
|
renamedParameters: {
|
||
|
|
name: "secret_name"
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
getSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, {
|
||
|
|
renamed: ["actions", "getRepoSecret"],
|
||
|
|
renamedParameters: {
|
||
|
|
name: "secret_name"
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
getSelfHostedRunner: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}", {}, {
|
||
|
|
renamed: ["actions", "getSelfHostedRunnerForRepo"]
|
||
|
|
}],
|
||
|
|
getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"],
|
||
|
|
getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"],
|
||
|
|
getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"],
|
||
|
|
getWorkflowJob: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}", {}, {
|
||
|
|
renamed: ["actions", "getJobForWorkflowRun"]
|
||
|
|
}],
|
||
|
|
getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"],
|
||
|
|
getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"],
|
||
|
|
getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"],
|
||
|
|
listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"],
|
||
|
|
listDownloadsForSelfHostedRunnerApplication: ["GET /repos/{owner}/{repo}/actions/runners/downloads", {}, {
|
||
|
|
renamed: ["actions", "listRunnerApplicationsForRepo"]
|
||
|
|
}],
|
||
|
|
listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"],
|
||
|
|
listOrgSecrets: ["GET /orgs/{org}/actions/secrets"],
|
||
|
|
listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"],
|
||
|
|
listRepoWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/runs", {}, {
|
||
|
|
renamed: ["actions", "listWorkflowRunsForRepo"]
|
||
|
|
}],
|
||
|
|
listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"],
|
||
|
|
listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"],
|
||
|
|
listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"],
|
||
|
|
listSecretsForRepo: ["GET /repos/{owner}/{repo}/actions/secrets", {}, {
|
||
|
|
renamed: ["actions", "listRepoSecrets"]
|
||
|
|
}],
|
||
|
|
listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"],
|
||
|
|
listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"],
|
||
|
|
listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"],
|
||
|
|
listWorkflowJobLogs: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs", {}, {
|
||
|
|
renamed: ["actions", "downloadWorkflowJobLogs"]
|
||
|
|
}],
|
||
|
|
listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"],
|
||
|
|
listWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs", {}, {
|
||
|
|
renamed: ["actions", "downloadWorkflowRunLogs"]
|
||
|
|
}],
|
||
|
|
listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"],
|
||
|
|
listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"],
|
||
|
|
reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"],
|
||
|
|
removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"],
|
||
|
|
removeSelfHostedRunner: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}", {}, {
|
||
|
|
renamed: ["actions", "deleteSelfHostedRunnerFromRepo"]
|
||
|
|
}],
|
||
|
|
setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"]
|
||
|
|
},
|
||
|
|
activity: {
|
||
|
|
checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"],
|
||
|
|
checkStarringRepo: ["GET /user/starred/{owner}/{repo}", {}, {
|
||
|
|
renamed: ["activity", "checkRepoIsStarredByAuthenticatedUser"]
|
||
|
|
}],
|
||
|
|
deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"],
|
||
|
|
deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"],
|
||
|
|
getFeeds: ["GET /feeds"],
|
||
|
|
getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"],
|
||
|
|
getThread: ["GET /notifications/threads/{thread_id}"],
|
||
|
|
getThreadSubscription: ["PUT /notifications", {}, {
|
||
|
|
renamed: ["activity", "getThreadSubscriptionForAuthenticatedUser"]
|
||
|
|
}],
|
||
|
|
getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"],
|
||
|
|
listEventsForAuthenticatedUser: ["GET /users/{username}/events"],
|
||
|
|
listEventsForOrg: ["GET /users/{username}/events/orgs/{org}", {}, {
|
||
|
|
renamed: ["activity", "listOrgEventsForAuthenticatedUser"]
|
||
|
|
}],
|
||
|
|
listEventsForUser: ["GET /users/{username}/events", {}, {
|
||
|
|
renamed: ["activity", "listEventsForAuthenticatedUser"]
|
||
|
|
}],
|
||
|
|
listFeeds: ["GET /feeds", {}, {
|
||
|
|
renamed: ["activity", "getFeeds"]
|
||
|
|
}],
|
||
|
|
listNotifications: ["GET /notifications", {}, {
|
||
|
|
renamed: ["activity", "listNotificationsForAuthenticatedUser"]
|
||
|
|
}],
|
||
|
|
listNotificationsForAuthenticatedUser: ["GET /notifications"],
|
||
|
|
listNotificationsForRepo: ["GET /repos/{owner}/{repo}/notifications", {}, {
|
||
|
|
renamed: ["activity", "listRepoNotificationsForAuthenticatedUser"]
|
||
|
|
}],
|
||
|
|
listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"],
|
||
|
|
listPublicEvents: ["GET /events"],
|
||
|
|
listPublicEventsForOrg: ["GET /orgs/{org}/events", {}, {
|
||
|
|
renamed: ["activity", "listPublicOrgEvents"]
|
||
|
|
}],
|
||
|
|
listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"],
|
||
|
|
listPublicEventsForUser: ["GET /users/{username}/events/public"],
|
||
|
|
listPublicOrgEvents: ["GET /orgs/{org}/events"],
|
||
|
|
listReceivedEventsForUser: ["GET /users/{username}/received_events"],
|
||
|
|
listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"],
|
||
|
|
listRepoEvents: ["GET /repos/{owner}/{repo}/events"],
|
||
|
|
listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"],
|
||
|
|
listReposStarredByAuthenticatedUser: ["GET /user/starred"],
|
||
|
|
listReposStarredByUser: ["GET /users/{username}/starred"],
|
||
|
|
listReposWatchedByUser: ["GET /users/{username}/subscriptions"],
|
||
|
|
listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"],
|
||
|
|
listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"],
|
||
|
|
listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"],
|
||
|
|
markAsRead: ["PUT /notifications", {}, {
|
||
|
|
renamed: ["activity", "markNotificationsAsRead"]
|
||
|
|
}],
|
||
|
|
markNotificationsAsRead: ["PUT /notifications"],
|
||
|
|
markNotificationsAsReadForRepo: ["PUT /repos/{owner}/{repo}/notifications", {}, {
|
||
|
|
renamed: ["activity", "markRepoNotificationsAsRead"]
|
||
|
|
}],
|
||
|
|
markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"],
|
||
|
|
markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"],
|
||
|
|
setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"],
|
||
|
|
setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"],
|
||
|
|
starRepo: ["PUT /user/starred/{owner}/{repo}", {}, {
|
||
|
|
renamed: ["activity", "starRepoForAuthenticatedUser"]
|
||
|
|
}],
|
||
|
|
starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"],
|
||
|
|
unstarRepo: ["DELETE /user/starred/{owner}/{repo}", {}, {
|
||
|
|
renamed: ["activity", "unstarRepoForAuthenticatedUser"]
|
||
|
|
}],
|
||
|
|
unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"]
|
||
|
|
},
|
||
|
|
apps: {
|
||
|
|
addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["machine-man"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
checkAccountIsAssociatedWithAny: ["GET /marketplace_listing/accounts/{account_id}", {}, {
|
||
|
|
renamed: ["apps", "getSubscriptionPlanForAccount"]
|
||
|
|
}],
|
||
|
|
checkAccountIsAssociatedWithAnyStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}", {}, {
|
||
|
|
renamed: ["apps", "getSubscriptionPlanForAccountStubbed"]
|
||
|
|
}],
|
||
|
|
checkToken: ["POST /applications/{client_id}/token"],
|
||
|
|
createContentAttachment: ["POST /content_references/{content_reference_id}/attachments", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["corsair"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
createFromManifest: ["POST /app-manifests/{code}/conversions"],
|
||
|
|
createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["machine-man"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
createInstallationToken: ["POST /app/installations/{installation_id}/access_tokens", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["machine-man"]
|
||
|
|
}
|
||
|
|
}, {
|
||
|
|
renamed: ["apps", "createInstallationAccessToken"]
|
||
|
|
}],
|
||
|
|
deleteAuthorization: ["DELETE /applications/{client_id}/grant"],
|
||
|
|
deleteInstallation: ["DELETE /app/installations/{installation_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["machine-man"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
deleteToken: ["DELETE /applications/{client_id}/token"],
|
||
|
|
getAuthenticated: ["GET /app", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["machine-man"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
getBySlug: ["GET /apps/{app_slug}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["machine-man"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
getInstallation: ["GET /app/installations/{installation_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["machine-man"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
getOrgInstallation: ["GET /orgs/{org}/installation", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["machine-man"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
getRepoInstallation: ["GET /repos/{owner}/{repo}/installation", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["machine-man"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"],
|
||
|
|
getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"],
|
||
|
|
getUserInstallation: ["GET /users/{username}/installation", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["machine-man"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"],
|
||
|
|
listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"],
|
||
|
|
listAccountsUserOrOrgOnPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts", {}, {
|
||
|
|
renamed: ["apps", "listAccountsForPlan"]
|
||
|
|
}],
|
||
|
|
listAccountsUserOrOrgOnPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", {}, {
|
||
|
|
renamed: ["apps", "listAccountsForPlanStubbed"]
|
||
|
|
}],
|
||
|
|
listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["machine-man"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listInstallations: ["GET /app/installations", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["machine-man"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listInstallationsForAuthenticatedUser: ["GET /user/installations", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["machine-man"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listMarketplacePurchasesForAuthenticatedUser: ["GET /user/marketplace_purchases", {}, {
|
||
|
|
renamed: ["apps", "listSubscriptionsForAuthenticatedUser"]
|
||
|
|
}],
|
||
|
|
listMarketplacePurchasesForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed", {}, {
|
||
|
|
renamed: ["apps", "listSubscriptionsForAuthenticatedUserStubbed"]
|
||
|
|
}],
|
||
|
|
listPlans: ["GET /marketplace_listing/plans"],
|
||
|
|
listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"],
|
||
|
|
listRepos: ["GET /installation/repositories", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["machine-man"]
|
||
|
|
}
|
||
|
|
}, {
|
||
|
|
renamed: ["apps", "listReposAccessibleToInstallation"]
|
||
|
|
}],
|
||
|
|
listReposAccessibleToInstallation: ["GET /installation/repositories", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["machine-man"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"],
|
||
|
|
listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"],
|
||
|
|
removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["machine-man"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
resetToken: ["PATCH /applications/{client_id}/token"],
|
||
|
|
revokeInstallationAccessToken: ["DELETE /installation/token"],
|
||
|
|
revokeInstallationToken: ["DELETE /installation/token", {}, {
|
||
|
|
renamed: ["apps", "revokeInstallationAccessToken"]
|
||
|
|
}],
|
||
|
|
suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"],
|
||
|
|
unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"]
|
||
|
|
},
|
||
|
|
checks: {
|
||
|
|
create: ["POST /repos/{owner}/{repo}/check-runs", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["antiope"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
createSuite: ["POST /repos/{owner}/{repo}/check-suites", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["antiope"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["antiope"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["antiope"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["antiope"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["antiope"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["antiope"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["antiope"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["antiope"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["antiope"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["antiope"]
|
||
|
|
}
|
||
|
|
}]
|
||
|
|
},
|
||
|
|
codeScanning: {
|
||
|
|
getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_id}"],
|
||
|
|
listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"]
|
||
|
|
},
|
||
|
|
codesOfConduct: {
|
||
|
|
getAllCodesOfConduct: ["GET /codes_of_conduct", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["scarlet-witch"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
getConductCode: ["GET /codes_of_conduct/{key}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["scarlet-witch"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
getForRepo: ["GET /repos/{owner}/{repo}/community/code_of_conduct", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["scarlet-witch"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listConductCodes: ["GET /codes_of_conduct", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["scarlet-witch"]
|
||
|
|
}
|
||
|
|
}, {
|
||
|
|
renamed: ["codesOfConduct", "getAllCodesOfConduct"]
|
||
|
|
}]
|
||
|
|
},
|
||
|
|
emojis: {
|
||
|
|
get: ["GET /emojis"]
|
||
|
|
},
|
||
|
|
gists: {
|
||
|
|
checkIsStarred: ["GET /gists/{gist_id}/star"],
|
||
|
|
create: ["POST /gists"],
|
||
|
|
createComment: ["POST /gists/{gist_id}/comments"],
|
||
|
|
delete: ["DELETE /gists/{gist_id}"],
|
||
|
|
deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"],
|
||
|
|
fork: ["POST /gists/{gist_id}/forks"],
|
||
|
|
get: ["GET /gists/{gist_id}"],
|
||
|
|
getComment: ["GET /gists/{gist_id}/comments/{comment_id}"],
|
||
|
|
getRevision: ["GET /gists/{gist_id}/{sha}"],
|
||
|
|
list: ["GET /gists"],
|
||
|
|
listComments: ["GET /gists/{gist_id}/comments"],
|
||
|
|
listCommits: ["GET /gists/{gist_id}/commits"],
|
||
|
|
listForUser: ["GET /users/{username}/gists"],
|
||
|
|
listForks: ["GET /gists/{gist_id}/forks"],
|
||
|
|
listPublic: ["GET /gists/public"],
|
||
|
|
listPublicForUser: ["GET /users/{username}/gists", {}, {
|
||
|
|
renamed: ["gists", "listForUser"]
|
||
|
|
}],
|
||
|
|
listStarred: ["GET /gists/starred"],
|
||
|
|
star: ["PUT /gists/{gist_id}/star"],
|
||
|
|
unstar: ["DELETE /gists/{gist_id}/star"],
|
||
|
|
update: ["PATCH /gists/{gist_id}"],
|
||
|
|
updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"]
|
||
|
|
},
|
||
|
|
git: {
|
||
|
|
createBlob: ["POST /repos/{owner}/{repo}/git/blobs"],
|
||
|
|
createCommit: ["POST /repos/{owner}/{repo}/git/commits"],
|
||
|
|
createRef: ["POST /repos/{owner}/{repo}/git/refs"],
|
||
|
|
createTag: ["POST /repos/{owner}/{repo}/git/tags"],
|
||
|
|
createTree: ["POST /repos/{owner}/{repo}/git/trees"],
|
||
|
|
deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"],
|
||
|
|
getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"],
|
||
|
|
getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"],
|
||
|
|
getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"],
|
||
|
|
getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"],
|
||
|
|
getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"],
|
||
|
|
listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"],
|
||
|
|
updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"]
|
||
|
|
},
|
||
|
|
gitignore: {
|
||
|
|
getAllTemplates: ["GET /gitignore/templates"],
|
||
|
|
getTemplate: ["GET /gitignore/templates/{name}"],
|
||
|
|
listTemplates: ["GET /gitignore/templates", {}, {
|
||
|
|
renamed: ["gitignore", "getAllTemplates"]
|
||
|
|
}]
|
||
|
|
},
|
||
|
|
interactions: {
|
||
|
|
addOrUpdateRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["sombra"]
|
||
|
|
}
|
||
|
|
}, {
|
||
|
|
renamed: ["interactions", "setRestrictionsForOrg"]
|
||
|
|
}],
|
||
|
|
addOrUpdateRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["sombra"]
|
||
|
|
}
|
||
|
|
}, {
|
||
|
|
renamed: ["interactions", "setRestrictionsForRepo"]
|
||
|
|
}],
|
||
|
|
getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["sombra"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["sombra"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["sombra"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["sombra"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["sombra"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["sombra"]
|
||
|
|
}
|
||
|
|
}]
|
||
|
|
},
|
||
|
|
issues: {
|
||
|
|
addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"],
|
||
|
|
addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"],
|
||
|
|
checkAssignee: ["GET /repos/{owner}/{repo}/assignees/{assignee}", {}, {
|
||
|
|
renamed: ["issues", "checkUserCanBeAssigned"]
|
||
|
|
}],
|
||
|
|
checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"],
|
||
|
|
create: ["POST /repos/{owner}/{repo}/issues"],
|
||
|
|
createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"],
|
||
|
|
createLabel: ["POST /repos/{owner}/{repo}/labels"],
|
||
|
|
createMilestone: ["POST /repos/{owner}/{repo}/milestones"],
|
||
|
|
deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"],
|
||
|
|
deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"],
|
||
|
|
deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"],
|
||
|
|
get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"],
|
||
|
|
getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"],
|
||
|
|
getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"],
|
||
|
|
getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"],
|
||
|
|
getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"],
|
||
|
|
list: ["GET /issues"],
|
||
|
|
listAssignees: ["GET /repos/{owner}/{repo}/assignees"],
|
||
|
|
listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"],
|
||
|
|
listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"],
|
||
|
|
listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"],
|
||
|
|
listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"],
|
||
|
|
listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["mockingbird"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listForAuthenticatedUser: ["GET /user/issues"],
|
||
|
|
listForOrg: ["GET /orgs/{org}/issues"],
|
||
|
|
listForRepo: ["GET /repos/{owner}/{repo}/issues"],
|
||
|
|
listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"],
|
||
|
|
listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"],
|
||
|
|
listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"],
|
||
|
|
listMilestones: ["GET /repos/{owner}/{repo}/milestones"],
|
||
|
|
listMilestonesForRepo: ["GET /repos/{owner}/{repo}/milestones", {}, {
|
||
|
|
renamed: ["issues", "listMilestones"]
|
||
|
|
}],
|
||
|
|
lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"],
|
||
|
|
removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"],
|
||
|
|
removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"],
|
||
|
|
removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"],
|
||
|
|
removeLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels", {}, {
|
||
|
|
renamed: ["issues", "removeAllLabels"]
|
||
|
|
}],
|
||
|
|
replaceAllLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels", {}, {
|
||
|
|
renamed: ["issues", "setLabels"]
|
||
|
|
}],
|
||
|
|
replaceLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels", {}, {
|
||
|
|
renamed: ["issues", "replaceAllLabels"]
|
||
|
|
}],
|
||
|
|
setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"],
|
||
|
|
unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"],
|
||
|
|
update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"],
|
||
|
|
updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"],
|
||
|
|
updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"],
|
||
|
|
updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"]
|
||
|
|
},
|
||
|
|
licenses: {
|
||
|
|
get: ["GET /licenses/{license}"],
|
||
|
|
getAllCommonlyUsed: ["GET /licenses"],
|
||
|
|
getForRepo: ["GET /repos/{owner}/{repo}/license"],
|
||
|
|
listCommonlyUsed: ["GET /licenses", {}, {
|
||
|
|
renamed: ["licenses", "getAllCommonlyUsed"]
|
||
|
|
}]
|
||
|
|
},
|
||
|
|
markdown: {
|
||
|
|
render: ["POST /markdown"],
|
||
|
|
renderRaw: ["POST /markdown/raw", {
|
||
|
|
headers: {
|
||
|
|
"content-type": "text/plain; charset=utf-8"
|
||
|
|
}
|
||
|
|
}]
|
||
|
|
},
|
||
|
|
meta: {
|
||
|
|
get: ["GET /meta"]
|
||
|
|
},
|
||
|
|
migrations: {
|
||
|
|
cancelImport: ["DELETE /repos/{owner}/{repo}/import"],
|
||
|
|
deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["wyandotte"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["wyandotte"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["wyandotte"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["wyandotte"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"],
|
||
|
|
getImportProgress: ["GET /repos/{owner}/{repo}/import", {}, {
|
||
|
|
renamed: ["migrations", "getImportStatus"]
|
||
|
|
}],
|
||
|
|
getImportStatus: ["GET /repos/{owner}/{repo}/import"],
|
||
|
|
getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"],
|
||
|
|
getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["wyandotte"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["wyandotte"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listForAuthenticatedUser: ["GET /user/migrations", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["wyandotte"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listForOrg: ["GET /orgs/{org}/migrations", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["wyandotte"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["wyandotte"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listReposForUser: ["GET /user/{migration_id}/repositories", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["wyandotte"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"],
|
||
|
|
setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"],
|
||
|
|
startForAuthenticatedUser: ["POST /user/migrations"],
|
||
|
|
startForOrg: ["POST /orgs/{org}/migrations"],
|
||
|
|
startImport: ["PUT /repos/{owner}/{repo}/import"],
|
||
|
|
unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["wyandotte"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["wyandotte"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
updateImport: ["PATCH /repos/{owner}/{repo}/import"]
|
||
|
|
},
|
||
|
|
orgs: {
|
||
|
|
addOrUpdateMembership: ["PUT /orgs/{org}/memberships/{username}", {}, {
|
||
|
|
renamed: ["orgs", "setMembershipForUser"]
|
||
|
|
}],
|
||
|
|
blockUser: ["PUT /orgs/{org}/blocks/{username}"],
|
||
|
|
checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"],
|
||
|
|
checkMembership: ["GET /orgs/{org}/members/{username}", {}, {
|
||
|
|
renamed: ["orgs", "checkMembershipForUser"]
|
||
|
|
}],
|
||
|
|
checkMembershipForUser: ["GET /orgs/{org}/members/{username}"],
|
||
|
|
checkPublicMembership: ["GET /orgs/{org}/public_members/{username}", {}, {
|
||
|
|
renamed: ["orgs", "checkPublicMembershipForUser"]
|
||
|
|
}],
|
||
|
|
checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"],
|
||
|
|
concealMembership: ["DELETE /orgs/{org}/public_members/{username}", {}, {
|
||
|
|
renamed: ["orgs", "removePublicMembershipForAuthenticatedUser"]
|
||
|
|
}],
|
||
|
|
convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"],
|
||
|
|
createHook: ["POST /orgs/{org}/hooks", {}, {
|
||
|
|
renamed: ["orgs", "createWebhook"]
|
||
|
|
}],
|
||
|
|
createInvitation: ["POST /orgs/{org}/invitations"],
|
||
|
|
createWebhook: ["POST /orgs/{org}/hooks"],
|
||
|
|
deleteHook: ["DELETE /orgs/{org}/hooks/{hook_id}", {}, {
|
||
|
|
renamed: ["orgs", "deleteWebhook"]
|
||
|
|
}],
|
||
|
|
deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"],
|
||
|
|
get: ["GET /orgs/{org}"],
|
||
|
|
getHook: ["GET /orgs/{org}/hooks/{hook_id}", {}, {
|
||
|
|
renamed: ["orgs", "getWebhook"]
|
||
|
|
}],
|
||
|
|
getMembership: ["GET /orgs/{org}/memberships/{username}", {}, {
|
||
|
|
renamed: ["orgs", "getMembershipForUser"]
|
||
|
|
}],
|
||
|
|
getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"],
|
||
|
|
getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"],
|
||
|
|
getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"],
|
||
|
|
list: ["GET /organizations"],
|
||
|
|
listAppInstallations: ["GET /orgs/{org}/installations", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["machine-man"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listBlockedUsers: ["GET /orgs/{org}/blocks"],
|
||
|
|
listForAuthenticatedUser: ["GET /user/orgs"],
|
||
|
|
listForUser: ["GET /users/{username}/orgs"],
|
||
|
|
listHooks: ["GET /orgs/{org}/hooks", {}, {
|
||
|
|
renamed: ["orgs", "listWebhooks"]
|
||
|
|
}],
|
||
|
|
listInstallations: ["GET /orgs/{org}/installations", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["machine-man"]
|
||
|
|
}
|
||
|
|
}, {
|
||
|
|
renamed: ["orgs", "listAppInstallations"]
|
||
|
|
}],
|
||
|
|
listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"],
|
||
|
|
listMembers: ["GET /orgs/{org}/members"],
|
||
|
|
listMemberships: ["GET /user/memberships/orgs", {}, {
|
||
|
|
renamed: ["orgs", "listMembershipsForAuthenticatedUser"]
|
||
|
|
}],
|
||
|
|
listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"],
|
||
|
|
listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"],
|
||
|
|
listPendingInvitations: ["GET /orgs/{org}/invitations"],
|
||
|
|
listPublicMembers: ["GET /orgs/{org}/public_members"],
|
||
|
|
listWebhooks: ["GET /orgs/{org}/hooks"],
|
||
|
|
pingHook: ["POST /orgs/{org}/hooks/{hook_id}/pings", {}, {
|
||
|
|
renamed: ["orgs", "pingWebhook"]
|
||
|
|
}],
|
||
|
|
pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"],
|
||
|
|
publicizeMembership: ["PUT /orgs/{org}/public_members/{username}", {}, {
|
||
|
|
renamed: ["orgs", "setPublicMembershipForAuthenticatedUser"]
|
||
|
|
}],
|
||
|
|
removeMember: ["DELETE /orgs/{org}/members/{username}"],
|
||
|
|
removeMembership: ["DELETE /orgs/{org}/memberships/{username}", {}, {
|
||
|
|
renamed: ["orgs", "removeMembershipForUser"]
|
||
|
|
}],
|
||
|
|
removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"],
|
||
|
|
removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"],
|
||
|
|
removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"],
|
||
|
|
setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"],
|
||
|
|
setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"],
|
||
|
|
unblockUser: ["DELETE /orgs/{org}/blocks/{username}"],
|
||
|
|
update: ["PATCH /orgs/{org}"],
|
||
|
|
updateHook: ["PATCH /orgs/{org}/hooks/{hook_id}", {}, {
|
||
|
|
renamed: ["orgs", "updateWebhook"]
|
||
|
|
}],
|
||
|
|
updateMembership: ["PATCH /user/memberships/orgs/{org}", {}, {
|
||
|
|
renamed: ["orgs", "updateMembershipForAuthenticatedUser"]
|
||
|
|
}],
|
||
|
|
updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"],
|
||
|
|
updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"]
|
||
|
|
},
|
||
|
|
projects: {
|
||
|
|
addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
createCard: ["POST /projects/columns/{column_id}/cards", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
createColumn: ["POST /projects/{project_id}/columns", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
createForAuthenticatedUser: ["POST /user/projects", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
createForOrg: ["POST /orgs/{org}/projects", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
createForRepo: ["POST /repos/{owner}/{repo}/projects", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
delete: ["DELETE /projects/{project_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
deleteCard: ["DELETE /projects/columns/cards/{card_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
deleteColumn: ["DELETE /projects/columns/{column_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
get: ["GET /projects/{project_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
getCard: ["GET /projects/columns/cards/{card_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
getColumn: ["GET /projects/columns/{column_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listCards: ["GET /projects/columns/{column_id}/cards", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listCollaborators: ["GET /projects/{project_id}/collaborators", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listColumns: ["GET /projects/{project_id}/columns", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listForOrg: ["GET /orgs/{org}/projects", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listForRepo: ["GET /repos/{owner}/{repo}/projects", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listForUser: ["GET /users/{username}/projects", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
moveCard: ["POST /projects/columns/cards/{card_id}/moves", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
moveColumn: ["POST /projects/columns/{column_id}/moves", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
reviewUserPermissionLevel: ["GET /projects/{project_id}/collaborators/{username}/permission", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}, {
|
||
|
|
renamed: ["projects", "getPermissionForUser"]
|
||
|
|
}],
|
||
|
|
update: ["PATCH /projects/{project_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
updateCard: ["PATCH /projects/columns/cards/{card_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
updateColumn: ["PATCH /projects/columns/{column_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}]
|
||
|
|
},
|
||
|
|
pulls: {
|
||
|
|
checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"],
|
||
|
|
create: ["POST /repos/{owner}/{repo}/pulls"],
|
||
|
|
createComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments", {}, {
|
||
|
|
renamed: ["pulls", "createReviewComment"]
|
||
|
|
}],
|
||
|
|
createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"],
|
||
|
|
createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"],
|
||
|
|
createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"],
|
||
|
|
createReviewCommentReply: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies", {}, {
|
||
|
|
renamed: ["pulls", "createReplyForReviewComment"]
|
||
|
|
}],
|
||
|
|
createReviewRequest: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", {}, {
|
||
|
|
renamed: ["pulls", "requestReviewers"]
|
||
|
|
}],
|
||
|
|
deleteComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}", {}, {
|
||
|
|
renamed: ["pulls", "deleteReviewComment"]
|
||
|
|
}],
|
||
|
|
deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"],
|
||
|
|
deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"],
|
||
|
|
deleteReviewRequest: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", {}, {
|
||
|
|
renamed: ["pulls", "removeRequestedReviewers"]
|
||
|
|
}],
|
||
|
|
dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"],
|
||
|
|
get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"],
|
||
|
|
getComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}", {}, {
|
||
|
|
renamed: ["pulls", "getReviewComment"]
|
||
|
|
}],
|
||
|
|
getCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", {}, {
|
||
|
|
renamed: ["pulls", "listCommentsForReview"]
|
||
|
|
}],
|
||
|
|
getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"],
|
||
|
|
getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"],
|
||
|
|
list: ["GET /repos/{owner}/{repo}/pulls"],
|
||
|
|
listComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", {}, {
|
||
|
|
renamed: ["pulls", "listReviewComments"]
|
||
|
|
}],
|
||
|
|
listCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments", {}, {
|
||
|
|
renamed: ["pulls", "listReviewCommentsForRepo"]
|
||
|
|
}],
|
||
|
|
listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"],
|
||
|
|
listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"],
|
||
|
|
listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"],
|
||
|
|
listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"],
|
||
|
|
listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"],
|
||
|
|
listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"],
|
||
|
|
listReviewRequests: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", {}, {
|
||
|
|
renamed: ["pulls", "listRequestedReviewers"]
|
||
|
|
}],
|
||
|
|
listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"],
|
||
|
|
merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"],
|
||
|
|
removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"],
|
||
|
|
requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"],
|
||
|
|
submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"],
|
||
|
|
update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"],
|
||
|
|
updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["lydian"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
updateComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}", {}, {
|
||
|
|
renamed: ["pulls", "updateReviewComment"]
|
||
|
|
}],
|
||
|
|
updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"],
|
||
|
|
updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"]
|
||
|
|
},
|
||
|
|
rateLimit: {
|
||
|
|
get: ["GET /rate_limit"]
|
||
|
|
},
|
||
|
|
reactions: {
|
||
|
|
createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["squirrel-girl"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["squirrel-girl"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["squirrel-girl"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["squirrel-girl"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["squirrel-girl"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["squirrel-girl"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
delete: ["DELETE /reactions/{reaction_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["squirrel-girl"]
|
||
|
|
}
|
||
|
|
}, {
|
||
|
|
renamed: ["reactions", "deleteLegacy"]
|
||
|
|
}],
|
||
|
|
deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["squirrel-girl"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["squirrel-girl"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["squirrel-girl"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["squirrel-girl"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["squirrel-girl"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["squirrel-girl"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
deleteLegacy: ["DELETE /reactions/{reaction_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["squirrel-girl"]
|
||
|
|
}
|
||
|
|
}, {
|
||
|
|
deprecated: "octokit.reactions.deleteLegacy() is deprecated, see https://developer.github.com/v3/reactions/#delete-a-reaction-legacy"
|
||
|
|
}],
|
||
|
|
listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["squirrel-girl"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["squirrel-girl"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["squirrel-girl"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["squirrel-girl"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["squirrel-girl"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["squirrel-girl"]
|
||
|
|
}
|
||
|
|
}]
|
||
|
|
},
|
||
|
|
repos: {
|
||
|
|
acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}"],
|
||
|
|
addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
|
||
|
|
mapToData: "apps"
|
||
|
|
}],
|
||
|
|
addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"],
|
||
|
|
addDeployKey: ["POST /repos/{owner}/{repo}/keys", {}, {
|
||
|
|
renamed: ["repos", "createDeployKey"]
|
||
|
|
}],
|
||
|
|
addProtectedBranchAdminEnforcement: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", {}, {
|
||
|
|
renamed: ["repos", "setAdminBranchProtection"]
|
||
|
|
}],
|
||
|
|
addProtectedBranchAppRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
|
||
|
|
mapToData: "apps",
|
||
|
|
renamed: ["repos", "addAppAccessRestrictions"]
|
||
|
|
}],
|
||
|
|
addProtectedBranchRequiredSignatures: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["zzzax"]
|
||
|
|
}
|
||
|
|
}, {
|
||
|
|
renamed: ["repos", "createCommitSignatureProtection"]
|
||
|
|
}],
|
||
|
|
addProtectedBranchRequiredStatusChecksContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
|
||
|
|
mapToData: "contexts",
|
||
|
|
renamed: ["repos", "addStatusCheckContexts"]
|
||
|
|
}],
|
||
|
|
addProtectedBranchTeamRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, {
|
||
|
|
mapToData: "teams",
|
||
|
|
renamed: ["repos", "addTeamAccessRestrictions"]
|
||
|
|
}],
|
||
|
|
addProtectedBranchUserRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
|
||
|
|
mapToData: "users",
|
||
|
|
renamed: ["repos", "addUserAccessRestrictions"]
|
||
|
|
}],
|
||
|
|
addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
|
||
|
|
mapToData: "contexts"
|
||
|
|
}],
|
||
|
|
addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, {
|
||
|
|
mapToData: "teams"
|
||
|
|
}],
|
||
|
|
addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
|
||
|
|
mapToData: "users"
|
||
|
|
}],
|
||
|
|
checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"],
|
||
|
|
checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["dorian"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"],
|
||
|
|
createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"],
|
||
|
|
createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["zzzax"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"],
|
||
|
|
createDeployKey: ["POST /repos/{owner}/{repo}/keys"],
|
||
|
|
createDeployment: ["POST /repos/{owner}/{repo}/deployments"],
|
||
|
|
createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"],
|
||
|
|
createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"],
|
||
|
|
createForAuthenticatedUser: ["POST /user/repos"],
|
||
|
|
createFork: ["POST /repos/{owner}/{repo}/forks"],
|
||
|
|
createHook: ["POST /repos/{owner}/{repo}/hooks", {}, {
|
||
|
|
renamed: ["repos", "createWebhook"]
|
||
|
|
}],
|
||
|
|
createInOrg: ["POST /orgs/{org}/repos"],
|
||
|
|
createOrUpdateFile: ["PUT /repos/{owner}/{repo}/contents/{path}", {}, {
|
||
|
|
renamed: ["repos", "createOrUpdateFileContents"]
|
||
|
|
}],
|
||
|
|
createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"],
|
||
|
|
createPagesSite: ["POST /repos/{owner}/{repo}/pages", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["switcheroo"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
createRelease: ["POST /repos/{owner}/{repo}/releases"],
|
||
|
|
createStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}", {}, {
|
||
|
|
renamed: ["repos", "createCommitStatus"]
|
||
|
|
}],
|
||
|
|
createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["baptiste"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
createWebhook: ["POST /repos/{owner}/{repo}/hooks"],
|
||
|
|
declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}"],
|
||
|
|
delete: ["DELETE /repos/{owner}/{repo}"],
|
||
|
|
deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"],
|
||
|
|
deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
|
||
|
|
deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"],
|
||
|
|
deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"],
|
||
|
|
deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["zzzax"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"],
|
||
|
|
deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"],
|
||
|
|
deleteDownload: ["DELETE /repos/{owner}/{repo}/downloads/{download_id}"],
|
||
|
|
deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"],
|
||
|
|
deleteHook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}", {}, {
|
||
|
|
renamed: ["repos", "deleteWebhook"]
|
||
|
|
}],
|
||
|
|
deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"],
|
||
|
|
deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["switcheroo"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"],
|
||
|
|
deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"],
|
||
|
|
deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"],
|
||
|
|
deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"],
|
||
|
|
disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["london"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
disablePagesSite: ["DELETE /repos/{owner}/{repo}/pages", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["switcheroo"]
|
||
|
|
}
|
||
|
|
}, {
|
||
|
|
renamed: ["repos", "deletePagesSite"]
|
||
|
|
}],
|
||
|
|
disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["dorian"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
downloadArchive: ["GET /repos/{owner}/{repo}/{archive_format}/{ref}"],
|
||
|
|
enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["london"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
enablePagesSite: ["POST /repos/{owner}/{repo}/pages", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["switcheroo"]
|
||
|
|
}
|
||
|
|
}, {
|
||
|
|
renamed: ["repos", "createPagesSite"]
|
||
|
|
}],
|
||
|
|
enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["dorian"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
get: ["GET /repos/{owner}/{repo}"],
|
||
|
|
getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"],
|
||
|
|
getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
|
||
|
|
getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"],
|
||
|
|
getAllTopics: ["GET /repos/{owner}/{repo}/topics", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["mercy"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"],
|
||
|
|
getArchiveLink: ["GET /repos/{owner}/{repo}/{archive_format}/{ref}", {}, {
|
||
|
|
renamed: ["repos", "downloadArchive"]
|
||
|
|
}],
|
||
|
|
getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"],
|
||
|
|
getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"],
|
||
|
|
getClones: ["GET /repos/{owner}/{repo}/traffic/clones"],
|
||
|
|
getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"],
|
||
|
|
getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"],
|
||
|
|
getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"],
|
||
|
|
getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"],
|
||
|
|
getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"],
|
||
|
|
getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"],
|
||
|
|
getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["zzzax"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"],
|
||
|
|
getContent: ["GET /repos/{owner}/{repo}/contents/{path}"],
|
||
|
|
getContents: ["GET /repos/{owner}/{repo}/contents/{path}", {}, {
|
||
|
|
renamed: ["repos", "getContent"]
|
||
|
|
}],
|
||
|
|
getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"],
|
||
|
|
getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"],
|
||
|
|
getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"],
|
||
|
|
getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"],
|
||
|
|
getDownload: ["GET /repos/{owner}/{repo}/downloads/{download_id}"],
|
||
|
|
getHook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}", {}, {
|
||
|
|
renamed: ["repos", "getWebhook"]
|
||
|
|
}],
|
||
|
|
getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"],
|
||
|
|
getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"],
|
||
|
|
getPages: ["GET /repos/{owner}/{repo}/pages"],
|
||
|
|
getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"],
|
||
|
|
getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"],
|
||
|
|
getProtectedBranchAdminEnforcement: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", {}, {
|
||
|
|
renamed: ["repos", "getAdminBranchProtection"]
|
||
|
|
}],
|
||
|
|
getProtectedBranchPullRequestReviewEnforcement: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", {}, {
|
||
|
|
renamed: ["repos", "getPullRequestReviewProtection"]
|
||
|
|
}],
|
||
|
|
getProtectedBranchRequiredSignatures: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["zzzax"]
|
||
|
|
}
|
||
|
|
}, {
|
||
|
|
renamed: ["repos", "getCommitSignatureProtection"]
|
||
|
|
}],
|
||
|
|
getProtectedBranchRequiredStatusChecks: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, {
|
||
|
|
renamed: ["repos", "getStatusChecksProtection"]
|
||
|
|
}],
|
||
|
|
getProtectedBranchRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions", {}, {
|
||
|
|
renamed: ["repos", "getAccessRestrictions"]
|
||
|
|
}],
|
||
|
|
getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"],
|
||
|
|
getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"],
|
||
|
|
getReadme: ["GET /repos/{owner}/{repo}/readme"],
|
||
|
|
getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"],
|
||
|
|
getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"],
|
||
|
|
getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"],
|
||
|
|
getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"],
|
||
|
|
getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"],
|
||
|
|
getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"],
|
||
|
|
getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"],
|
||
|
|
getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"],
|
||
|
|
getViews: ["GET /repos/{owner}/{repo}/traffic/views"],
|
||
|
|
getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"],
|
||
|
|
list: ["GET /user/repos", {}, {
|
||
|
|
renamed: ["repos", "listForAuthenticatedUser"]
|
||
|
|
}],
|
||
|
|
listAssetsForRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets", {}, {
|
||
|
|
renamed: ["repos", "listReleaseAssets"]
|
||
|
|
}],
|
||
|
|
listBranches: ["GET /repos/{owner}/{repo}/branches"],
|
||
|
|
listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["groot"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"],
|
||
|
|
listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"],
|
||
|
|
listCommitComments: ["GET /repos/{owner}/{repo}/comments", {}, {
|
||
|
|
renamed: ["repos", "listCommitCommentsForRepo"]
|
||
|
|
}],
|
||
|
|
listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"],
|
||
|
|
listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"],
|
||
|
|
listCommits: ["GET /repos/{owner}/{repo}/commits"],
|
||
|
|
listContributors: ["GET /repos/{owner}/{repo}/contributors"],
|
||
|
|
listDeployKeys: ["GET /repos/{owner}/{repo}/keys"],
|
||
|
|
listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"],
|
||
|
|
listDeployments: ["GET /repos/{owner}/{repo}/deployments"],
|
||
|
|
listDownloads: ["GET /repos/{owner}/{repo}/downloads"],
|
||
|
|
listForAuthenticatedUser: ["GET /user/repos"],
|
||
|
|
listForOrg: ["GET /orgs/{org}/repos"],
|
||
|
|
listForUser: ["GET /users/{username}/repos"],
|
||
|
|
listForks: ["GET /repos/{owner}/{repo}/forks"],
|
||
|
|
listHooks: ["GET /repos/{owner}/{repo}/hooks", {}, {
|
||
|
|
renamed: ["repos", "listWebhooks"]
|
||
|
|
}],
|
||
|
|
listInvitations: ["GET /repos/{owner}/{repo}/invitations"],
|
||
|
|
listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"],
|
||
|
|
listLanguages: ["GET /repos/{owner}/{repo}/languages"],
|
||
|
|
listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"],
|
||
|
|
listProtectedBranchRequiredStatusChecksContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
|
||
|
|
renamed: ["repos", "getAllStatusCheckContexts"]
|
||
|
|
}],
|
||
|
|
listPublic: ["GET /repositories"],
|
||
|
|
listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["groot"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"],
|
||
|
|
listReleases: ["GET /repos/{owner}/{repo}/releases"],
|
||
|
|
listStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses", {}, {
|
||
|
|
renamed: ["repos", "listCommitStatusesForRef"]
|
||
|
|
}],
|
||
|
|
listTags: ["GET /repos/{owner}/{repo}/tags"],
|
||
|
|
listTeams: ["GET /repos/{owner}/{repo}/teams"],
|
||
|
|
listTopics: ["GET /repos/{owner}/{repo}/topics", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["mercy"]
|
||
|
|
}
|
||
|
|
}, {
|
||
|
|
renamed: ["repos", "getAllTopics"]
|
||
|
|
}],
|
||
|
|
listWebhooks: ["GET /repos/{owner}/{repo}/hooks"],
|
||
|
|
merge: ["POST /repos/{owner}/{repo}/merges"],
|
||
|
|
pingHook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings", {}, {
|
||
|
|
renamed: ["repos", "pingWebhook"]
|
||
|
|
}],
|
||
|
|
pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"],
|
||
|
|
removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
|
||
|
|
mapToData: "apps"
|
||
|
|
}],
|
||
|
|
removeBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection", {}, {
|
||
|
|
renamed: ["repos", "deleteBranchProtection"]
|
||
|
|
}],
|
||
|
|
removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"],
|
||
|
|
removeDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}", {}, {
|
||
|
|
renamed: ["repos", "deleteDeployKey"]
|
||
|
|
}],
|
||
|
|
removeProtectedBranchAdminEnforcement: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", {}, {
|
||
|
|
renamed: ["repos", "deleteAdminBranchProtection"]
|
||
|
|
}],
|
||
|
|
removeProtectedBranchAppRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
|
||
|
|
mapToData: "apps",
|
||
|
|
renamed: ["repos", "removeAppAccessRestrictions"]
|
||
|
|
}],
|
||
|
|
removeProtectedBranchPullRequestReviewEnforcement: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", {}, {
|
||
|
|
renamed: ["repos", "deletePullRequestReviewProtection"]
|
||
|
|
}],
|
||
|
|
removeProtectedBranchRequiredSignatures: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["zzzax"]
|
||
|
|
}
|
||
|
|
}, {
|
||
|
|
renamed: ["repos", "deleteCommitSignatureProtection"]
|
||
|
|
}],
|
||
|
|
removeProtectedBranchRequiredStatusChecks: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, {
|
||
|
|
renamed: ["repos", "removeStatusChecksProtection"]
|
||
|
|
}],
|
||
|
|
removeProtectedBranchRequiredStatusChecksContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
|
||
|
|
mapToData: "contexts",
|
||
|
|
renamed: ["repos", "removeStatusCheckContexts"]
|
||
|
|
}],
|
||
|
|
removeProtectedBranchRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions", {}, {
|
||
|
|
renamed: ["repos", "deleteAccessRestrictions"]
|
||
|
|
}],
|
||
|
|
removeProtectedBranchTeamRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, {
|
||
|
|
mapToData: "teams",
|
||
|
|
renamed: ["repos", "removeTeamAccessRestrictions"]
|
||
|
|
}],
|
||
|
|
removeProtectedBranchUserRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
|
||
|
|
mapToData: "users",
|
||
|
|
renamed: ["repos", "removeUserAccessRestrictions"]
|
||
|
|
}],
|
||
|
|
removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
|
||
|
|
mapToData: "contexts"
|
||
|
|
}],
|
||
|
|
removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"],
|
||
|
|
removeTeamAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, {
|
||
|
|
mapToData: "teams"
|
||
|
|
}],
|
||
|
|
removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
|
||
|
|
mapToData: "users"
|
||
|
|
}],
|
||
|
|
replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["mercy"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
replaceProtectedBranchAppRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
|
||
|
|
mapToData: "apps",
|
||
|
|
renamed: ["repos", "setAppAccessRestrictions"]
|
||
|
|
}],
|
||
|
|
replaceProtectedBranchRequiredStatusChecksContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
|
||
|
|
mapToData: "contexts",
|
||
|
|
renamed: ["repos", "setStatusCheckContexts"]
|
||
|
|
}],
|
||
|
|
replaceProtectedBranchTeamRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, {
|
||
|
|
mapToData: "teams",
|
||
|
|
renamed: ["repos", "setTeamAccessRestrictions"]
|
||
|
|
}],
|
||
|
|
replaceProtectedBranchUserRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
|
||
|
|
mapToData: "users",
|
||
|
|
renamed: ["repos", "setUserAccessRestrictions"]
|
||
|
|
}],
|
||
|
|
replaceTopics: ["PUT /repos/{owner}/{repo}/topics", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["mercy"]
|
||
|
|
}
|
||
|
|
}, {
|
||
|
|
renamed: ["repos", "replaceAllTopics"]
|
||
|
|
}],
|
||
|
|
requestPageBuild: ["POST /repos/{owner}/{repo}/pages/builds", {}, {
|
||
|
|
renamed: ["repos", "requestPagesBuild"]
|
||
|
|
}],
|
||
|
|
requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"],
|
||
|
|
retrieveCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile", {}, {
|
||
|
|
renamed: ["repos", "getCommunityProfileMetrics"]
|
||
|
|
}],
|
||
|
|
setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
|
||
|
|
setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
|
||
|
|
mapToData: "apps"
|
||
|
|
}],
|
||
|
|
setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
|
||
|
|
mapToData: "contexts"
|
||
|
|
}],
|
||
|
|
setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, {
|
||
|
|
mapToData: "teams"
|
||
|
|
}],
|
||
|
|
setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
|
||
|
|
mapToData: "users"
|
||
|
|
}],
|
||
|
|
testPushHook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests", {}, {
|
||
|
|
renamed: ["repos", "testPushWebhook"]
|
||
|
|
}],
|
||
|
|
testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"],
|
||
|
|
transfer: ["POST /repos/{owner}/{repo}/transfer"],
|
||
|
|
update: ["PATCH /repos/{owner}/{repo}"],
|
||
|
|
updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"],
|
||
|
|
updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"],
|
||
|
|
updateHook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}", {}, {
|
||
|
|
renamed: ["repos", "updateWebhook"]
|
||
|
|
}],
|
||
|
|
updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"],
|
||
|
|
updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"],
|
||
|
|
updateProtectedBranchPullRequestReviewEnforcement: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", {}, {
|
||
|
|
renamed: ["repos", "updatePullRequestReviewProtection"]
|
||
|
|
}],
|
||
|
|
updateProtectedBranchRequiredStatusChecks: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, {
|
||
|
|
renamed: ["repos", "updateStatusChecksProtection"]
|
||
|
|
}],
|
||
|
|
updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"],
|
||
|
|
updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"],
|
||
|
|
updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"],
|
||
|
|
updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"],
|
||
|
|
updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"],
|
||
|
|
uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", {
|
||
|
|
baseUrl: "https://uploads.github.com"
|
||
|
|
}]
|
||
|
|
},
|
||
|
|
search: {
|
||
|
|
code: ["GET /search/code"],
|
||
|
|
commits: ["GET /search/commits", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["cloak"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
issuesAndPullRequests: ["GET /search/issues"],
|
||
|
|
labels: ["GET /search/labels"],
|
||
|
|
repos: ["GET /search/repositories"],
|
||
|
|
topics: ["GET /search/topics"],
|
||
|
|
users: ["GET /search/users"]
|
||
|
|
},
|
||
|
|
teams: {
|
||
|
|
addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"],
|
||
|
|
addOrUpdateMembershipInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}", {}, {
|
||
|
|
renamed: ["teams", "addOrUpdateMembershipForUserInOrg"]
|
||
|
|
}],
|
||
|
|
addOrUpdateProjectInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}, {
|
||
|
|
renamed: ["teams", "addOrUpdateProjectPermissionsInOrg"]
|
||
|
|
}],
|
||
|
|
addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
addOrUpdateRepoInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", {}, {
|
||
|
|
renamed: ["teams", "addOrUpdateRepoPermissionsInOrg"]
|
||
|
|
}],
|
||
|
|
addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"],
|
||
|
|
checkManagesRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", {}, {
|
||
|
|
renamed: ["teams", "checkPermissionsForRepoInOrg"]
|
||
|
|
}],
|
||
|
|
checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"],
|
||
|
|
create: ["POST /orgs/{org}/teams"],
|
||
|
|
createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"],
|
||
|
|
createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"],
|
||
|
|
deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"],
|
||
|
|
deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"],
|
||
|
|
deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"],
|
||
|
|
getByName: ["GET /orgs/{org}/teams/{team_slug}"],
|
||
|
|
getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"],
|
||
|
|
getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"],
|
||
|
|
getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"],
|
||
|
|
getMembershipInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}", {}, {
|
||
|
|
renamed: ["teams", "getMembershipForUserInOrg"]
|
||
|
|
}],
|
||
|
|
list: ["GET /orgs/{org}/teams"],
|
||
|
|
listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"],
|
||
|
|
listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"],
|
||
|
|
listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"],
|
||
|
|
listForAuthenticatedUser: ["GET /user/teams"],
|
||
|
|
listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"],
|
||
|
|
listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"],
|
||
|
|
listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}],
|
||
|
|
listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"],
|
||
|
|
removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"],
|
||
|
|
removeMembershipInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}", {}, {
|
||
|
|
renamed: ["teams", "removeMembershipForUserInOrg"]
|
||
|
|
}],
|
||
|
|
removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"],
|
||
|
|
removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"],
|
||
|
|
reviewProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", {
|
||
|
|
mediaType: {
|
||
|
|
previews: ["inertia"]
|
||
|
|
}
|
||
|
|
}, {
|
||
|
|
renamed: ["teams", "checkPermissionsForProjectInOrg"]
|
||
|
|
}],
|
||
|
|
updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"],
|
||
|
|
updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"],
|
||
|
|
updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"]
|
||
|
|
},
|
||
|
|
users: {
|
||
|
|
addEmailForAuthenticated: ["POST /user/emails"],
|
||
|
|
addEmails: ["POST /user/emails", {}, {
|
||
|
|
renamed: ["users", "addEmailsForAuthenticated"]
|
||
|
|
}],
|
||
|
|
block: ["PUT /user/blocks/{username}"],
|
||
|
|
checkBlocked: ["GET /user/blocks/{username}"],
|
||
|
|
checkFollowing: ["GET /user/following/{username}", {}, {
|
||
|
|
renamed: ["users", "checkPersonIsFollowedByAuthenticated"]
|
||
|
|
}],
|
||
|
|
checkFollowingForUser: ["GET /users/{username}/following/{target_user}"],
|
||
|
|
checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"],
|
||
|
|
createGpgKey: ["POST /user/gpg_keys", {}, {
|
||
|
|
renamed: ["users", "createGpgKeyForAuthenticated"]
|
||
|
|
}],
|
||
|
|
createGpgKeyForAuthenticated: ["POST /user/gpg_keys"],
|
||
|
|
createPublicKey: ["POST /user/keys", {}, {
|
||
|
|
renamed: ["users", "createPublicSshKeyForAuthenticated"]
|
||
|
|
}],
|
||
|
|
createPublicSshKeyForAuthenticated: ["POST /user/keys"],
|
||
|
|
deleteEmailForAuthenticated: ["DELETE /user/emails"],
|
||
|
|
deleteEmails: ["DELETE /user/emails", {}, {
|
||
|
|
renamed: ["users", "deleteEmailsForAuthenticated"]
|
||
|
|
}],
|
||
|
|
deleteGpgKey: ["DELETE /user/gpg_keys/{gpg_key_id}", {}, {
|
||
|
|
renamed: ["users", "deleteGpgKeyForAuthenticated"]
|
||
|
|
}],
|
||
|
|
deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}"],
|
||
|
|
deletePublicKey: ["DELETE /user/keys/{key_id}", {}, {
|
||
|
|
renamed: ["users", "deletePublicSshKeyForAuthenticated"]
|
||
|
|
}],
|
||
|
|
deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}"],
|
||
|
|
follow: ["PUT /user/following/{username}"],
|
||
|
|
getAuthenticated: ["GET /user"],
|
||
|
|
getByUsername: ["GET /users/{username}"],
|
||
|
|
getContextForUser: ["GET /users/{username}/hovercard"],
|
||
|
|
getGpgKey: ["GET /user/gpg_keys/{gpg_key_id}", {}, {
|
||
|
|
renamed: ["users", "getGpgKeyForAuthenticated"]
|
||
|
|
}],
|
||
|
|
getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}"],
|
||
|
|
getPublicKey: ["GET /user/keys/{key_id}", {}, {
|
||
|
|
renamed: ["users", "getPublicSshKeyForAuthenticated"]
|
||
|
|
}],
|
||
|
|
getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}"],
|
||
|
|
list: ["GET /users"],
|
||
|
|
listBlocked: ["GET /user/blocks", {}, {
|
||
|
|
renamed: ["users", "listBlockedByAuthenticated"]
|
||
|
|
}],
|
||
|
|
listBlockedByAuthenticated: ["GET /user/blocks"],
|
||
|
|
listEmails: ["GET /user/emails", {}, {
|
||
|
|
renamed: ["users", "listEmailsForAuthenticated"]
|
||
|
|
}],
|
||
|
|
listEmailsForAuthenticated: ["GET /user/emails"],
|
||
|
|
listFollowedByAuthenticated: ["GET /user/following"],
|
||
|
|
listFollowersForAuthenticatedUser: ["GET /user/followers"],
|
||
|
|
listFollowersForUser: ["GET /users/{username}/followers"],
|
||
|
|
listFollowingForAuthenticatedUser: ["GET /user/following", {}, {
|
||
|
|
renamed: ["users", "listFollowedByAuthenticated"]
|
||
|
|
}],
|
||
|
|
listFollowingForUser: ["GET /users/{username}/following"],
|
||
|
|
listGpgKeys: ["GET /user/gpg_keys", {}, {
|
||
|
|
renamed: ["users", "listGpgKeysForAuthenticated"]
|
||
|
|
}],
|
||
|
|
listGpgKeysForAuthenticated: ["GET /user/gpg_keys"],
|
||
|
|
listGpgKeysForUser: ["GET /users/{username}/gpg_keys"],
|
||
|
|
listPublicEmails: ["GET /user/public_emails", {}, {
|
||
|
|
renamed: ["users", "listPublicEmailsForAuthenticatedUser"]
|
||
|
|
}],
|
||
|
|
listPublicEmailsForAuthenticated: ["GET /user/public_emails"],
|
||
|
|
listPublicKeys: ["GET /user/keys", {}, {
|
||
|
|
renamed: ["users", "listPublicSshKeysForAuthenticated"]
|
||
|
|
}],
|
||
|
|
listPublicKeysForUser: ["GET /users/{username}/keys"],
|
||
|
|
listPublicSshKeysForAuthenticated: ["GET /user/keys"],
|
||
|
|
setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility"],
|
||
|
|
togglePrimaryEmailVisibility: ["PATCH /user/email/visibility", {}, {
|
||
|
|
renamed: ["users", "setPrimaryEmailVisibilityForAuthenticated"]
|
||
|
|
}],
|
||
|
|
unblock: ["DELETE /user/blocks/{username}"],
|
||
|
|
unfollow: ["DELETE /user/following/{username}"],
|
||
|
|
updateAuthenticated: ["PATCH /user"]
|
||
|
|
}
|
||
|
|
};
|
||
|
|
|
||
|
|
// Package version string; attached to `restEndpointMethods.VERSION` below.
const VERSION = "3.17.0";
|
||
|
|
|
||
|
|
/**
 * Turn the static endpoints map (scope -> method name -> [route, defaults, decorations])
 * into callable request methods bound to the given Octokit instance.
 *
 * @param {object} octokit - Octokit instance; only `octokit.request.defaults` is used here.
 * @param {object} endpointsMap - e.g. `{ repos: { get: ["GET /repos/{owner}/{repo}", defaults?, decorations?] } }`.
 * @returns {object} nested map of scope -> method name -> request function.
 */
function endpointsToMethods(octokit, endpointsMap) {
  const newMethods = {};

  for (const [scope, endpoints] of Object.entries(endpointsMap)) {
    for (const [methodName, endpoint] of Object.entries(endpoints)) {
      const [route, defaults, decorations] = endpoint;
      // Route strings are "<METHOD> <url>", e.g. "GET /repos/{owner}/{repo}".
      const [method, url] = route.split(/ /);
      const endpointDefaults = Object.assign({
        method,
        url
      }, defaults);

      if (!newMethods[scope]) {
        newMethods[scope] = {};
      }

      const scopeMethods = newMethods[scope];

      // Deprecated/renamed endpoints get a wrapper that logs warnings and
      // remaps parameters; everything else is a plain `.defaults()` binding.
      if (decorations) {
        scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);
        continue;
      }

      scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);
    }
  }

  return newMethods;
}
|
||
|
|
|
||
|
|
/**
 * Wrap a request method with legacy-compatibility behavior:
 * - `mapToData`: move one named parameter into the request `data` body.
 * - `renamed`: log a warning pointing at the new scope/method name.
 * - `deprecated`: log the given deprecation message.
 * - `renamedParameters`: warn about and remap old parameter names.
 *
 * @param {object} octokit - instance providing `request.defaults` and `log.warn`.
 * @param {string} scope - endpoint scope, e.g. "repos".
 * @param {string} methodName - legacy method name being decorated.
 * @param {object} defaults - endpoint defaults (method, url, mediaType, ...).
 * @param {object} decorations - which of the behaviors above to apply.
 * @returns {Function} request function with the defaults' properties merged on.
 */
function decorate(octokit, scope, methodName, defaults, decorations) {
  const requestWithDefaults = octokit.request.defaults(defaults);

  function withDecorations(...args) {
    // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
    let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData`

    if (decorations.mapToData) {
      // Move the named parameter into `data` and blank the original key so it
      // is not also serialized as a URL/query parameter.
      options = Object.assign({}, options, {
        data: options[decorations.mapToData],
        [decorations.mapToData]: undefined
      });
      return requestWithDefaults(options);
    } // NOTE: there are currently no deprecations. But we keep the code
    // below for future reference


    if (decorations.renamed) {
      const [newScope, newMethodName] = decorations.renamed;
      octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);
    }

    if (decorations.deprecated) {
      octokit.log.warn(decorations.deprecated);
    }

    if (decorations.renamedParameters) {
      // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
      const options = requestWithDefaults.endpoint.merge(...args);

      for (const [name, alias] of Object.entries(decorations.renamedParameters)) {
        // There is currently no deprecated parameter that is optional,
        // so we never hit the else branch below at this point.

        /* istanbul ignore else */
        if (name in options) {
          octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`);

          if (!(alias in options)) {
            options[alias] = options[name];
          }

          delete options[name];
        }
      }

      return requestWithDefaults(options);
    } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488


    return requestWithDefaults(...args);
  }

  // Expose the defaults' own properties (e.g. `.endpoint`) on the wrapper.
  return Object.assign(withDecorations, requestWithDefaults);
}
|
||
|
|
|
||
|
|
/**
 * This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary
 * goal is to rebuild @octokit/rest on top of @octokit/core. Once that is
 * done, we will remove the registerEndpoints methods and return the methods
 * directly as with the other plugins. At that point we will also remove the
 * legacy workarounds and deprecations.
 *
 * See the plan at
 * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1
 */
|
||
|
|
|
||
|
|
function restEndpointMethods(octokit) {
|
||
|
|
return endpointsToMethods(octokit, Endpoints);
|
||
|
|
}
|
||
|
|
restEndpointMethods.VERSION = VERSION;
|
||
|
|
|
||
|
|
exports.restEndpointMethods = restEndpointMethods;
|
||
|
|
//# sourceMappingURL=index.js.map
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 854:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";

var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.is404 = void 0;
const node_fetch_1 = __importDefault(__webpack_require__(454));
/**
 * Resolve true when an HTTP GET of `url` answers with status 404.
 * Network errors are NOT swallowed: the returned promise rejects on them.
 */
function is404(url) {
    return node_fetch_1.default(url).then(response => response.status === 404);
}
exports.is404 = is404;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 862:
|
||
|
|
/***/ (function(__unusedmodule, exports) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
|
||
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||
|
|
|
||
|
|
/**
 * Build a human-readable user-agent string for the current runtime.
 * Browsers report `navigator.userAgent`; Node.js reports
 * "Node.js/<version> (<platform>; <arch>)"; anything else is flagged
 * as undetectable.
 */
function getUserAgent() {
  const inBrowser = typeof navigator === "object" && "userAgent" in navigator;
  if (inBrowser) {
    return navigator.userAgent;
  }

  const inNode = typeof process === "object" && "version" in process;
  if (!inNode) {
    return "<environment undetectable>";
  }

  const version = process.version.substr(1); // drop the leading "v"
  return `Node.js/${version} (${process.platform}; ${process.arch})`;
}
|
||
|
|
|
||
|
|
exports.getUserAgent = getUserAgent;
|
||
|
|
//# sourceMappingURL=index.js.map
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 863:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
|
|
});
|
||
|
|
};
|
||
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
|
exports.action = exports.getActionParams = void 0;
|
||
|
|
const inputHelper_1 = __webpack_require__(649);
|
||
|
|
const createOctokit_1 = __webpack_require__(906);
|
||
|
|
exports.getActionParams = inputHelper_1.getActionParamsFactory({
|
||
|
|
"inputNameSubset": [
|
||
|
|
"owner",
|
||
|
|
"repo",
|
||
|
|
"event_type",
|
||
|
|
"client_payload_json",
|
||
|
|
"github_token"
|
||
|
|
]
|
||
|
|
}).getActionParams;
|
||
|
|
/**
 * Fire a repository_dispatch event of type `event_type` on {owner}/{repo}.
 * When `client_payload_json` is a non-empty string it is parsed as JSON and
 * forwarded as the dispatch `client_payload`.
 */
function action(_actionName, params, core) {
    return __awaiter(this, void 0, void 0, function* () {
        const { owner, repo, event_type, client_payload_json, github_token } = params;
        core.debug(JSON.stringify({ _actionName, params }));
        const octokit = createOctokit_1.createOctokit({ github_token });
        const payloadPart = !!client_payload_json
            ? { "client_payload": JSON.parse(client_payload_json) }
            : {};
        yield octokit.repos.createDispatchEvent(Object.assign({ owner, repo, event_type }, payloadPart));
    });
}
exports.action = action;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 866:
|
||
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
var shebangRegex = __webpack_require__(816);
|
||
|
|
|
||
|
|
module.exports = function (str) {
|
||
|
|
var match = str.match(shebangRegex);
|
||
|
|
|
||
|
|
if (!match) {
|
||
|
|
return null;
|
||
|
|
}
|
||
|
|
|
||
|
|
var arr = match[0].replace(/#! ?/, '').split(' ');
|
||
|
|
var bin = arr[0].split('/').pop();
|
||
|
|
var arg = arr[1];
|
||
|
|
|
||
|
|
return (bin === 'env' ?
|
||
|
|
arg :
|
||
|
|
bin + (arg ? ' ' + arg : '')
|
||
|
|
);
|
||
|
|
};
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 881:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
"use strict";

// On Windows, child_process does not reliably raise ENOENT for missing
// executables, which the helpers in this module compensate for.
const isWin = process.platform === 'win32';

/**
 * Build the ENOENT error Node itself would have produced for a command
 * that could not be found, preserving the original command and arguments.
 */
function notFoundError(original, syscall) {
    const error = new Error(`${syscall} ${original.command} ENOENT`);
    error.code = 'ENOENT';
    error.errno = 'ENOENT';
    error.syscall = `${syscall} ${original.command}`;
    error.path = original.command;
    error.spawnargs = original.args;
    return error;
}
|
||
|
|
|
||
|
|
function hookChildProcess(cp, parsed) {
|
||
|
|
if (!isWin) {
|
||
|
|
return;
|
||
|
|
}
|
||
|
|
|
||
|
|
const originalEmit = cp.emit;
|
||
|
|
|
||
|
|
cp.emit = function (name, arg1) {
|
||
|
|
// If emitting "exit" event and exit code is 1, we need to check if
|
||
|
|
// the command exists and emit an "error" instead
|
||
|
|
// See https://github.com/IndigoUnited/node-cross-spawn/issues/16
|
||
|
|
if (name === 'exit') {
|
||
|
|
const err = verifyENOENT(arg1, parsed, 'spawn');
|
||
|
|
|
||
|
|
if (err) {
|
||
|
|
return originalEmit.call(cp, 'error', err);
|
||
|
|
}
|
||
|
|
}
|
||
|
|
|
||
|
|
return originalEmit.apply(cp, arguments); // eslint-disable-line prefer-rest-params
|
||
|
|
};
|
||
|
|
}
|
||
|
|
|
||
|
|
function verifyENOENT(status, parsed) {
|
||
|
|
if (isWin && status === 1 && !parsed.file) {
|
||
|
|
return notFoundError(parsed.original, 'spawn');
|
||
|
|
}
|
||
|
|
|
||
|
|
return null;
|
||
|
|
}
|
||
|
|
|
||
|
|
function verifyENOENTSync(status, parsed) {
|
||
|
|
if (isWin && status === 1 && !parsed.file) {
|
||
|
|
return notFoundError(parsed.original, 'spawnSync');
|
||
|
|
}
|
||
|
|
|
||
|
|
return null;
|
||
|
|
}
|
||
|
|
|
||
|
|
module.exports = {
|
||
|
|
hookChildProcess,
|
||
|
|
verifyENOENT,
|
||
|
|
verifyENOENTSync,
|
||
|
|
notFoundError,
|
||
|
|
};
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 889:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
|
||
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||
|
|
|
||
|
|
var core = __webpack_require__(448);
|
||
|
|
var pluginRequestLog = __webpack_require__(916);
|
||
|
|
var pluginPaginateRest = __webpack_require__(299);
|
||
|
|
var pluginRestEndpointMethods = __webpack_require__(842);
|
||
|
|
|
||
|
|
const VERSION = "17.11.2";

// Assemble the Octokit class used by this bundle: the core client extended
// with request logging, REST endpoint methods and pagination, plus a
// version-stamped default user agent.
const Octokit = core.Octokit
    .plugin(pluginRequestLog.requestLog, pluginRestEndpointMethods.restEndpointMethods, pluginPaginateRest.paginateRest)
    .defaults({
        userAgent: `octokit-rest.js/${VERSION}`
    });

exports.Octokit = Octokit;
|
||
|
|
//# sourceMappingURL=index.js.map
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 898:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
|
||
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||
|
|
|
||
|
|
var request = __webpack_require__(753);
|
||
|
|
var universalUserAgent = __webpack_require__(862);
|
||
|
|
|
||
|
|
const VERSION = "4.6.1";
|
||
|
|
|
||
|
|
/**
 * Error thrown when a GraphQL response carries an `errors` array.
 * The message is the first reported error; the whole response payload
 * (including `errors`) plus the response headers and the request options
 * are copied onto the instance for inspection.
 */
class GraphqlError extends Error {
  constructor(request, response) {
    super(response.data.errors[0].message);
    Object.assign(this, response.data);
    Object.assign(this, {
      headers: response.headers
    });
    this.name = "GraphqlError";
    this.request = request;

    // Maintains proper stack trace (only available on V8)
    /* istanbul ignore next */
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }
  }
}
|
||
|
|
|
||
|
|
// Keys that belong to the HTTP request itself; everything else the caller
// passes is treated as a GraphQL variable.
const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"];
// Keys that would shadow request semantics if used as variables.
const FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"];
const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/;

/**
 * Execute a GraphQL query through `request`. `query` may be the query
 * string (with `options` holding variables) or a full options object.
 * Rejects with GraphqlError when the response reports `errors`.
 */
function graphql(request, query, options) {
  if (options) {
    if (typeof query === "string" && "query" in options) {
      return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`));
    }
    for (const key in options) {
      if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue;
      return Promise.reject(new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`));
    }
  }

  const parsedOptions = typeof query === "string" ? Object.assign({
    query
  }, options) : query;

  // Split request-level keys from GraphQL variables.
  const requestOptions = {};
  for (const key of Object.keys(parsedOptions)) {
    if (NON_VARIABLE_OPTIONS.includes(key)) {
      requestOptions[key] = parsedOptions[key];
      continue;
    }
    if (!requestOptions.variables) {
      requestOptions.variables = {};
    }
    requestOptions.variables[key] = parsedOptions[key];
  }

  // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix
  // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451
  const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;
  if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {
    requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql");
  }

  return request(requestOptions).then(response => {
    if (response.data.errors) {
      const headers = {};
      for (const key of Object.keys(response.headers)) {
        headers[key] = response.headers[key];
      }
      throw new GraphqlError(requestOptions, {
        headers,
        data: response.data
      });
    }
    return response.data.data;
  });
}
|
||
|
|
|
||
|
|
/**
 * Create a graphql() entry point bound to `request$1.defaults(newDefaults)`,
 * carrying `defaults` for further chaining and the shared request endpoint.
 */
function withDefaults(request$1, newDefaults) {
  const newRequest = request$1.defaults(newDefaults);
  const newApi = (query, options) => graphql(newRequest, query, options);
  return Object.assign(newApi, {
    defaults: withDefaults.bind(null, newRequest),
    endpoint: request.request.endpoint
  });
}

// Default export: POST /graphql with a version-stamped user agent.
const graphql$1 = withDefaults(request.request, {
  headers: {
    "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`
  },
  method: "POST",
  url: "/graphql"
});

// Same entry point, but driven by a caller-supplied request function.
function withCustomRequest(customRequest) {
  return withDefaults(customRequest, {
    method: "POST",
    url: "/graphql"
  });
}

exports.graphql = graphql$1;
exports.withCustomRequest = withCustomRequest;
//# sourceMappingURL=index.js.map
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 906:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createOctokit = void 0;
const rest_1 = __webpack_require__(889);
/**
 * Instantiate an Octokit REST client: authenticated with `github_token`
 * when it is a non-empty string, anonymous otherwise.
 */
function createOctokit(params) {
    const { github_token } = params;
    const authPart = github_token !== "" ? { "auth": github_token } : {};
    return new rest_1.Octokit(Object.assign({}, authPart));
}
exports.createOctokit = createOctokit;
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 916:
|
||
|
|
/***/ (function(__unusedmodule, exports) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
|
||
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||
|
|
|
||
|
|
const VERSION = "1.0.3";

/**
 * @param octokit Octokit instance
 * @param options Options passed to Octokit constructor
 */
function requestLog(octokit) {
  octokit.hook.wrap("request", (request, options) => {
    octokit.log.debug("request", options);
    const start = Date.now();
    const requestOptions = octokit.request.endpoint.parse(options);
    const path = requestOptions.url.replace(options.baseUrl, "");
    // One info line per request: "<METHOD> <path> - <status> in <ms>ms",
    // emitted for both successful and failed responses.
    const logOutcome = (status) => {
      octokit.log.info(`${requestOptions.method} ${path} - ${status} in ${Date.now() - start}ms`);
    };
    return request(options).then(response => {
      logOutcome(response.status);
      return response;
    }).catch(error => {
      logOutcome(error.status);
      throw error;
    });
  });
}
requestLog.VERSION = VERSION;
|
||
|
|
|
||
|
|
exports.requestLog = requestLog;
|
||
|
|
//# sourceMappingURL=index.js.map
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 948:
|
||
|
|
/***/ (function(module) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
|
||
|
|
/**
|
||
|
|
* Tries to execute a function and discards any error that occurs.
|
||
|
|
* @param {Function} fn - Function that might or might not throw an error.
|
||
|
|
* @returns {?*} Return-value of the function when no error occurred.
|
||
|
|
*/
|
||
|
|
module.exports = function(fn) {
|
||
|
|
|
||
|
|
try { return fn() } catch (e) {}
|
||
|
|
|
||
|
|
}
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 949:
|
||
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
|
|
});
|
||
|
|
};
|
||
|
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||
|
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||
|
|
};
|
||
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
|
exports.action = exports.setOutput = exports.getActionParams = void 0;
|
||
|
|
const node_fetch_1 = __importDefault(__webpack_require__(454));
|
||
|
|
const urlJoin = __webpack_require__(683);
|
||
|
|
const outputHelper_1 = __webpack_require__(762);
|
||
|
|
const NpmModuleVersion_1 = __webpack_require__(395);
|
||
|
|
const inputHelper_1 = __webpack_require__(649);
|
||
|
|
const createOctokit_1 = __webpack_require__(906);
|
||
|
|
const getLatestSemVersionedTag_1 = __webpack_require__(472);
|
||
|
|
exports.getActionParams = inputHelper_1.getActionParamsFactory({
|
||
|
|
"inputNameSubset": [
|
||
|
|
"owner",
|
||
|
|
"repo",
|
||
|
|
"branch",
|
||
|
|
"github_token"
|
||
|
|
]
|
||
|
|
}).getActionParams;
|
||
|
|
exports.setOutput = outputHelper_1.setOutputFactory().setOutput;
|
||
|
|
/**
 * Compare the package.json version on {owner}/{repo}#{branch} against the
 * repository's latest sem-versioned tag.
 * Returns { to_version, from_version, is_upgraded_version } where
 * is_upgraded_version is the string "true"/"false".
 * Throws when package.json (or its version field) cannot be read.
 */
function action(_actionName, params, core) {
    return __awaiter(this, void 0, void 0, function* () {
        core.debug(JSON.stringify(params));
        const { owner, repo, branch, github_token } = params;
        const to_version = yield getPackageJsonVersion({ owner, repo, branch });
        if (to_version === undefined) {
            // Bug fix: this message was a double-quoted string, so the ${...}
            // placeholders were printed literally; a template literal
            // interpolates the actual repo coordinates.
            throw new Error(`No version in package.json on ${owner}/${repo}#${branch} (or repo is private)`);
        }
        core.debug(`Version on ${owner}/${repo}#${branch} is ${NpmModuleVersion_1.NpmModuleVersion.stringify(to_version)}`);
        const octokit = createOctokit_1.createOctokit({ github_token });
        const { getLatestSemVersionedTag } = getLatestSemVersionedTag_1.getLatestSemVersionedTagFactory({ octokit });
        // Fall back to 0.0.0 when the repo has no sem-versioned tag yet.
        const { version: from_version } = yield getLatestSemVersionedTag({ owner, repo })
            .then(wrap => wrap === undefined ? { "version": NpmModuleVersion_1.NpmModuleVersion.parse("0.0.0") } : wrap);
        core.debug(`Last version was ${NpmModuleVersion_1.NpmModuleVersion.stringify(from_version)}`);
        const is_upgraded_version = NpmModuleVersion_1.NpmModuleVersion.compare(to_version, from_version) === 1 ? "true" : "false";
        core.debug(`Is version upgraded: ${is_upgraded_version}`);
        return {
            "to_version": NpmModuleVersion_1.NpmModuleVersion.stringify(to_version),
            "from_version": NpmModuleVersion_1.NpmModuleVersion.stringify(from_version),
            is_upgraded_version
        };
    });
}
exports.action = action;
|
||
|
|
//TODO: Find a way to make it work with private repo
/**
 * Fetch the "version" field of package.json at the head of `branch` via
 * raw.github.com. Resolves undefined when the file is unreachable
 * (e.g. private repo) or unparsable; otherwise a parsed NpmModuleVersion.
 */
function getPackageJsonVersion(params) {
    return __awaiter(this, void 0, void 0, function* () {
        const { owner, repo, branch } = params;
        const url = urlJoin(`https://raw.github.com`, owner, repo, branch, "package.json");
        const version = yield node_fetch_1.default(url)
            .then(res => res.text())
            .then(text => JSON.parse(text))
            .then(pkg => pkg.version)
            .catch(() => undefined);
        return version === undefined
            ? undefined
            : NpmModuleVersion_1.NpmModuleVersion.parse(version);
    });
}
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 955:
|
||
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
const path = __webpack_require__(622);
|
||
|
|
const childProcess = __webpack_require__(129);
|
||
|
|
const crossSpawn = __webpack_require__(20);
|
||
|
|
const stripEof = __webpack_require__(768);
|
||
|
|
const npmRunPath = __webpack_require__(621);
|
||
|
|
const isStream = __webpack_require__(323);
|
||
|
|
const _getStream = __webpack_require__(145);
|
||
|
|
const pFinally = __webpack_require__(697);
|
||
|
|
const onExit = __webpack_require__(260);
|
||
|
|
const errname = __webpack_require__(427);
|
||
|
|
const stdio = __webpack_require__(168);
|
||
|
|
|
||
|
|
// Cap captured output unless the caller overrides `maxBuffer`.
const TEN_MEGABYTES = 1000 * 1000 * 10;

/**
 * Normalise (cmd, args, opts) into the parsed form shared by the async and
 * sync entry points: env extension, cross-spawn parsing, execa defaults,
 * stdio normalisation and local-binary PATH support.
 */
function handleArgs(cmd, args, opts) {
    opts = Object.assign({
        extendEnv: true,
        env: {}
    }, opts);

    if (opts.extendEnv) {
        opts.env = Object.assign({}, process.env, opts.env);
    }

    let parsed;
    if (opts.__winShell === true) {
        // Internal flag set by handleShell() on Windows: bypass cross-spawn
        // and hand the command to cmd.exe verbatim.
        delete opts.__winShell;
        parsed = {
            command: cmd,
            args,
            options: opts,
            file: cmd,
            original: {
                cmd,
                args
            }
        };
    } else {
        parsed = crossSpawn._parse(cmd, args, opts);
    }

    opts = Object.assign({
        maxBuffer: TEN_MEGABYTES,
        buffer: true,
        stripEof: true,
        preferLocal: true,
        localDir: parsed.options.cwd || process.cwd(),
        encoding: 'utf8',
        reject: true,
        cleanup: true
    }, parsed.options);

    opts.stdio = stdio(opts);

    if (opts.preferLocal) {
        opts.env = npmRunPath.env(Object.assign({}, opts, {cwd: opts.localDir}));
    }

    if (opts.detached) {
        // #115
        opts.cleanup = false;
    }

    if (process.platform === 'win32' && path.basename(parsed.command) === 'cmd.exe') {
        // #116
        parsed.args.unshift('/q');
    }

    return {
        cmd: parsed.command,
        args: parsed.args,
        opts,
        parsed
    };
}
|
||
|
|
|
||
|
|
/**
 * Feed the `input` option into the child's stdin: streams are piped,
 * strings/buffers are written and stdin is closed, null/undefined is a no-op.
 */
function handleInput(spawned, input) {
    if (input === null || input === undefined) {
        return;
    }

    if (isStream(input)) {
        input.pipe(spawned.stdin);
        return;
    }

    spawned.stdin.end(input);
}
|
||
|
|
|
||
|
|
/**
 * Post-process captured output: strip the trailing EOF/newline when the
 * `stripEof` option is set and the value is non-empty.
 */
function handleOutput(opts, val) {
    if (val && opts.stripEof) {
        return stripEof(val);
    }
    return val;
}
|
||
|
|
|
||
|
|
/**
 * Run `cmd` through a shell by delegating to `fn(file, args, opts)`.
 * POSIX: /bin/sh -c. Windows: %COMSPEC% (or cmd.exe) with verbatim
 * arguments and the internal __winShell flag. An explicit `opts.shell`
 * overrides the shell binary in both cases.
 */
function handleShell(fn, cmd, opts) {
    opts = Object.assign({}, opts);

    let file = '/bin/sh';
    let args = ['-c', cmd];

    if (process.platform === 'win32') {
        opts.__winShell = true;
        file = process.env.comspec || 'cmd.exe';
        args = ['/s', '/c', `"${cmd}"`];
        opts.windowsVerbatimArguments = true;
    }

    if (opts.shell) {
        file = opts.shell;
        delete opts.shell;
    }

    return fn(file, args, opts);
}
|
||
|
|
|
||
|
|
/**
 * Collect (or just await the end of) one of the child's output streams.
 * Returns null when the stream does not exist; otherwise a promise that
 * resolves with the buffered content (or undefined when `buffer` is off).
 * Errors are re-thrown tagged with the stream name.
 */
function getStream(process, stream, {encoding, buffer, maxBuffer}) {
    if (!process[stream]) {
        return null;
    }

    let promise;
    if (!buffer) {
        // Not capturing output: just wait for the stream to finish.
        // TODO: Use `ret = util.promisify(stream.finished)(process[stream]);` when targeting Node.js 10
        promise = new Promise((resolve, reject) => {
            process[stream]
                .once('end', resolve)
                .once('error', reject);
        });
    } else if (encoding) {
        promise = _getStream(process[stream], {
            encoding,
            maxBuffer
        });
    } else {
        promise = _getStream.buffer(process[stream], {maxBuffer});
    }

    return promise.catch(err => {
        err.stream = stream;
        err.message = `${stream} ${err.message}`;
        throw err;
    });
}
|
||
|
|
|
||
|
|
/**
 * Turn a finished (and failed) spawn result into an enriched Error carrying
 * stdout/stderr, exit code, signal, the joined command line and timeout
 * status. Reuses `result.error` when the spawn itself already produced one.
 */
function makeError(result, options) {
    const {stdout, stderr, code, signal} = result;
    let err = result.error;
    const {parsed, joinedCmd} = options;
    const timedOut = options.timedOut || false;

    if (!err) {
        // Build a synthetic error whose message includes whatever output
        // was captured (inherited stdio has nothing to show).
        let output = '';
        const stdioSetting = parsed.opts.stdio;

        if (Array.isArray(stdioSetting)) {
            if (stdioSetting[2] !== 'inherit') {
                output += output.length > 0 ? stderr : `\n${stderr}`;
            }
            if (stdioSetting[1] !== 'inherit') {
                output += `\n${stdout}`;
            }
        } else if (stdioSetting !== 'inherit') {
            output = `\n${stderr}${stdout}`;
        }

        err = new Error(`Command failed: ${joinedCmd}${output}`);
        // Negative codes are errno values; translate them to their names.
        err.code = code < 0 ? errname(code) : code;
    }

    err.stdout = stdout;
    err.stderr = stderr;
    err.failed = true;
    err.signal = signal || null;
    err.cmd = joinedCmd;
    err.timedOut = timedOut;

    return err;
}
|
||
|
|
|
||
|
|
/**
 * Reconstruct a printable command line from the command and its argument
 * array (used only for error messages and the `cmd` result field).
 */
function joinCmd(cmd, args) {
    if (Array.isArray(args) && args.length > 0) {
        return cmd + ' ' + args.join(' ');
    }
    return cmd;
}
|
||
|
|
|
||
|
|
// Main entry point: spawn `cmd` and return the ChildProcess, augmented with
// `then`/`catch` so it can also be awaited as a promise of
// {stdout, stderr, code, failed, killed, signal, cmd, timedOut}.
// Failures reject with (or, when `reject: false`, resolve to) the enriched
// error built by makeError().
module.exports = (cmd, args, opts) => {
	const parsed = handleArgs(cmd, args, opts);
	const {encoding, buffer, maxBuffer} = parsed.opts;
	const joinedCmd = joinCmd(cmd, args);

	let spawned;
	try {
		spawned = childProcess.spawn(parsed.cmd, parsed.args, parsed.opts);
	} catch (err) {
		// Surface synchronous spawn failures as a rejection, like async ones.
		return Promise.reject(err);
	}

	// With `cleanup` on, kill the child if the parent process exits first.
	let removeExitHandler;
	if (parsed.opts.cleanup) {
		removeExitHandler = onExit(() => {
			spawned.kill();
		});
	}

	let timeoutId = null;
	let timedOut = false;

	// Undo the timeout timer and the exit hook once the child settles.
	const cleanup = () => {
		if (timeoutId) {
			clearTimeout(timeoutId);
			timeoutId = null;
		}

		if (removeExitHandler) {
			removeExitHandler();
		}
	};

	if (parsed.opts.timeout > 0) {
		timeoutId = setTimeout(() => {
			timeoutId = null;
			timedOut = true;
			spawned.kill(parsed.opts.killSignal);
		}, parsed.opts.timeout);
	}

	// Resolves (never rejects) with either {code, signal} or {error} once the
	// child exits, errors, or its stdin errors.
	const processDone = new Promise(resolve => {
		spawned.on('exit', (code, signal) => {
			cleanup();
			resolve({code, signal});
		});

		spawned.on('error', err => {
			cleanup();
			resolve({error: err});
		});

		if (spawned.stdin) {
			spawned.stdin.on('error', err => {
				cleanup();
				resolve({error: err});
			});
		}
	});

	// Tear down the output streams (used as the pFinally handler below).
	function destroy() {
		if (spawned.stdout) {
			spawned.stdout.destroy();
		}

		if (spawned.stderr) {
			spawned.stderr.destroy();
		}
	}

	// Wait for exit + both output streams, then build the result object or
	// the enriched error depending on the outcome and the `reject` option.
	const handlePromise = () => pFinally(Promise.all([
		processDone,
		getStream(spawned, 'stdout', {encoding, buffer, maxBuffer}),
		getStream(spawned, 'stderr', {encoding, buffer, maxBuffer})
	]).then(arr => {
		const result = arr[0];
		result.stdout = arr[1];
		result.stderr = arr[2];

		if (result.error || result.code !== 0 || result.signal !== null) {
			const err = makeError(result, {
				joinedCmd,
				parsed,
				timedOut
			});

			// TODO: missing some timeout logic for killed
			// https://github.com/nodejs/node/blob/master/lib/child_process.js#L203
			// err.killed = spawned.killed || killed;
			err.killed = err.killed || spawned.killed;

			if (!parsed.opts.reject) {
				return err;
			}

			throw err;
		}

		return {
			stdout: handleOutput(parsed.opts, result.stdout),
			stderr: handleOutput(parsed.opts, result.stderr),
			code: 0,
			failed: false,
			killed: false,
			signal: null,
			cmd: joinedCmd,
			timedOut: false
		};
	}), destroy);

	// Re-report Windows "missing executable" exits as ENOENT errors.
	crossSpawn._enoent.hookChildProcess(spawned, parsed.parsed);

	handleInput(spawned, parsed.opts.input);

	// Make the ChildProcess thenable; the promise machinery above is only
	// started when the caller actually awaits it.
	spawned.then = (onfulfilled, onrejected) => handlePromise().then(onfulfilled, onrejected);
	spawned.catch = onrejected => handlePromise().catch(onrejected);

	return spawned;
};
|
||
|
|
|
||
|
|
// Convenience wrappers resolving directly with one captured stream.
// TODO: set `stderr: 'ignore'` when that option is implemented
module.exports.stdout = (...args) => module.exports(...args).then(result => result.stdout);

// TODO: set `stdout: 'ignore'` when that option is implemented
module.exports.stderr = (...args) => module.exports(...args).then(result => result.stderr);

// Run a command line through the platform shell (see handleShell).
module.exports.shell = (cmd, opts) => handleShell(module.exports, cmd, opts);
|
||
|
|
// Synchronous variant: blocks via child_process.spawnSync and returns the
// same result shape as the async entry point (minus `killed`). Throws the
// enriched error on failure unless `reject: false`.
module.exports.sync = (cmd, args, opts) => {
	const parsed = handleArgs(cmd, args, opts);
	const joinedCmd = joinCmd(cmd, args);

	// Streams cannot be consumed synchronously.
	if (isStream(parsed.opts.input)) {
		throw new TypeError('The `input` option cannot be a stream in sync mode');
	}

	const result = childProcess.spawnSync(parsed.cmd, parsed.args, parsed.opts);
	// spawnSync reports the exit code as `status`; alias it for makeError().
	result.code = result.status;

	if (result.error || result.status !== 0 || result.signal !== null) {
		const err = makeError(result, {
			joinedCmd,
			parsed
		});

		if (!parsed.opts.reject) {
			return err;
		}

		throw err;
	}

	return {
		stdout: handleOutput(parsed.opts, result.stdout),
		stderr: handleOutput(parsed.opts, result.stderr),
		code: 0,
		failed: false,
		signal: null,
		cmd: joinedCmd,
		timedOut: false
	};
};

// Synchronous shell variant (see handleShell for platform handling).
module.exports.shellSync = (cmd, opts) => handleShell(module.exports.sync, cmd, opts);
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 966:
|
||
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
|
||
|
|
"use strict";
|
||
|
|
|
||
|
|
const {PassThrough} = __webpack_require__(413);
|
||
|
|
|
||
|
|
module.exports = options => {
|
||
|
|
options = Object.assign({}, options);
|
||
|
|
|
||
|
|
const {array} = options;
|
||
|
|
let {encoding} = options;
|
||
|
|
const buffer = encoding === 'buffer';
|
||
|
|
let objectMode = false;
|
||
|
|
|
||
|
|
if (array) {
|
||
|
|
objectMode = !(encoding || buffer);
|
||
|
|
} else {
|
||
|
|
encoding = encoding || 'utf8';
|
||
|
|
}
|
||
|
|
|
||
|
|
if (buffer) {
|
||
|
|
encoding = null;
|
||
|
|
}
|
||
|
|
|
||
|
|
let len = 0;
|
||
|
|
const ret = [];
|
||
|
|
const stream = new PassThrough({objectMode});
|
||
|
|
|
||
|
|
if (encoding) {
|
||
|
|
stream.setEncoding(encoding);
|
||
|
|
}
|
||
|
|
|
||
|
|
stream.on('data', chunk => {
|
||
|
|
ret.push(chunk);
|
||
|
|
|
||
|
|
if (objectMode) {
|
||
|
|
len = ret.length;
|
||
|
|
} else {
|
||
|
|
len += chunk.length;
|
||
|
|
}
|
||
|
|
});
|
||
|
|
|
||
|
|
stream.getBufferedValue = () => {
|
||
|
|
if (array) {
|
||
|
|
return ret;
|
||
|
|
}
|
||
|
|
|
||
|
|
return buffer ? Buffer.concat(ret, len) : ret.join('');
|
||
|
|
};
|
||
|
|
|
||
|
|
stream.getBufferedLength = () => len;
|
||
|
|
|
||
|
|
return stream;
|
||
|
|
};
|
||
|
|
|
||
|
|
|
||
|
|
/***/ }),
|
||
|
|
|
||
|
|
/***/ 969:
|
||
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
|
||
|
|
var wrappy = __webpack_require__(11)

// Public API: once(fn) and once.strict(fn), both preserving fn's own
// properties via wrappy.
module.exports = wrappy(once)
module.exports.strict = wrappy(onceStrict)

// Opt-in prototype helpers: calling `once.proto()` installs `fn.once()` and
// `fn.onceStrict()` on Function.prototype (the installer itself is guarded
// by `once` so it runs at most one time).
once.proto = once(function () {
  Object.defineProperty(Function.prototype, 'once', {
    configurable: true,
    value: function () {
      return once(this)
    }
  })

  Object.defineProperty(Function.prototype, 'onceStrict', {
    configurable: true,
    value: function () {
      return onceStrict(this)
    }
  })
})
|
||
|
|
|
||
|
|
// Wrap `fn` so it executes at most once; later calls return the first
// result. State is exposed on the wrapper as `.called` and `.value`.
function once (fn) {
  var invoke = function () {
    if (invoke.called) {
      return invoke.value
    }
    invoke.called = true
    invoke.value = fn.apply(this, arguments)
    return invoke.value
  }
  invoke.called = false
  return invoke
}
|
||
|
|
|
||
|
|
// Like once(), but a second invocation throws instead of returning the
// cached value. The error message names the wrapped function when possible.
function onceStrict (fn) {
  var invoke = function () {
    if (invoke.called) {
      throw new Error(invoke.onceError)
    }
    invoke.called = true
    invoke.value = fn.apply(this, arguments)
    return invoke.value
  }
  var name = fn.name || 'Function wrapped with `once`'
  invoke.onceError = name + " shouldn't be called more than once"
  invoke.called = false
  return invoke
}
|
||
|
|
|
||
|
|
|
||
|
|
/***/ })
|
||
|
|
|
||
|
|
/******/ });
|