From cee852004873a7660376c0596a07f5f7cde95c20 Mon Sep 17 00:00:00 2001 From: BetaHuhn Date: Sat, 29 May 2021 13:31:24 +0200 Subject: [PATCH] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20Throttle=20requests=20when?= =?UTF-8?q?=20hitting=20rate=20limit,=20fixes=20#49?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- dist/index.js | 41340 +++++++++++++------------------------------- package-lock.json | 12 +- package.json | 1 + src/git.js | 29 +- src/index.js | 4 +- 5 files changed, 11958 insertions(+), 29428 deletions(-) diff --git a/dist/index.js b/dist/index.js index a389fd85..3c24c03d 100644 --- a/dist/index.js +++ b/dist/index.js @@ -1,27 +1,32 @@ /******/ (() => { // webpackBootstrap /******/ var __webpack_modules__ = ({ -/***/ 913: -/***/ ((module) => { - -"use strict"; -module.exports = JSON.parse('{"_args":[["@octokit/rest@16.43.2","/home/betahuhn/programming/repo-file-sync-action"]],"_from":"@octokit/rest@16.43.2","_id":"@octokit/rest@16.43.2","_inBundle":false,"_integrity":"sha512-ngDBevLbBTFfrHZeiS7SAMAZ6ssuVmXuya+F/7RaVvlysgGa1JKJkKWY+jV6TCJYcW0OALfJ7nTIGXcBXzycfQ==","_location":"/@actions/github/@octokit/rest","_phantomChildren":{},"_requested":{"type":"version","registry":true,"raw":"@octokit/rest@16.43.2","name":"@octokit/rest","escapedName":"@octokit%2frest","scope":"@octokit","rawSpec":"16.43.2","saveSpec":null,"fetchSpec":"16.43.2"},"_requiredBy":["/@actions/github"],"_resolved":"https://registry.npmjs.org/@octokit/rest/-/rest-16.43.2.tgz","_spec":"16.43.2","_where":"/home/betahuhn/programming/repo-file-sync-action","author":{"name":"Gregor Martynus","url":"https://github.com/gr2m"},"bugs":{"url":"https://github.com/octokit/rest.js/issues"},"bundlesize":[{"path":"./dist/octokit-rest.min.js.gz","maxSize":"33 kB"}],"contributors":[{"name":"Mike de Boer","email":"info@mikedeboer.nl"},{"name":"Fabian Jakobs","email":"fabian@c9.io"},{"name":"Joe Gallo","email":"joe@brassafrax.com"},{"name":"Gregor Martynus","url":"https://github.com/gr2m"}],"dependencies":{"@octokit/auth-token":"^2.4.0","@octokit/plugin-paginate-rest":"^1.1.1","@octokit/plugin-request-log":"^1.0.0","@octokit/plugin-rest-endpoint-methods":"2.4.0","@octokit/request":"^5.2.0","@octokit/request-error":"^1.0.2","atob-lite":"^2.0.0","before-after-hook":"^2.0.0","btoa-lite":"^1.0.0","deprecation":"^2.0.0","lodash.get":"^4.4.2","lodash.set":"^4.3.2","lodash.uniq":"^4.5.0","octokit-pagination-methods":"^1.1.0","once":"^1.4.0","universal-user-agent":"^4.0.0"},"description":"GitHub REST API client for 
Node.js","devDependencies":{"@gimenete/type-writer":"^0.1.3","@octokit/auth":"^1.1.1","@octokit/fixtures-server":"^5.0.6","@octokit/graphql":"^4.2.0","@types/node":"^13.1.0","bundlesize":"^0.18.0","chai":"^4.1.2","compression-webpack-plugin":"^3.1.0","cypress":"^4.0.0","glob":"^7.1.2","http-proxy-agent":"^4.0.0","lodash.camelcase":"^4.3.0","lodash.merge":"^4.6.1","lodash.upperfirst":"^4.3.1","lolex":"^6.0.0","mkdirp":"^1.0.0","mocha":"^7.0.1","mustache":"^4.0.0","nock":"^11.3.3","npm-run-all":"^4.1.2","nyc":"^15.0.0","prettier":"^1.14.2","proxy":"^1.0.0","semantic-release":"^17.0.0","sinon":"^8.0.0","sinon-chai":"^3.0.0","sort-keys":"^4.0.0","string-to-arraybuffer":"^1.0.0","string-to-jsdoc-comment":"^1.0.0","typescript":"^3.3.1","webpack":"^4.0.0","webpack-bundle-analyzer":"^3.0.0","webpack-cli":"^3.0.0"},"files":["index.js","index.d.ts","lib","plugins"],"homepage":"https://github.com/octokit/rest.js#readme","keywords":["octokit","github","rest","api-client"],"license":"MIT","name":"@octokit/rest","nyc":{"ignore":["test"]},"publishConfig":{"access":"public"},"release":{"publish":["@semantic-release/npm",{"path":"@semantic-release/github","assets":["dist/*","!dist/*.map.gz"]}]},"repository":{"type":"git","url":"git+https://github.com/octokit/rest.js.git"},"scripts":{"build":"npm-run-all build:*","build:browser":"npm-run-all build:browser:*","build:browser:development":"webpack --mode development --entry . --output-library=Octokit --output=./dist/octokit-rest.js --profile --json > dist/bundle-stats.json","build:browser:production":"webpack --mode production --entry . --plugin=compression-webpack-plugin --output-library=Octokit --output-path=./dist --output-filename=octokit-rest.min.js --devtool source-map","build:ts":"npm run -s update-endpoints:typescript","coverage":"nyc report --reporter=html && open coverage/index.html","generate-bundle-report":"webpack-bundle-analyzer dist/bundle-stats.json --mode=static --no-open --report dist/bundle-report.html","lint":"prettier --check \'{lib,plugins,scripts,test}/**/*.{js,json,ts}\' \'docs/*.{js,json}\' \'docs/src/**/*\' index.js README.md package.json","lint:fix":"prettier --write \'{lib,plugins,scripts,test}/**/*.{js,json,ts}\' \'docs/*.{js,json}\' \'docs/src/**/*\' index.js README.md package.json","postvalidate:ts":"tsc --noEmit --target es6 test/typescript-validate.ts","prebuild:browser":"mkdirp dist/","pretest":"npm run -s lint","prevalidate:ts":"npm run -s build:ts","start-fixtures-server":"octokit-fixtures-server","test":"nyc mocha test/mocha-node-setup.js \\"test/*/**/*-test.js\\"","test:browser":"cypress run --browser chrome","update-endpoints":"npm-run-all update-endpoints:*","update-endpoints:fetch-json":"node scripts/update-endpoints/fetch-json","update-endpoints:typescript":"node scripts/update-endpoints/typescript","validate:ts":"tsc --target es6 --noImplicitAny index.d.ts"},"types":"index.d.ts","version":"16.43.2"}'); - -/***/ }), - /***/ 7351: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.issue = exports.issueCommand = void 0; const os = __importStar(__nccwpck_require__(2087)); const utils_1 = __nccwpck_require__(5278); /** @@ -100,6 +105,25 @@ function escapeProperty(s) { "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -109,14 +133,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; const command_1 = __nccwpck_require__(7351); const file_command_1 = __nccwpck_require__(717); const utils_1 = __nccwpck_require__(5278); @@ -183,7 +201,9 @@ function addPath(inputPath) { } exports.addPath = addPath; /** - * Gets the value of an input. The value is also trimmed. + * Gets the value of an input. + * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. + * Returns an empty string if the value is not defined. * * @param name name of the input to get * @param options optional. See InputOptions. 
@@ -194,9 +214,34 @@ function getInput(name, options) { if (options && options.required && !val) { throw new Error(`Input required and not supplied: ${name}`); } + if (options && options.trimWhitespace === false) { + return val; + } return val.trim(); } exports.getInput = getInput; +/** + * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. + * Support boolean input list: `true | True | TRUE | false | False | FALSE` . + * The return value is also in boolean type. + * ref: https://yaml.org/spec/1.2/spec.html#id2804923 + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns boolean + */ +function getBooleanInput(name, options) { + const trueValue = ['true', 'True', 'TRUE']; + const falseValue = ['false', 'False', 'FALSE']; + const val = getInput(name, options); + if (trueValue.includes(val)) + return true; + if (falseValue.includes(val)) + return false; + throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + + `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); +} +exports.getBooleanInput = getBooleanInput; /** * Sets the value of an output. * @@ -347,14 +392,27 @@ exports.getState = getState; "use strict"; // For internal use, subject to change. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.issueCommand = void 0; // We use any as a valid input type /* eslint-disable @typescript-eslint/no-explicit-any */ const fs = __importStar(__nccwpck_require__(5747)); @@ -385,6 +443,7 @@ exports.issueCommand = issueCommand; // We use any as a valid input type /* eslint-disable @typescript-eslint/no-explicit-any */ Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toCommandValue = void 0; /** * Sanitizes an input into a string so it can be passed into issueCommand safely * @param input input to sanitize into a string @@ -409,6 +468,7 @@ exports.toCommandValue = toCommandValue; "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Context = void 0; const fs_1 = __nccwpck_require__(5747); const os_1 = __nccwpck_require__(2087); class Context { @@ -416,6 +476,7 @@ class Context { * Hydrate the context from the environment */ constructor() { + var _a, _b, _c; this.payload = {}; if (process.env.GITHUB_EVENT_PATH) { if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { @@ -432,6 +493,12 @@ class Context { this.workflow = process.env.GITHUB_WORKFLOW; this.action = process.env.GITHUB_ACTION; this.actor = process.env.GITHUB_ACTOR; + this.job = 
process.env.GITHUB_JOB; + this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); + this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); + this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`; + this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; + this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`; } get issue() { const payload = this.payload; @@ -456,1393 +523,256 @@ exports.Context = Context; /***/ }), -/***/ 5438: +/***/ 7914: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); -// Originally pulled from https://github.com/JasonEtco/actions-toolkit/blob/master/src/github.ts -const graphql_1 = __nccwpck_require__(8467); -const rest_1 = __nccwpck_require__(239); -const Context = __importStar(__nccwpck_require__(4087)); +exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0; const httpClient = __importStar(__nccwpck_require__(9925)); -// We need this in order to extend Octokit -rest_1.Octokit.prototype = new rest_1.Octokit(); -exports.context = new Context.Context(); -class GitHub extends rest_1.Octokit { - constructor(token, opts) { - super(GitHub.getOctokitOptions(GitHub.disambiguate(token, opts))); - this.graphql = GitHub.getGraphQL(GitHub.disambiguate(token, opts)); - } - /** - * Disambiguates the constructor overload parameters - */ - static disambiguate(token, opts) { - return [ - typeof token === 'string' ? token : '', - typeof token === 'object' ? token : opts || {} - ]; - } - static getOctokitOptions(args) { - const token = args[0]; - const options = Object.assign({}, args[1]); // Shallow clone - don't mutate the object provided by the caller - // Base URL - GHES or Dotcom - options.baseUrl = options.baseUrl || this.getApiBaseUrl(); - // Auth - const auth = GitHub.getAuthString(token, options); - if (auth) { - options.auth = auth; - } - // Proxy - const agent = GitHub.getProxyAgent(options.baseUrl, options); - if (agent) { - // Shallow clone - don't mutate the object provided by the caller - options.request = options.request ? 
Object.assign({}, options.request) : {}; - // Set the agent - options.request.agent = agent; - } - return options; - } - static getGraphQL(args) { - const defaults = {}; - defaults.baseUrl = this.getGraphQLBaseUrl(); - const token = args[0]; - const options = args[1]; - // Authorization - const auth = this.getAuthString(token, options); - if (auth) { - defaults.headers = { - authorization: auth - }; - } - // Proxy - const agent = GitHub.getProxyAgent(defaults.baseUrl, options); - if (agent) { - defaults.request = { agent }; - } - return graphql_1.graphql.defaults(defaults); - } - static getAuthString(token, options) { - // Validate args - if (!token && !options.auth) { - throw new Error('Parameter token or opts.auth is required'); - } - else if (token && options.auth) { - throw new Error('Parameters token and opts.auth may not both be specified'); - } - return typeof options.auth === 'string' ? options.auth : `token ${token}`; - } - static getProxyAgent(destinationUrl, options) { - var _a; - if (!((_a = options.request) === null || _a === void 0 ? void 0 : _a.agent)) { - if (httpClient.getProxyUrl(destinationUrl)) { - const hc = new httpClient.HttpClient(); - return hc.getAgent(destinationUrl); - } - } - return undefined; - } - static getApiBaseUrl() { - return process.env['GITHUB_API_URL'] || 'https://api.github.com'; +function getAuthString(token, options) { + if (!token && !options.auth) { + throw new Error('Parameter token or opts.auth is required'); } - static getGraphQLBaseUrl() { - let url = process.env['GITHUB_GRAPHQL_URL'] || 'https://api.github.com/graphql'; - // Shouldn't be a trailing slash, but remove if so - if (url.endsWith('/')) { - url = url.substr(0, url.length - 1); - } - // Remove trailing "/graphql" - if (url.toUpperCase().endsWith('/GRAPHQL')) { - url = url.substr(0, url.length - '/graphql'.length); - } - return url; + else if (token && options.auth) { + throw new Error('Parameters token and opts.auth may not both be specified'); } + return typeof options.auth === 'string' ? options.auth : `token ${token}`; +} +exports.getAuthString = getAuthString; +function getProxyAgent(destinationUrl) { + const hc = new httpClient.HttpClient(); + return hc.getAgent(destinationUrl); +} +exports.getProxyAgent = getProxyAgent; +function getApiBaseUrl() { + return process.env['GITHUB_API_URL'] || 'https://api.github.com'; } -exports.GitHub = GitHub; -//# sourceMappingURL=github.js.map +exports.getApiBaseUrl = getApiBaseUrl; +//# sourceMappingURL=utils.js.map /***/ }), -/***/ 8945: -/***/ ((__unused_webpack_module, exports) => { +/***/ 3030: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; Object.defineProperty(exports, "__esModule", ({ value: true })); - -const VERSION = "1.1.2"; - +exports.getOctokitOptions = exports.GitHub = exports.context = void 0; +const Context = __importStar(__nccwpck_require__(4087)); +const Utils = __importStar(__nccwpck_require__(7914)); +// octokit + plugins +const core_1 = __nccwpck_require__(6762); +const plugin_rest_endpoint_methods_1 = __nccwpck_require__(3044); +const plugin_paginate_rest_1 = __nccwpck_require__(4193); +exports.context = new Context.Context(); +const baseUrl = Utils.getApiBaseUrl(); +const defaults = { + baseUrl, + request: { + agent: Utils.getProxyAgent(baseUrl) + } +}; +exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults); /** - * Some “list” response that can be paginated have a different response structure - * - * They have a `total_count` key in the response (search also has `incomplete_results`, - * /installation/repositories also has `repository_selection`), as well as a key with - * the list of the items which name varies from endpoint to endpoint: + * Convience function to correctly format Octokit Options to pass into the constructor. * - * - https://developer.github.com/v3/search/#example (key `items`) - * - https://developer.github.com/v3/checks/runs/#response-3 (key: `check_runs`) - * - https://developer.github.com/v3/checks/suites/#response-1 (key: `check_suites`) - * - https://developer.github.com/v3/apps/installations/#list-repositories (key: `repositories`) - * - https://developer.github.com/v3/apps/installations/#list-installations-for-a-user (key `installations`) - * - * Octokit normalizes these responses so that paginated results are always returned following - * the same structure. One challenge is that if the list response has only one page, no Link - * header is provided, so this header alone is not sufficient to check wether a response is - * paginated or not. For the exceptions with the namespace, a fallback check for the route - * paths has to be added in order to normalize the response. We cannot check for the total_count - * property because it also exists in the response of Get the combined status for a specific ref. + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set */ -const REGEX = [/^\/search\//, /^\/repos\/[^/]+\/[^/]+\/commits\/[^/]+\/(check-runs|check-suites)([^/]|$)/, /^\/installation\/repositories([^/]|$)/, /^\/user\/installations([^/]|$)/, /^\/repos\/[^/]+\/[^/]+\/actions\/secrets([^/]|$)/, /^\/repos\/[^/]+\/[^/]+\/actions\/workflows(\/[^/]+\/runs)?([^/]|$)/, /^\/repos\/[^/]+\/[^/]+\/actions\/runs(\/[^/]+\/(artifacts|jobs))?([^/]|$)/]; -function normalizePaginatedListResponse(octokit, url, response) { - const path = url.replace(octokit.request.endpoint.DEFAULTS.baseUrl, ""); - const responseNeedsNormalization = REGEX.find(regex => regex.test(path)); - if (!responseNeedsNormalization) return; // keep the additional properties intact as there is currently no other way - // to retrieve the same information. 
- - const incompleteResults = response.data.incomplete_results; - const repositorySelection = response.data.repository_selection; - const totalCount = response.data.total_count; - delete response.data.incomplete_results; - delete response.data.repository_selection; - delete response.data.total_count; - const namespaceKey = Object.keys(response.data)[0]; - const data = response.data[namespaceKey]; - response.data = data; - - if (typeof incompleteResults !== "undefined") { - response.data.incomplete_results = incompleteResults; - } - - if (typeof repositorySelection !== "undefined") { - response.data.repository_selection = repositorySelection; - } - - response.data.total_count = totalCount; - Object.defineProperty(response.data, namespaceKey, { - get() { - octokit.log.warn(`[@octokit/paginate-rest] "response.data.${namespaceKey}" is deprecated for "GET ${path}". Get the results directly from "response.data"`); - return Array.from(data); - } - - }); -} - -function iterator(octokit, route, parameters) { - const options = octokit.request.endpoint(route, parameters); - const method = options.method; - const headers = options.headers; - let url = options.url; - return { - [Symbol.asyncIterator]: () => ({ - next() { - if (!url) { - return Promise.resolve({ - done: true - }); - } - - return octokit.request({ - method, - url, - headers - }).then(response => { - normalizePaginatedListResponse(octokit, url, response); // `response.headers.link` format: - // '; rel="next", ; rel="last"' - // sets `url` to undefined if "next" URL is not present or `link` header is not set - - url = ((response.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; - return { - value: response - }; - }); - } - - }) - }; -} - -function paginate(octokit, route, parameters, mapFn) { - if (typeof parameters === "function") { - mapFn = parameters; - parameters = undefined; - } - - return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); -} - -function gather(octokit, results, iterator, mapFn) { - return iterator.next().then(result => { - if (result.done) { - return results; - } - - let earlyExit = false; - - function done() { - earlyExit = true; - } - - results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data); - - if (earlyExit) { - return results; +function getOctokitOptions(token, options) { + const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller + // Auth + const auth = Utils.getAuthString(token, opts); + if (auth) { + opts.auth = auth; } - - return gather(octokit, results, iterator, mapFn); - }); -} - -/** - * @param octokit Octokit instance - * @param options Options passed to Octokit constructor - */ - -function paginateRest(octokit) { - return { - paginate: Object.assign(paginate.bind(null, octokit), { - iterator: iterator.bind(null, octokit) - }) - }; + return opts; } -paginateRest.VERSION = VERSION; - -exports.paginateRest = paginateRest; -//# sourceMappingURL=index.js.map - +exports.getOctokitOptions = getOctokitOptions; +//# sourceMappingURL=utils.js.map /***/ }), -/***/ 7471: +/***/ 9925: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; - Object.defineProperty(exports, "__esModule", ({ value: true })); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } - -var deprecation = __nccwpck_require__(8932); -var once = _interopDefault(__nccwpck_require__(1223)); - -const logOnce = once(deprecation => console.warn(deprecation)); +const http = __nccwpck_require__(8605); +const https = __nccwpck_require__(7211); +const pm = __nccwpck_require__(6443); +let tunnel; +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; + HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; + HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; + HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; +})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); +var Headers; +(function (Headers) { + Headers["Accept"] = "accept"; + Headers["ContentType"] = "content-type"; +})(Headers = exports.Headers || (exports.Headers = {})); +var MediaTypes; +(function (MediaTypes) { + MediaTypes["ApplicationJson"] = "application/json"; +})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); /** - * Error with extra properties to help with debugging + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com */ - -class RequestError extends Error { - constructor(message, statusCode, options) { - super(message); // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - - this.name = "HttpError"; - this.status = statusCode; - Object.defineProperty(this, "code", { - get() { - logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); - return statusCode; - } - - }); - this.headers = options.headers || {}; // redact request credentials without mutating original request options - - const requestCopy = Object.assign({}, options.request); - - if (options.request.headers.authorization) { - requestCopy.headers = Object.assign({}, options.request.headers, { - authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") - }); - } - - requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit - // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications - .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended - // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header - .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); - this.request = requestCopy; - } - +function getProxyUrl(serverUrl) { + let proxyUrl = pm.getProxyUrl(new URL(serverUrl)); + return proxyUrl ? proxyUrl.href : ''; } - -exports.RequestError = RequestError; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 239: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const { requestLog } = __nccwpck_require__(8883); -const { - restEndpointMethods -} = __nccwpck_require__(3044); - -const Core = __nccwpck_require__(2348); - -const CORE_PLUGINS = [ - __nccwpck_require__(2796), - __nccwpck_require__(7662), // deprecated: remove in v17 - requestLog, - __nccwpck_require__(5155), - restEndpointMethods, - __nccwpck_require__(7291), - - __nccwpck_require__(2072) // deprecated: remove in v17 +exports.getProxyUrl = getProxyUrl; +const HttpRedirectCodes = [ + HttpCodes.MovedPermanently, + HttpCodes.ResourceMoved, + HttpCodes.SeeOther, + HttpCodes.TemporaryRedirect, + HttpCodes.PermanentRedirect ]; - -const OctokitRest = Core.plugin(CORE_PLUGINS); - -function DeprecatedOctokit(options) { - const warn = - options && options.log && options.log.warn - ? options.log.warn - : console.warn; - warn( - '[@octokit/rest] `const Octokit = require("@octokit/rest")` is deprecated. 
Use `const { Octokit } = require("@octokit/rest")` instead' - ); - return new OctokitRest(options); -} - -const Octokit = Object.assign(DeprecatedOctokit, { - Octokit: OctokitRest -}); - -Object.keys(OctokitRest).forEach(key => { - /* istanbul ignore else */ - if (OctokitRest.hasOwnProperty(key)) { - Octokit[key] = OctokitRest[key]; - } -}); - -module.exports = Octokit; - - -/***/ }), - -/***/ 7535: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = Octokit; - -const { request } = __nccwpck_require__(6234); -const Hook = __nccwpck_require__(3682); - -const parseClientOptions = __nccwpck_require__(6184); - -function Octokit(plugins, options) { - options = options || {}; - const hook = new Hook.Collection(); - const log = Object.assign( - { - debug: () => {}, - info: () => {}, - warn: console.warn, - error: console.error - }, - options && options.log - ); - const api = { - hook, - log, - request: request.defaults(parseClientOptions(options, log, hook)) - }; - - plugins.forEach(pluginFunction => pluginFunction(api, options)); - - return api; -} - - -/***/ }), - -/***/ 2348: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const factory = __nccwpck_require__(7692); - -module.exports = factory(); - - -/***/ }), - -/***/ 7692: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = factory; - -const Octokit = __nccwpck_require__(7535); -const registerPlugin = __nccwpck_require__(9927); - -function factory(plugins) { - const Api = Octokit.bind(null, plugins || []); - Api.plugin = registerPlugin.bind(null, plugins || []); - return Api; -} - - -/***/ }), - -/***/ 6184: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = parseOptions; - -const { Deprecation } = __nccwpck_require__(8932); -const { getUserAgent } = __nccwpck_require__(129); -const once = __nccwpck_require__(1223); - -const pkg = __nccwpck_require__(913); - -const deprecateOptionsTimeout = once((log, deprecation) => - log.warn(deprecation) -); -const deprecateOptionsAgent = once((log, deprecation) => log.warn(deprecation)); -const deprecateOptionsHeaders = once((log, deprecation) => - log.warn(deprecation) -); - -function parseOptions(options, log, hook) { - if (options.headers) { - options.headers = Object.keys(options.headers).reduce((newObj, key) => { - newObj[key.toLowerCase()] = options.headers[key]; - return newObj; - }, {}); - } - - const clientDefaults = { - headers: options.headers || {}, - request: options.request || {}, - mediaType: { - previews: [], - format: "" +const HttpResponseRetryCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout +]; +const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; +const ExponentialBackoffCeiling = 10; +const ExponentialBackoffTimeSlice = 5; +class HttpClientError extends Error { + constructor(message, statusCode) { + super(message); + this.name = 'HttpClientError'; + this.statusCode = statusCode; + Object.setPrototypeOf(this, HttpClientError.prototype); } - }; - - if (options.baseUrl) { - clientDefaults.baseUrl = options.baseUrl; - } - - if (options.userAgent) { - clientDefaults.headers["user-agent"] = options.userAgent; - } - - if (options.previews) { - clientDefaults.mediaType.previews = options.previews; - } - - if (options.timeZone) { - clientDefaults.headers["time-zone"] = options.timeZone; - } - - if (options.timeout) { - deprecateOptionsTimeout( - log, - new Deprecation( - "[@octokit/rest] new 
Octokit({timeout}) is deprecated. Use {request: {timeout}} instead. See https://github.com/octokit/request.js#request" - ) - ); - clientDefaults.request.timeout = options.timeout; - } - - if (options.agent) { - deprecateOptionsAgent( - log, - new Deprecation( - "[@octokit/rest] new Octokit({agent}) is deprecated. Use {request: {agent}} instead. See https://github.com/octokit/request.js#request" - ) - ); - clientDefaults.request.agent = options.agent; - } - - if (options.headers) { - deprecateOptionsHeaders( - log, - new Deprecation( - "[@octokit/rest] new Octokit({headers}) is deprecated. Use {userAgent, previews} instead. See https://github.com/octokit/request.js#request" - ) - ); - } - - const userAgentOption = clientDefaults.headers["user-agent"]; - const defaultUserAgent = `octokit.js/${pkg.version} ${getUserAgent()}`; - - clientDefaults.headers["user-agent"] = [userAgentOption, defaultUserAgent] - .filter(Boolean) - .join(" "); - - clientDefaults.request.hook = hook.bind(null, "request"); - - return clientDefaults; -} - - -/***/ }), - -/***/ 9927: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = registerPlugin; - -const factory = __nccwpck_require__(7692); - -function registerPlugin(plugins, pluginFunction) { - return factory( - plugins.includes(pluginFunction) ? plugins : plugins.concat(pluginFunction) - ); -} - - -/***/ }), - -/***/ 3086: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = authenticate; - -const { Deprecation } = __nccwpck_require__(8932); -const once = __nccwpck_require__(1223); - -const deprecateAuthenticate = once((log, deprecation) => log.warn(deprecation)); - -function authenticate(state, options) { - deprecateAuthenticate( - state.octokit.log, - new Deprecation( - '[@octokit/rest] octokit.authenticate() is deprecated. Use "auth" constructor option instead.' 
- ) - ); - - if (!options) { - state.auth = false; - return; - } - - switch (options.type) { - case "basic": - if (!options.username || !options.password) { - throw new Error( - "Basic authentication requires both a username and password to be set" - ); - } - break; - - case "oauth": - if (!options.token && !(options.key && options.secret)) { - throw new Error( - "OAuth2 authentication requires a token or key & secret to be set" - ); - } - break; - - case "token": - case "app": - if (!options.token) { - throw new Error("Token authentication requires a token to be set"); - } - break; - - default: - throw new Error( - "Invalid authentication type, must be 'basic', 'oauth', 'token' or 'app'" - ); - } - - state.auth = options; -} - - -/***/ }), - -/***/ 6888: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = authenticationBeforeRequest; - -const btoa = __nccwpck_require__(2358); -const uniq = __nccwpck_require__(8216); - -function authenticationBeforeRequest(state, options) { - if (!state.auth.type) { - return; - } - - if (state.auth.type === "basic") { - const hash = btoa(`${state.auth.username}:${state.auth.password}`); - options.headers.authorization = `Basic ${hash}`; - return; - } - - if (state.auth.type === "token") { - options.headers.authorization = `token ${state.auth.token}`; - return; - } - - if (state.auth.type === "app") { - options.headers.authorization = `Bearer ${state.auth.token}`; - const acceptHeaders = options.headers.accept - .split(",") - .concat("application/vnd.github.machine-man-preview+json"); - options.headers.accept = uniq(acceptHeaders) - .filter(Boolean) - .join(","); - return; - } - - options.url += options.url.indexOf("?") === -1 ? "?" : "&"; - - if (state.auth.token) { - options.url += `access_token=${encodeURIComponent(state.auth.token)}`; - return; - } - - const key = encodeURIComponent(state.auth.key); - const secret = encodeURIComponent(state.auth.secret); - options.url += `client_id=${key}&client_secret=${secret}`; -} - - -/***/ }), - -/***/ 7662: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = authenticationPlugin; - -const { Deprecation } = __nccwpck_require__(8932); -const once = __nccwpck_require__(1223); - -const deprecateAuthenticate = once((log, deprecation) => log.warn(deprecation)); - -const authenticate = __nccwpck_require__(3086); -const beforeRequest = __nccwpck_require__(6888); -const requestError = __nccwpck_require__(2532); - -function authenticationPlugin(octokit, options) { - if (options.auth) { - octokit.authenticate = () => { - deprecateAuthenticate( - octokit.log, - new Deprecation( - '[@octokit/rest] octokit.authenticate() is deprecated and has no effect when "auth" option is set on Octokit constructor' - ) - ); - }; - return; - } - const state = { - octokit, - auth: false - }; - octokit.authenticate = authenticate.bind(null, state); - octokit.hook.before("request", beforeRequest.bind(null, state)); - octokit.hook.error("request", requestError.bind(null, state)); -} - - -/***/ }), - -/***/ 2532: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = authenticationRequestError; - -const { RequestError } = __nccwpck_require__(7471); - -function authenticationRequestError(state, error, options) { - /* istanbul ignore next */ - if (!error.headers) throw error; - - const otpRequired = /required/.test(error.headers["x-github-otp"] || ""); - // handle "2FA required" error only - if (error.status !== 401 || !otpRequired) { - throw 
error; - } - - if ( - error.status === 401 && - otpRequired && - error.request && - error.request.headers["x-github-otp"] - ) { - throw new RequestError( - "Invalid one-time password for two-factor authentication", - 401, - { - headers: error.headers, - request: options - } - ); - } - - if (typeof state.auth.on2fa !== "function") { - throw new RequestError( - "2FA required, but options.on2fa is not a function. See https://github.com/octokit/rest.js#authentication", - 401, - { - headers: error.headers, - request: options - } - ); - } - - return Promise.resolve() - .then(() => { - return state.auth.on2fa(); - }) - .then(oneTimePassword => { - const newOptions = Object.assign(options, { - headers: Object.assign( - { "x-github-otp": oneTimePassword }, - options.headers - ) - }); - return state.octokit.request(newOptions); - }); } - - -/***/ }), - -/***/ 4729: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = authenticationBeforeRequest; - -const btoa = __nccwpck_require__(2358); - -const withAuthorizationPrefix = __nccwpck_require__(2885); - -function authenticationBeforeRequest(state, options) { - if (typeof state.auth === "string") { - options.headers.authorization = withAuthorizationPrefix(state.auth); - return; - } - - if (state.auth.username) { - const hash = btoa(`${state.auth.username}:${state.auth.password}`); - options.headers.authorization = `Basic ${hash}`; - if (state.otp) { - options.headers["x-github-otp"] = state.otp; +exports.HttpClientError = HttpClientError; +class HttpClientResponse { + constructor(message) { + this.message = message; } - return; - } - - if (state.auth.clientId) { - // There is a special case for OAuth applications, when `clientId` and `clientSecret` is passed as - // Basic Authorization instead of query parameters. The only routes where that applies share the same - // URL though: `/applications/:client_id/tokens/:access_token`. - // - // 1. [Check an authorization](https://developer.github.com/v3/oauth_authorizations/#check-an-authorization) - // 2. [Reset an authorization](https://developer.github.com/v3/oauth_authorizations/#reset-an-authorization) - // 3. [Revoke an authorization for an application](https://developer.github.com/v3/oauth_authorizations/#revoke-an-authorization-for-an-application) - // - // We identify by checking the URL. It must merge both "/applications/:client_id/tokens/:access_token" - // as well as "/applications/123/tokens/token456" - if (/\/applications\/:?[\w_]+\/tokens\/:?[\w_]+($|\?)/.test(options.url)) { - const hash = btoa(`${state.auth.clientId}:${state.auth.clientSecret}`); - options.headers.authorization = `Basic ${hash}`; - return; + readBody() { + return new Promise(async (resolve, reject) => { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); + }); } - - options.url += options.url.indexOf("?") === -1 ? "?" 
: "&"; - options.url += `client_id=${state.auth.clientId}&client_secret=${state.auth.clientSecret}`; - return; - } - - return Promise.resolve() - - .then(() => { - return state.auth(); - }) - - .then(authorization => { - options.headers.authorization = withAuthorizationPrefix(authorization); - }); -} - - -/***/ }), - -/***/ 2796: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = authenticationPlugin; - -const { createTokenAuth } = __nccwpck_require__(334); -const { Deprecation } = __nccwpck_require__(8932); -const once = __nccwpck_require__(1223); - -const beforeRequest = __nccwpck_require__(4729); -const requestError = __nccwpck_require__(1983); -const validate = __nccwpck_require__(8326); -const withAuthorizationPrefix = __nccwpck_require__(2885); - -const deprecateAuthBasic = once((log, deprecation) => log.warn(deprecation)); -const deprecateAuthObject = once((log, deprecation) => log.warn(deprecation)); - -function authenticationPlugin(octokit, options) { - // If `options.authStrategy` is set then use it and pass in `options.auth` - if (options.authStrategy) { - const auth = options.authStrategy(options.auth); - octokit.hook.wrap("request", auth.hook); - octokit.auth = auth; - return; - } - - // If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance - // is unauthenticated. The `octokit.auth()` method is a no-op and no request hook is registred. - if (!options.auth) { - octokit.auth = () => - Promise.resolve({ - type: "unauthenticated" - }); - return; - } - - const isBasicAuthString = - typeof options.auth === "string" && - /^basic/.test(withAuthorizationPrefix(options.auth)); - - // If only `options.auth` is set to a string, use the default token authentication strategy. - if (typeof options.auth === "string" && !isBasicAuthString) { - const auth = createTokenAuth(options.auth); - octokit.hook.wrap("request", auth.hook); - octokit.auth = auth; - return; - } - - // Otherwise log a deprecation message - const [deprecationMethod, deprecationMessapge] = isBasicAuthString - ? [ - deprecateAuthBasic, - 'Setting the "new Octokit({ auth })" option to a Basic Auth string is deprecated. Use https://github.com/octokit/auth-basic.js instead. See (https://octokit.github.io/rest.js/#authentication)' - ] - : [ - deprecateAuthObject, - 'Setting the "new Octokit({ auth })" option to an object without also setting the "authStrategy" option is deprecated and will be removed in v17. 
See (https://octokit.github.io/rest.js/#authentication)' - ]; - deprecationMethod( - octokit.log, - new Deprecation("[@octokit/rest] " + deprecationMessapge) - ); - - octokit.auth = () => - Promise.resolve({ - type: "deprecated", - message: deprecationMessapge - }); - - validate(options.auth); - - const state = { - octokit, - auth: options.auth - }; - - octokit.hook.before("request", beforeRequest.bind(null, state)); - octokit.hook.error("request", requestError.bind(null, state)); -} - - -/***/ }), - -/***/ 1983: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = authenticationRequestError; - -const { RequestError } = __nccwpck_require__(7471); - -function authenticationRequestError(state, error, options) { - if (!error.headers) throw error; - - const otpRequired = /required/.test(error.headers["x-github-otp"] || ""); - // handle "2FA required" error only - if (error.status !== 401 || !otpRequired) { - throw error; - } - - if ( - error.status === 401 && - otpRequired && - error.request && - error.request.headers["x-github-otp"] - ) { - if (state.otp) { - delete state.otp; // no longer valid, request again - } else { - throw new RequestError( - "Invalid one-time password for two-factor authentication", - 401, - { - headers: error.headers, - request: options - } - ); - } - } - - if (typeof state.auth.on2fa !== "function") { - throw new RequestError( - "2FA required, but options.on2fa is not a function. See https://github.com/octokit/rest.js#authentication", - 401, - { - headers: error.headers, - request: options - } - ); - } - - return Promise.resolve() - .then(() => { - return state.auth.on2fa(); - }) - .then(oneTimePassword => { - const newOptions = Object.assign(options, { - headers: Object.assign(options.headers, { - "x-github-otp": oneTimePassword - }) - }); - return state.octokit.request(newOptions).then(response => { - // If OTP still valid, then persist it for following requests - state.otp = oneTimePassword; - return response; - }); - }); -} - - -/***/ }), - -/***/ 8326: -/***/ ((module) => { - -module.exports = validateAuth; - -function validateAuth(auth) { - if (typeof auth === "string") { - return; - } - - if (typeof auth === "function") { - return; - } - - if (auth.username && auth.password) { - return; - } - - if (auth.clientId && auth.clientSecret) { - return; - } - - throw new Error(`Invalid "auth" option: ${JSON.stringify(auth)}`); -} - - -/***/ }), - -/***/ 2885: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = withAuthorizationPrefix; - -const atob = __nccwpck_require__(5224); - -const REGEX_IS_BASIC_AUTH = /^[\w-]+:/; - -function withAuthorizationPrefix(authorization) { - if (/^(basic|bearer|token) /i.test(authorization)) { - return authorization; - } - - try { - if (REGEX_IS_BASIC_AUTH.test(atob(authorization))) { - return `basic ${authorization}`; - } - } catch (error) {} - - if (authorization.split(/\./).length === 3) { - return `bearer ${authorization}`; - } - - return `token ${authorization}`; -} - - -/***/ }), - -/***/ 5155: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = paginatePlugin; - -const { paginateRest } = __nccwpck_require__(8945); - -function paginatePlugin(octokit) { - Object.assign(octokit, paginateRest(octokit)); } - - -/***/ }), - -/***/ 7291: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = octokitValidate; - -const validate = __nccwpck_require__(9324); - -function octokitValidate(octokit) { - 
octokit.hook.before("request", validate.bind(null, octokit)); +exports.HttpClientResponse = HttpClientResponse; +function isHttps(requestUrl) { + let parsedUrl = new URL(requestUrl); + return parsedUrl.protocol === 'https:'; } - - -/***/ }), - -/***/ 9324: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -module.exports = validate; - -const { RequestError } = __nccwpck_require__(7471); -const get = __nccwpck_require__(9197); -const set = __nccwpck_require__(1552); - -function validate(octokit, options) { - if (!options.request.validate) { - return; - } - const { validate: params } = options.request; - - Object.keys(params).forEach(parameterName => { - const parameter = get(params, parameterName); - - const expectedType = parameter.type; - let parentParameterName; - let parentValue; - let parentParamIsPresent = true; - let parentParameterIsArray = false; - - if (/\./.test(parameterName)) { - parentParameterName = parameterName.replace(/\.[^.]+$/, ""); - parentParameterIsArray = parentParameterName.slice(-2) === "[]"; - if (parentParameterIsArray) { - parentParameterName = parentParameterName.slice(0, -2); - } - parentValue = get(options, parentParameterName); - parentParamIsPresent = - parentParameterName === "headers" || - (typeof parentValue === "object" && parentValue !== null); - } - - const values = parentParameterIsArray - ? (get(options, parentParameterName) || []).map( - value => value[parameterName.split(/\./).pop()] - ) - : [get(options, parameterName)]; - - values.forEach((value, i) => { - const valueIsPresent = typeof value !== "undefined"; - const valueIsNull = value === null; - const currentParameterName = parentParameterIsArray - ? parameterName.replace(/\[\]/, `[${i}]`) - : parameterName; - - if (!parameter.required && !valueIsPresent) { - return; - } - - // if the parent parameter is of type object but allows null - // then the child parameters can be ignored - if (!parentParamIsPresent) { - return; - } - - if (parameter.allowNull && valueIsNull) { - return; - } - - if (!parameter.allowNull && valueIsNull) { - throw new RequestError( - `'${currentParameterName}' cannot be null`, - 400, - { - request: options - } - ); - } - - if (parameter.required && !valueIsPresent) { - throw new RequestError( - `Empty value for parameter '${currentParameterName}': ${JSON.stringify( - value - )}`, - 400, - { - request: options - } - ); - } - - // parse to integer before checking for enum - // so that string "1" will match enum with number 1 - if (expectedType === "integer") { - const unparsedValue = value; - value = parseInt(value, 10); - if (isNaN(value)) { - throw new RequestError( - `Invalid value for parameter '${currentParameterName}': ${JSON.stringify( - unparsedValue - )} is NaN`, - 400, - { - request: options +exports.isHttps = isHttps; +class HttpClient { + constructor(userAgent, handlers, requestOptions) { + this._ignoreSslError = false; + this._allowRedirects = true; + this._allowRedirectDowngrade = false; + this._maxRedirects = 50; + this._allowRetries = false; + this._maxRetries = 1; + this._keepAlive = false; + this._disposed = false; + this.userAgent = userAgent; + this.handlers = handlers || []; + this.requestOptions = requestOptions; + if (requestOptions) { + if (requestOptions.ignoreSslError != null) { + this._ignoreSslError = requestOptions.ignoreSslError; } - ); - } - } - - if (parameter.enum && parameter.enum.indexOf(String(value)) === -1) { - throw new RequestError( - `Invalid value for parameter '${currentParameterName}': 
${JSON.stringify( - value - )}`, - 400, - { - request: options - } - ); - } - - if (parameter.validation) { - const regex = new RegExp(parameter.validation); - if (!regex.test(value)) { - throw new RequestError( - `Invalid value for parameter '${currentParameterName}': ${JSON.stringify( - value - )}`, - 400, - { - request: options + this._socketTimeout = requestOptions.socketTimeout; + if (requestOptions.allowRedirects != null) { + this._allowRedirects = requestOptions.allowRedirects; } - ); - } - } - - if (expectedType === "object" && typeof value === "string") { - try { - value = JSON.parse(value); - } catch (exception) { - throw new RequestError( - `JSON parse error of value for parameter '${currentParameterName}': ${JSON.stringify( - value - )}`, - 400, - { - request: options - } - ); - } - } - - set(options, parameter.mapTo || currentParameterName, value); - }); - }); - - return options; -} - - -/***/ }), - -/***/ 129: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - -var osName = _interopDefault(__nccwpck_require__(4824)); - -function getUserAgent() { - try { - return `Node.js/${process.version.substr(1)} (${osName()}; ${process.arch})`; - } catch (error) { - if (/wmic os get Caption/.test(error.message)) { - return "Windows "; - } - - throw error; - } -} - -exports.getUserAgent = getUserAgent; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 9925: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const http = __nccwpck_require__(8605); -const https = __nccwpck_require__(7211); -const pm = __nccwpck_require__(6443); -let tunnel; -var HttpCodes; -(function (HttpCodes) { - HttpCodes[HttpCodes["OK"] = 200] = "OK"; - HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; - HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; - HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; - HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; - HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; - HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; - HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; - HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; - HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; - HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; - HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; - HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; - HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; - HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; - HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; - HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; - HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; - HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; - HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; - HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; - HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; - HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; - HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; - HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; - 
HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; - HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; -})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); -var Headers; -(function (Headers) { - Headers["Accept"] = "accept"; - Headers["ContentType"] = "content-type"; -})(Headers = exports.Headers || (exports.Headers = {})); -var MediaTypes; -(function (MediaTypes) { - MediaTypes["ApplicationJson"] = "application/json"; -})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); -/** - * Returns the proxy URL, depending upon the supplied url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com - */ -function getProxyUrl(serverUrl) { - let proxyUrl = pm.getProxyUrl(new URL(serverUrl)); - return proxyUrl ? proxyUrl.href : ''; -} -exports.getProxyUrl = getProxyUrl; -const HttpRedirectCodes = [ - HttpCodes.MovedPermanently, - HttpCodes.ResourceMoved, - HttpCodes.SeeOther, - HttpCodes.TemporaryRedirect, - HttpCodes.PermanentRedirect -]; -const HttpResponseRetryCodes = [ - HttpCodes.BadGateway, - HttpCodes.ServiceUnavailable, - HttpCodes.GatewayTimeout -]; -const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; -const ExponentialBackoffCeiling = 10; -const ExponentialBackoffTimeSlice = 5; -class HttpClientError extends Error { - constructor(message, statusCode) { - super(message); - this.name = 'HttpClientError'; - this.statusCode = statusCode; - Object.setPrototypeOf(this, HttpClientError.prototype); - } -} -exports.HttpClientError = HttpClientError; -class HttpClientResponse { - constructor(message) { - this.message = message; - } - readBody() { - return new Promise(async (resolve, reject) => { - let output = Buffer.alloc(0); - this.message.on('data', (chunk) => { - output = Buffer.concat([output, chunk]); - }); - this.message.on('end', () => { - resolve(output.toString()); - }); - }); - } -} -exports.HttpClientResponse = HttpClientResponse; -function isHttps(requestUrl) { - let parsedUrl = new URL(requestUrl); - return parsedUrl.protocol === 'https:'; -} -exports.isHttps = isHttps; -class HttpClient { - constructor(userAgent, handlers, requestOptions) { - this._ignoreSslError = false; - this._allowRedirects = true; - this._allowRedirectDowngrade = false; - this._maxRedirects = 50; - this._allowRetries = false; - this._maxRetries = 1; - this._keepAlive = false; - this._disposed = false; - this.userAgent = userAgent; - this.handlers = handlers || []; - this.requestOptions = requestOptions; - if (requestOptions) { - if (requestOptions.ignoreSslError != null) { - this._ignoreSslError = requestOptions.ignoreSslError; - } - this._socketTimeout = requestOptions.socketTimeout; - if (requestOptions.allowRedirects != null) { - this._allowRedirects = requestOptions.allowRedirects; - } - if (requestOptions.allowRedirectDowngrade != null) { - this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; - } - if (requestOptions.maxRedirects != null) { - this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); - } - if (requestOptions.keepAlive != null) { - this._keepAlive = requestOptions.keepAlive; - } - if (requestOptions.allowRetries != null) { - this._allowRetries = requestOptions.allowRetries; - } - if (requestOptions.maxRetries != null) { - this._maxRetries = requestOptions.maxRetries; + if (requestOptions.allowRedirectDowngrade != null) { + this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; + } + if 
(requestOptions.maxRedirects != null) { + this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + } + if (requestOptions.keepAlive != null) { + this._keepAlive = requestOptions.keepAlive; + } + if (requestOptions.allowRetries != null) { + this._allowRetries = requestOptions.allowRetries; + } + if (requestOptions.maxRetries != null) { + this._maxRetries = requestOptions.maxRetries; } } } @@ -2137,7 +1067,9 @@ class HttpClient { maxSockets: maxSockets, keepAlive: this._keepAlive, proxy: { - proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`, + ...((proxyUrl.username || proxyUrl.password) && { + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` + }), host: proxyUrl.hostname, port: proxyUrl.port } @@ -2225,30550 +1157,14085 @@ class HttpClient { if (obj && obj.message) { msg = obj.message; } - else if (contents && contents.length > 0) { - // it may be the case that the exception is in the body message as string - msg = contents; - } - else { - msg = 'Failed request: (' + statusCode + ')'; - } - let err = new HttpClientError(msg, statusCode); - err.result = response.result; - reject(err); - } - else { - resolve(response); - } - }); - } -} -exports.HttpClient = HttpClient; - - -/***/ }), - -/***/ 6443: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -function getProxyUrl(reqUrl) { - let usingSsl = reqUrl.protocol === 'https:'; - let proxyUrl; - if (checkBypass(reqUrl)) { - return proxyUrl; - } - let proxyVar; - if (usingSsl) { - proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY']; - } - else { - proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY']; - } - if (proxyVar) { - proxyUrl = new URL(proxyVar); - } - return proxyUrl; -} -exports.getProxyUrl = getProxyUrl; -function checkBypass(reqUrl) { - if (!reqUrl.hostname) { - return false; - } - let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; - if (!noProxy) { - return false; - } - // Determine the request port - let reqPort; - if (reqUrl.port) { - reqPort = Number(reqUrl.port); - } - else if (reqUrl.protocol === 'http:') { - reqPort = 80; - } - else if (reqUrl.protocol === 'https:') { - reqPort = 443; - } - // Format the request hostname and hostname with port - let upperReqHosts = [reqUrl.hostname.toUpperCase()]; - if (typeof reqPort === 'number') { - upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); - } - // Compare request host against noproxy - for (let upperNoProxyItem of noProxy - .split(',') - .map(x => x.trim().toUpperCase()) - .filter(x => x)) { - if (upperReqHosts.some(x => x === upperNoProxyItem)) { - return true; - } - } - return false; -} -exports.checkBypass = checkBypass; - - -/***/ }), - -/***/ 334: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -async function auth(token) { - const tokenType = token.split(/\./).length === 3 ? "app" : /^v\d+\./.test(token) ? 
"installation" : "oauth"; - return { - type: "token", - token: token, - tokenType - }; -} - -/** - * Prefix token for usage in the Authorization header - * - * @param token OAuth token or JSON Web Token - */ -function withAuthorizationPrefix(token) { - if (token.split(/\./).length === 3) { - return `bearer ${token}`; - } - - return `token ${token}`; -} - -async function hook(token, request, route, parameters) { - const endpoint = request.endpoint.merge(route, parameters); - endpoint.headers.authorization = withAuthorizationPrefix(token); - return request(endpoint); -} - -const createTokenAuth = function createTokenAuth(token) { - if (!token) { - throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); - } - - if (typeof token !== "string") { - throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); - } - - token = token.replace(/^(token|bearer) +/i, ""); - return Object.assign(auth.bind(null, token), { - hook: hook.bind(null, token) - }); -}; - -exports.createTokenAuth = createTokenAuth; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 9440: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var isPlainObject = __nccwpck_require__(3287); -var universalUserAgent = __nccwpck_require__(5030); - -function lowercaseKeys(object) { - if (!object) { - return {}; - } - - return Object.keys(object).reduce((newObj, key) => { - newObj[key.toLowerCase()] = object[key]; - return newObj; - }, {}); -} - -function mergeDeep(defaults, options) { - const result = Object.assign({}, defaults); - Object.keys(options).forEach(key => { - if (isPlainObject.isPlainObject(options[key])) { - if (!(key in defaults)) Object.assign(result, { - [key]: options[key] - });else result[key] = mergeDeep(defaults[key], options[key]); - } else { - Object.assign(result, { - [key]: options[key] - }); - } - }); - return result; -} - -function removeUndefinedProperties(obj) { - for (const key in obj) { - if (obj[key] === undefined) { - delete obj[key]; - } - } - - return obj; -} - -function merge(defaults, route, options) { - if (typeof route === "string") { - let [method, url] = route.split(" "); - options = Object.assign(url ? { - method, - url - } : { - url: method - }, options); - } else { - options = Object.assign({}, route); - } // lowercase header names before merging with defaults to avoid duplicates - - - options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging - - removeUndefinedProperties(options); - removeUndefinedProperties(options.headers); - const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten - - if (defaults && defaults.mediaType.previews.length) { - mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); - } - - mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); - return mergedOptions; -} - -function addQueryParameters(url, parameters) { - const separator = /\?/.test(url) ? 
"&" : "?"; - const names = Object.keys(parameters); - - if (names.length === 0) { - return url; - } - - return url + separator + names.map(name => { - if (name === "q") { - return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); - } - - return `${name}=${encodeURIComponent(parameters[name])}`; - }).join("&"); -} - -const urlVariableRegex = /\{[^}]+\}/g; - -function removeNonChars(variableName) { - return variableName.replace(/^\W+|\W+$/g, "").split(/,/); -} - -function extractUrlVariableNames(url) { - const matches = url.match(urlVariableRegex); - - if (!matches) { - return []; - } - - return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); -} - -function omit(object, keysToOmit) { - return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { - obj[key] = object[key]; - return obj; - }, {}); -} - -// Based on https://github.com/bramstein/url-template, licensed under BSD -// TODO: create separate package. -// -// Copyright (c) 2012-2014, Bram Stein -// All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// 1. Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// 2. Redistributions in binary form must reproduce the above copyright -// notice, this list of conditions and the following disclaimer in the -// documentation and/or other materials provided with the distribution. -// 3. The name of the author may not be used to endorse or promote products -// derived from this software without specific prior written permission. -// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED -// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO -// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, -// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY -// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, -// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -/* istanbul ignore file */ -function encodeReserved(str) { - return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { - if (!/%[0-9A-Fa-f]/.test(part)) { - part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); - } - - return part; - }).join(""); -} - -function encodeUnreserved(str) { - return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { - return "%" + c.charCodeAt(0).toString(16).toUpperCase(); - }); -} - -function encodeValue(operator, value, key) { - value = operator === "+" || operator === "#" ? 
encodeReserved(value) : encodeUnreserved(value); - - if (key) { - return encodeUnreserved(key) + "=" + value; - } else { - return value; - } -} - -function isDefined(value) { - return value !== undefined && value !== null; -} - -function isKeyOperator(operator) { - return operator === ";" || operator === "&" || operator === "?"; -} - -function getValues(context, operator, key, modifier) { - var value = context[key], - result = []; - - if (isDefined(value) && value !== "") { - if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { - value = value.toString(); - - if (modifier && modifier !== "*") { - value = value.substring(0, parseInt(modifier, 10)); - } - - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); - } else { - if (modifier === "*") { - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - result.push(encodeValue(operator, value[k], k)); - } - }); - } - } else { - const tmp = []; - - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - tmp.push(encodeValue(operator, value)); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - tmp.push(encodeUnreserved(k)); - tmp.push(encodeValue(operator, value[k].toString())); - } - }); - } - - if (isKeyOperator(operator)) { - result.push(encodeUnreserved(key) + "=" + tmp.join(",")); - } else if (tmp.length !== 0) { - result.push(tmp.join(",")); - } - } - } - } else { - if (operator === ";") { - if (isDefined(value)) { - result.push(encodeUnreserved(key)); - } - } else if (value === "" && (operator === "&" || operator === "?")) { - result.push(encodeUnreserved(key) + "="); - } else if (value === "") { - result.push(""); - } - } - - return result; -} - -function parseUrl(template) { - return { - expand: expand.bind(null, template) - }; -} - -function expand(template, context) { - var operators = ["+", "#", ".", "/", ";", "?", "&"]; - return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { - if (expression) { - let operator = ""; - const values = []; - - if (operators.indexOf(expression.charAt(0)) !== -1) { - operator = expression.charAt(0); - expression = expression.substr(1); - } - - expression.split(/,/g).forEach(function (variable) { - var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); - }); - - if (operator && operator !== "+") { - var separator = ","; - - if (operator === "?") { - separator = "&"; - } else if (operator !== "#") { - separator = operator; - } - - return (values.length !== 0 ? 
operator : "") + values.join(separator); - } else { - return values.join(","); - } - } else { - return encodeReserved(literal); - } - }); -} - -function parse(options) { - // https://fetch.spec.whatwg.org/#methods - let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible - - let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); - let headers = Object.assign({}, options.headers); - let body; - let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later - - const urlVariableNames = extractUrlVariableNames(url); - url = parseUrl(url).expand(parameters); - - if (!/^http/.test(url)) { - url = options.baseUrl + url; - } - - const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); - const remainingParameters = omit(parameters, omittedParameters); - const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); - - if (!isBinaryRequest) { - if (options.mediaType.format) { - // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw - headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); - } - - if (options.mediaType.previews.length) { - const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; - headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { - const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; - return `application/vnd.github.${preview}-preview${format}`; - }).join(","); - } - } // for GET/HEAD requests, set URL query parameters from remaining parameters - // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters - - - if (["GET", "HEAD"].includes(method)) { - url = addQueryParameters(url, remainingParameters); - } else { - if ("data" in remainingParameters) { - body = remainingParameters.data; - } else { - if (Object.keys(remainingParameters).length) { - body = remainingParameters; - } else { - headers["content-length"] = 0; - } - } - } // default content-type for JSON if body is set - - - if (!headers["content-type"] && typeof body !== "undefined") { - headers["content-type"] = "application/json; charset=utf-8"; - } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. - // fetch does not allow to set `content-length` header, but we can set body to an empty string - - - if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { - body = ""; - } // Only return body/request keys if present - - - return Object.assign({ - method, - url, - headers - }, typeof body !== "undefined" ? { - body - } : null, options.request ? 
{ - request: options.request - } : null); -} - -function endpointWithDefaults(defaults, route, options) { - return parse(merge(defaults, route, options)); -} - -function withDefaults(oldDefaults, newDefaults) { - const DEFAULTS = merge(oldDefaults, newDefaults); - const endpoint = endpointWithDefaults.bind(null, DEFAULTS); - return Object.assign(endpoint, { - DEFAULTS, - defaults: withDefaults.bind(null, DEFAULTS), - merge: merge.bind(null, DEFAULTS), - parse - }); -} - -const VERSION = "6.0.10"; - -const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. -// So we use RequestParameters and add method as additional required property. - -const DEFAULTS = { - method: "GET", - baseUrl: "https://api.github.com", - headers: { - accept: "application/vnd.github.v3+json", - "user-agent": userAgent - }, - mediaType: { - format: "", - previews: [] - } -}; - -const endpoint = withDefaults(null, DEFAULTS); - -exports.endpoint = endpoint; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 8467: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var request = __nccwpck_require__(6234); -var universalUserAgent = __nccwpck_require__(5030); - -const VERSION = "4.5.8"; - -class GraphqlError extends Error { - constructor(request, response) { - const message = response.data.errors[0].message; - super(message); - Object.assign(this, response.data); - Object.assign(this, { - headers: response.headers - }); - this.name = "GraphqlError"; - this.request = request; // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - } - -} - -const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"]; -const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; -function graphql(request, query, options) { - if (typeof query === "string" && options && "query" in options) { - return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`)); - } - - const parsedOptions = typeof query === "string" ? 
Object.assign({ - query - }, options) : query; - const requestOptions = Object.keys(parsedOptions).reduce((result, key) => { - if (NON_VARIABLE_OPTIONS.includes(key)) { - result[key] = parsedOptions[key]; - return result; - } - - if (!result.variables) { - result.variables = {}; - } - - result.variables[key] = parsedOptions[key]; - return result; - }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix - // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451 - - const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl; - - if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { - requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); - } - - return request(requestOptions).then(response => { - if (response.data.errors) { - const headers = {}; - - for (const key of Object.keys(response.headers)) { - headers[key] = response.headers[key]; - } - - throw new GraphqlError(requestOptions, { - headers, - data: response.data - }); - } - - return response.data.data; - }); -} - -function withDefaults(request$1, newDefaults) { - const newRequest = request$1.defaults(newDefaults); - - const newApi = (query, options) => { - return graphql(newRequest, query, options); - }; - - return Object.assign(newApi, { - defaults: withDefaults.bind(null, newRequest), - endpoint: request.request.endpoint - }); -} - -const graphql$1 = withDefaults(request.request, { - headers: { - "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}` - }, - method: "POST", - url: "/graphql" -}); -function withCustomRequest(customRequest) { - return withDefaults(customRequest, { - method: "POST", - url: "/graphql" - }); -} - -exports.graphql = graphql$1; -exports.withCustomRequest = withCustomRequest; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 8883: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -const VERSION = "1.0.2"; - -/** - * @param octokit Octokit instance - * @param options Options passed to Octokit constructor - */ - -function requestLog(octokit) { - octokit.hook.wrap("request", (request, options) => { - octokit.log.debug("request", options); - const start = Date.now(); - const requestOptions = octokit.request.endpoint.parse(options); - const path = requestOptions.url.replace(options.baseUrl, ""); - return request(options).then(response => { - octokit.log.info(`${requestOptions.method} ${path} - ${response.status} in ${Date.now() - start}ms`); - return response; - }).catch(error => { - octokit.log.info(`${requestOptions.method} ${path} - ${error.status} in ${Date.now() - start}ms`); - throw error; - }); - }); -} -requestLog.VERSION = VERSION; - -exports.requestLog = requestLog; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 3044: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var deprecation = __nccwpck_require__(8932); - -var endpointsByScope = { - actions: { - cancelWorkflowRun: { - method: "POST", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - run_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/actions/runs/:run_id/cancel" - }, - createOrUpdateSecretForRepo: { - method: "PUT", - params: { - encrypted_value: { - type: "string" - }, - key_id: { - type: "string" - }, - name: { - required: true, - type: "string" 
- }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/secrets/:name" - }, - createRegistrationToken: { - method: "POST", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/runners/registration-token" - }, - createRemoveToken: { - method: "POST", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/runners/remove-token" - }, - deleteArtifact: { - method: "DELETE", - params: { - artifact_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/artifacts/:artifact_id" - }, - deleteSecretFromRepo: { - method: "DELETE", - params: { - name: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/secrets/:name" - }, - downloadArtifact: { - method: "GET", - params: { - archive_format: { - required: true, - type: "string" - }, - artifact_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/artifacts/:artifact_id/:archive_format" - }, - getArtifact: { - method: "GET", - params: { - artifact_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/artifacts/:artifact_id" - }, - getPublicKey: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/secrets/public-key" - }, - getSecret: { - method: "GET", - params: { - name: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/secrets/:name" - }, - getSelfHostedRunner: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - runner_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/actions/runners/:runner_id" - }, - getWorkflow: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - workflow_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/actions/workflows/:workflow_id" - }, - getWorkflowJob: { - method: "GET", - params: { - job_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/jobs/:job_id" - }, - getWorkflowRun: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - run_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/actions/runs/:run_id" - }, - listDownloadsForSelfHostedRunnerApplication: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - 
required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/runners/downloads" - }, - listJobsForWorkflowRun: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - run_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/actions/runs/:run_id/jobs" - }, - listRepoWorkflowRuns: { - method: "GET", - params: { - actor: { - type: "string" - }, - branch: { - type: "string" - }, - event: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - status: { - enum: ["completed", "status", "conclusion"], - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/runs" - }, - listRepoWorkflows: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/workflows" - }, - listSecretsForRepo: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/secrets" - }, - listSelfHostedRunnersForRepo: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/runners" - }, - listWorkflowJobLogs: { - method: "GET", - params: { - job_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/jobs/:job_id/logs" - }, - listWorkflowRunArtifacts: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - run_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/actions/runs/:run_id/artifacts" - }, - listWorkflowRunLogs: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - run_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/actions/runs/:run_id/logs" - }, - listWorkflowRuns: { - method: "GET", - params: { - actor: { - type: "string" - }, - branch: { - type: "string" - }, - event: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - status: { - enum: ["completed", "status", "conclusion"], - type: "string" - }, - workflow_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/actions/workflows/:workflow_id/runs" - }, - reRunWorkflow: { - method: "POST", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - run_id: { - required: true, - type: "integer" - } - }, - url: 
"/repos/:owner/:repo/actions/runs/:run_id/rerun" - }, - removeSelfHostedRunner: { - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - runner_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/actions/runners/:runner_id" - } - }, - activity: { - checkStarringRepo: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/user/starred/:owner/:repo" - }, - deleteRepoSubscription: { - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/subscription" - }, - deleteThreadSubscription: { - method: "DELETE", - params: { - thread_id: { - required: true, - type: "integer" - } - }, - url: "/notifications/threads/:thread_id/subscription" - }, - getRepoSubscription: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/subscription" - }, - getThread: { - method: "GET", - params: { - thread_id: { - required: true, - type: "integer" - } - }, - url: "/notifications/threads/:thread_id" - }, - getThreadSubscription: { - method: "GET", - params: { - thread_id: { - required: true, - type: "integer" - } - }, - url: "/notifications/threads/:thread_id/subscription" - }, - listEventsForOrg: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/events/orgs/:org" - }, - listEventsForUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/events" - }, - listFeeds: { - method: "GET", - params: {}, - url: "/feeds" - }, - listNotifications: { - method: "GET", - params: { - all: { - type: "boolean" - }, - before: { - type: "string" - }, - page: { - type: "integer" - }, - participating: { - type: "boolean" - }, - per_page: { - type: "integer" - }, - since: { - type: "string" - } - }, - url: "/notifications" - }, - listNotificationsForRepo: { - method: "GET", - params: { - all: { - type: "boolean" - }, - before: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - participating: { - type: "boolean" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - since: { - type: "string" - } - }, - url: "/repos/:owner/:repo/notifications" - }, - listPublicEvents: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/events" - }, - listPublicEventsForOrg: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/orgs/:org/events" - }, - listPublicEventsForRepoNetwork: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/networks/:owner/:repo/events" - }, - listPublicEventsForUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - username: { - 
required: true, - type: "string" - } - }, - url: "/users/:username/events/public" - }, - listReceivedEventsForUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/received_events" - }, - listReceivedPublicEventsForUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/received_events/public" - }, - listRepoEvents: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/events" - }, - listReposStarredByAuthenticatedUser: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - sort: { - enum: ["created", "updated"], - type: "string" - } - }, - url: "/user/starred" - }, - listReposStarredByUser: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - sort: { - enum: ["created", "updated"], - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/starred" - }, - listReposWatchedByUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/subscriptions" - }, - listStargazersForRepo: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/stargazers" - }, - listWatchedReposForAuthenticatedUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/subscriptions" - }, - listWatchersForRepo: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/subscribers" - }, - markAsRead: { - method: "PUT", - params: { - last_read_at: { - type: "string" - } - }, - url: "/notifications" - }, - markNotificationsAsReadForRepo: { - method: "PUT", - params: { - last_read_at: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/notifications" - }, - markThreadAsRead: { - method: "PATCH", - params: { - thread_id: { - required: true, - type: "integer" - } - }, - url: "/notifications/threads/:thread_id" - }, - setRepoSubscription: { - method: "PUT", - params: { - ignored: { - type: "boolean" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - subscribed: { - type: "boolean" - } - }, - url: "/repos/:owner/:repo/subscription" - }, - setThreadSubscription: { - method: "PUT", - params: { - ignored: { - type: "boolean" - }, - thread_id: { - required: true, - type: "integer" - } - }, - url: "/notifications/threads/:thread_id/subscription" - }, - starRepo: { - method: "PUT", - params: { - owner: { - required: true, - type: 
"string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/user/starred/:owner/:repo" - }, - unstarRepo: { - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/user/starred/:owner/:repo" - } - }, - apps: { - addRepoToInstallation: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "PUT", - params: { - installation_id: { - required: true, - type: "integer" - }, - repository_id: { - required: true, - type: "integer" - } - }, - url: "/user/installations/:installation_id/repositories/:repository_id" - }, - checkAccountIsAssociatedWithAny: { - method: "GET", - params: { - account_id: { - required: true, - type: "integer" - } - }, - url: "/marketplace_listing/accounts/:account_id" - }, - checkAccountIsAssociatedWithAnyStubbed: { - method: "GET", - params: { - account_id: { - required: true, - type: "integer" - } - }, - url: "/marketplace_listing/stubbed/accounts/:account_id" - }, - checkAuthorization: { - deprecated: "octokit.apps.checkAuthorization() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#check-an-authorization", - method: "GET", - params: { - access_token: { - required: true, - type: "string" - }, - client_id: { - required: true, - type: "string" - } - }, - url: "/applications/:client_id/tokens/:access_token" - }, - checkToken: { - headers: { - accept: "application/vnd.github.doctor-strange-preview+json" - }, - method: "POST", - params: { - access_token: { - type: "string" - }, - client_id: { - required: true, - type: "string" - } - }, - url: "/applications/:client_id/token" - }, - createContentAttachment: { - headers: { - accept: "application/vnd.github.corsair-preview+json" - }, - method: "POST", - params: { - body: { - required: true, - type: "string" - }, - content_reference_id: { - required: true, - type: "integer" - }, - title: { - required: true, - type: "string" - } - }, - url: "/content_references/:content_reference_id/attachments" - }, - createFromManifest: { - headers: { - accept: "application/vnd.github.fury-preview+json" - }, - method: "POST", - params: { - code: { - required: true, - type: "string" - } - }, - url: "/app-manifests/:code/conversions" - }, - createInstallationToken: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "POST", - params: { - installation_id: { - required: true, - type: "integer" - }, - permissions: { - type: "object" - }, - repository_ids: { - type: "integer[]" - } - }, - url: "/app/installations/:installation_id/access_tokens" - }, - deleteAuthorization: { - headers: { - accept: "application/vnd.github.doctor-strange-preview+json" - }, - method: "DELETE", - params: { - access_token: { - type: "string" - }, - client_id: { - required: true, - type: "string" - } - }, - url: "/applications/:client_id/grant" - }, - deleteInstallation: { - headers: { - accept: "application/vnd.github.gambit-preview+json,application/vnd.github.machine-man-preview+json" - }, - method: "DELETE", - params: { - installation_id: { - required: true, - type: "integer" - } - }, - url: "/app/installations/:installation_id" - }, - deleteToken: { - headers: { - accept: "application/vnd.github.doctor-strange-preview+json" - }, - method: "DELETE", - params: { - access_token: { - type: "string" - }, - client_id: { - required: true, - type: "string" - } - }, - url: "/applications/:client_id/token" - }, - findOrgInstallation: { - deprecated: 
"octokit.apps.findOrgInstallation() has been renamed to octokit.apps.getOrgInstallation() (2019-04-10)", - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: { - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/installation" - }, - findRepoInstallation: { - deprecated: "octokit.apps.findRepoInstallation() has been renamed to octokit.apps.getRepoInstallation() (2019-04-10)", - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/installation" - }, - findUserInstallation: { - deprecated: "octokit.apps.findUserInstallation() has been renamed to octokit.apps.getUserInstallation() (2019-04-10)", - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: { - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/installation" - }, - getAuthenticated: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: {}, - url: "/app" - }, - getBySlug: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: { - app_slug: { - required: true, - type: "string" - } - }, - url: "/apps/:app_slug" - }, - getInstallation: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: { - installation_id: { - required: true, - type: "integer" - } - }, - url: "/app/installations/:installation_id" - }, - getOrgInstallation: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: { - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/installation" - }, - getRepoInstallation: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/installation" - }, - getUserInstallation: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: { - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/installation" - }, - listAccountsUserOrOrgOnPlan: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - plan_id: { - required: true, - type: "integer" - }, - sort: { - enum: ["created", "updated"], - type: "string" - } - }, - url: "/marketplace_listing/plans/:plan_id/accounts" - }, - listAccountsUserOrOrgOnPlanStubbed: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - plan_id: { - required: true, - type: "integer" - }, - sort: { - enum: ["created", "updated"], - type: "string" - } - }, - url: "/marketplace_listing/stubbed/plans/:plan_id/accounts" - }, - listInstallationReposForAuthenticatedUser: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: { - installation_id: { - required: true, - type: "integer" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/installations/:installation_id/repositories" - }, - listInstallations: { - 
headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/app/installations" - }, - listInstallationsForAuthenticatedUser: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/installations" - }, - listMarketplacePurchasesForAuthenticatedUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/marketplace_purchases" - }, - listMarketplacePurchasesForAuthenticatedUserStubbed: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/marketplace_purchases/stubbed" - }, - listPlans: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/marketplace_listing/plans" - }, - listPlansStubbed: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/marketplace_listing/stubbed/plans" - }, - listRepos: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/installation/repositories" - }, - removeRepoFromInstallation: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "DELETE", - params: { - installation_id: { - required: true, - type: "integer" - }, - repository_id: { - required: true, - type: "integer" - } - }, - url: "/user/installations/:installation_id/repositories/:repository_id" - }, - resetAuthorization: { - deprecated: "octokit.apps.resetAuthorization() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#reset-an-authorization", - method: "POST", - params: { - access_token: { - required: true, - type: "string" - }, - client_id: { - required: true, - type: "string" - } - }, - url: "/applications/:client_id/tokens/:access_token" - }, - resetToken: { - headers: { - accept: "application/vnd.github.doctor-strange-preview+json" - }, - method: "PATCH", - params: { - access_token: { - type: "string" - }, - client_id: { - required: true, - type: "string" - } - }, - url: "/applications/:client_id/token" - }, - revokeAuthorizationForApplication: { - deprecated: "octokit.apps.revokeAuthorizationForApplication() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#revoke-an-authorization-for-an-application", - method: "DELETE", - params: { - access_token: { - required: true, - type: "string" - }, - client_id: { - required: true, - type: "string" - } - }, - url: "/applications/:client_id/tokens/:access_token" - }, - revokeGrantForApplication: { - deprecated: "octokit.apps.revokeGrantForApplication() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#revoke-a-grant-for-an-application", - method: "DELETE", - params: { - access_token: { - required: true, - type: "string" - }, - client_id: { - required: true, - type: "string" - } - }, - url: "/applications/:client_id/grants/:access_token" - }, - revokeInstallationToken: { - headers: { - accept: "application/vnd.github.gambit-preview+json" - }, - method: "DELETE", - params: {}, - url: "/installation/token" - } - }, - checks: { - create: { - headers: { - accept: "application/vnd.github.antiope-preview+json" - 
}, - method: "POST", - params: { - actions: { - type: "object[]" - }, - "actions[].description": { - required: true, - type: "string" - }, - "actions[].identifier": { - required: true, - type: "string" - }, - "actions[].label": { - required: true, - type: "string" - }, - completed_at: { - type: "string" - }, - conclusion: { - enum: ["success", "failure", "neutral", "cancelled", "timed_out", "action_required"], - type: "string" - }, - details_url: { - type: "string" - }, - external_id: { - type: "string" - }, - head_sha: { - required: true, - type: "string" - }, - name: { - required: true, - type: "string" - }, - output: { - type: "object" - }, - "output.annotations": { - type: "object[]" - }, - "output.annotations[].annotation_level": { - enum: ["notice", "warning", "failure"], - required: true, - type: "string" - }, - "output.annotations[].end_column": { - type: "integer" - }, - "output.annotations[].end_line": { - required: true, - type: "integer" - }, - "output.annotations[].message": { - required: true, - type: "string" - }, - "output.annotations[].path": { - required: true, - type: "string" - }, - "output.annotations[].raw_details": { - type: "string" - }, - "output.annotations[].start_column": { - type: "integer" - }, - "output.annotations[].start_line": { - required: true, - type: "integer" - }, - "output.annotations[].title": { - type: "string" - }, - "output.images": { - type: "object[]" - }, - "output.images[].alt": { - required: true, - type: "string" - }, - "output.images[].caption": { - type: "string" - }, - "output.images[].image_url": { - required: true, - type: "string" - }, - "output.summary": { - required: true, - type: "string" - }, - "output.text": { - type: "string" - }, - "output.title": { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - started_at: { - type: "string" - }, - status: { - enum: ["queued", "in_progress", "completed"], - type: "string" - } - }, - url: "/repos/:owner/:repo/check-runs" - }, - createSuite: { - headers: { - accept: "application/vnd.github.antiope-preview+json" - }, - method: "POST", - params: { - head_sha: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/check-suites" - }, - get: { - headers: { - accept: "application/vnd.github.antiope-preview+json" - }, - method: "GET", - params: { - check_run_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/check-runs/:check_run_id" - }, - getSuite: { - headers: { - accept: "application/vnd.github.antiope-preview+json" - }, - method: "GET", - params: { - check_suite_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/check-suites/:check_suite_id" - }, - listAnnotations: { - headers: { - accept: "application/vnd.github.antiope-preview+json" - }, - method: "GET", - params: { - check_run_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/check-runs/:check_run_id/annotations" - }, - listForRef: { - headers: { - accept: 
"application/vnd.github.antiope-preview+json" - }, - method: "GET", - params: { - check_name: { - type: "string" - }, - filter: { - enum: ["latest", "all"], - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - ref: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - status: { - enum: ["queued", "in_progress", "completed"], - type: "string" - } - }, - url: "/repos/:owner/:repo/commits/:ref/check-runs" - }, - listForSuite: { - headers: { - accept: "application/vnd.github.antiope-preview+json" - }, - method: "GET", - params: { - check_name: { - type: "string" - }, - check_suite_id: { - required: true, - type: "integer" - }, - filter: { - enum: ["latest", "all"], - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - status: { - enum: ["queued", "in_progress", "completed"], - type: "string" - } - }, - url: "/repos/:owner/:repo/check-suites/:check_suite_id/check-runs" - }, - listSuitesForRef: { - headers: { - accept: "application/vnd.github.antiope-preview+json" - }, - method: "GET", - params: { - app_id: { - type: "integer" - }, - check_name: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - ref: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/commits/:ref/check-suites" - }, - rerequestSuite: { - headers: { - accept: "application/vnd.github.antiope-preview+json" - }, - method: "POST", - params: { - check_suite_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/check-suites/:check_suite_id/rerequest" - }, - setSuitesPreferences: { - headers: { - accept: "application/vnd.github.antiope-preview+json" - }, - method: "PATCH", - params: { - auto_trigger_checks: { - type: "object[]" - }, - "auto_trigger_checks[].app_id": { - required: true, - type: "integer" - }, - "auto_trigger_checks[].setting": { - required: true, - type: "boolean" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/check-suites/preferences" - }, - update: { - headers: { - accept: "application/vnd.github.antiope-preview+json" - }, - method: "PATCH", - params: { - actions: { - type: "object[]" - }, - "actions[].description": { - required: true, - type: "string" - }, - "actions[].identifier": { - required: true, - type: "string" - }, - "actions[].label": { - required: true, - type: "string" - }, - check_run_id: { - required: true, - type: "integer" - }, - completed_at: { - type: "string" - }, - conclusion: { - enum: ["success", "failure", "neutral", "cancelled", "timed_out", "action_required"], - type: "string" - }, - details_url: { - type: "string" - }, - external_id: { - type: "string" - }, - name: { - type: "string" - }, - output: { - type: "object" - }, - "output.annotations": { - type: "object[]" - }, - "output.annotations[].annotation_level": { - enum: ["notice", "warning", "failure"], - required: true, - type: "string" - }, - "output.annotations[].end_column": { - type: "integer" - }, - "output.annotations[].end_line": { - required: true, - type: "integer" - }, - 
"output.annotations[].message": { - required: true, - type: "string" - }, - "output.annotations[].path": { - required: true, - type: "string" - }, - "output.annotations[].raw_details": { - type: "string" - }, - "output.annotations[].start_column": { - type: "integer" - }, - "output.annotations[].start_line": { - required: true, - type: "integer" - }, - "output.annotations[].title": { - type: "string" - }, - "output.images": { - type: "object[]" - }, - "output.images[].alt": { - required: true, - type: "string" - }, - "output.images[].caption": { - type: "string" - }, - "output.images[].image_url": { - required: true, - type: "string" - }, - "output.summary": { - required: true, - type: "string" - }, - "output.text": { - type: "string" - }, - "output.title": { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - started_at: { - type: "string" - }, - status: { - enum: ["queued", "in_progress", "completed"], - type: "string" - } - }, - url: "/repos/:owner/:repo/check-runs/:check_run_id" - } - }, - codesOfConduct: { - getConductCode: { - headers: { - accept: "application/vnd.github.scarlet-witch-preview+json" - }, - method: "GET", - params: { - key: { - required: true, - type: "string" - } - }, - url: "/codes_of_conduct/:key" - }, - getForRepo: { - headers: { - accept: "application/vnd.github.scarlet-witch-preview+json" - }, - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/community/code_of_conduct" - }, - listConductCodes: { - headers: { - accept: "application/vnd.github.scarlet-witch-preview+json" - }, - method: "GET", - params: {}, - url: "/codes_of_conduct" - } - }, - emojis: { - get: { - method: "GET", - params: {}, - url: "/emojis" - } - }, - gists: { - checkIsStarred: { - method: "GET", - params: { - gist_id: { - required: true, - type: "string" - } - }, - url: "/gists/:gist_id/star" - }, - create: { - method: "POST", - params: { - description: { - type: "string" - }, - files: { - required: true, - type: "object" - }, - "files.content": { - type: "string" - }, - public: { - type: "boolean" - } - }, - url: "/gists" - }, - createComment: { - method: "POST", - params: { - body: { - required: true, - type: "string" - }, - gist_id: { - required: true, - type: "string" - } - }, - url: "/gists/:gist_id/comments" - }, - delete: { - method: "DELETE", - params: { - gist_id: { - required: true, - type: "string" - } - }, - url: "/gists/:gist_id" - }, - deleteComment: { - method: "DELETE", - params: { - comment_id: { - required: true, - type: "integer" - }, - gist_id: { - required: true, - type: "string" - } - }, - url: "/gists/:gist_id/comments/:comment_id" - }, - fork: { - method: "POST", - params: { - gist_id: { - required: true, - type: "string" - } - }, - url: "/gists/:gist_id/forks" - }, - get: { - method: "GET", - params: { - gist_id: { - required: true, - type: "string" - } - }, - url: "/gists/:gist_id" - }, - getComment: { - method: "GET", - params: { - comment_id: { - required: true, - type: "integer" - }, - gist_id: { - required: true, - type: "string" - } - }, - url: "/gists/:gist_id/comments/:comment_id" - }, - getRevision: { - method: "GET", - params: { - gist_id: { - required: true, - type: "string" - }, - sha: { - required: true, - type: "string" - } - }, - url: "/gists/:gist_id/:sha" - }, - list: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - 
since: { - type: "string" - } - }, - url: "/gists" - }, - listComments: { - method: "GET", - params: { - gist_id: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/gists/:gist_id/comments" - }, - listCommits: { - method: "GET", - params: { - gist_id: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/gists/:gist_id/commits" - }, - listForks: { - method: "GET", - params: { - gist_id: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/gists/:gist_id/forks" - }, - listPublic: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - since: { - type: "string" - } - }, - url: "/gists/public" - }, - listPublicForUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - since: { - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/gists" - }, - listStarred: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - since: { - type: "string" - } - }, - url: "/gists/starred" - }, - star: { - method: "PUT", - params: { - gist_id: { - required: true, - type: "string" - } - }, - url: "/gists/:gist_id/star" - }, - unstar: { - method: "DELETE", - params: { - gist_id: { - required: true, - type: "string" - } - }, - url: "/gists/:gist_id/star" - }, - update: { - method: "PATCH", - params: { - description: { - type: "string" - }, - files: { - type: "object" - }, - "files.content": { - type: "string" - }, - "files.filename": { - type: "string" - }, - gist_id: { - required: true, - type: "string" - } - }, - url: "/gists/:gist_id" - }, - updateComment: { - method: "PATCH", - params: { - body: { - required: true, - type: "string" - }, - comment_id: { - required: true, - type: "integer" - }, - gist_id: { - required: true, - type: "string" - } - }, - url: "/gists/:gist_id/comments/:comment_id" - } - }, - git: { - createBlob: { - method: "POST", - params: { - content: { - required: true, - type: "string" - }, - encoding: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/git/blobs" - }, - createCommit: { - method: "POST", - params: { - author: { - type: "object" - }, - "author.date": { - type: "string" - }, - "author.email": { - type: "string" - }, - "author.name": { - type: "string" - }, - committer: { - type: "object" - }, - "committer.date": { - type: "string" - }, - "committer.email": { - type: "string" - }, - "committer.name": { - type: "string" - }, - message: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - parents: { - required: true, - type: "string[]" - }, - repo: { - required: true, - type: "string" - }, - signature: { - type: "string" - }, - tree: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/git/commits" - }, - createRef: { - method: "POST", - params: { - owner: { - required: true, - type: "string" - }, - ref: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - sha: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/git/refs" - }, - createTag: { - method: "POST", - params: { - message: { - required: true, - type: "string" - }, - object: { - required: 
true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - tag: { - required: true, - type: "string" - }, - tagger: { - type: "object" - }, - "tagger.date": { - type: "string" - }, - "tagger.email": { - type: "string" - }, - "tagger.name": { - type: "string" - }, - type: { - enum: ["commit", "tree", "blob"], - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/git/tags" - }, - createTree: { - method: "POST", - params: { - base_tree: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - tree: { - required: true, - type: "object[]" - }, - "tree[].content": { - type: "string" - }, - "tree[].mode": { - enum: ["100644", "100755", "040000", "160000", "120000"], - type: "string" - }, - "tree[].path": { - type: "string" - }, - "tree[].sha": { - allowNull: true, - type: "string" - }, - "tree[].type": { - enum: ["blob", "tree", "commit"], - type: "string" - } - }, - url: "/repos/:owner/:repo/git/trees" - }, - deleteRef: { - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - ref: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/git/refs/:ref" - }, - getBlob: { - method: "GET", - params: { - file_sha: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/git/blobs/:file_sha" - }, - getCommit: { - method: "GET", - params: { - commit_sha: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/git/commits/:commit_sha" - }, - getRef: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - ref: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/git/ref/:ref" - }, - getTag: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - tag_sha: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/git/tags/:tag_sha" - }, - getTree: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - recursive: { - enum: ["1"], - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - tree_sha: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/git/trees/:tree_sha" - }, - listMatchingRefs: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - ref: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/git/matching-refs/:ref" - }, - listRefs: { - method: "GET", - params: { - namespace: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/git/refs/:namespace" - }, - updateRef: { - method: "PATCH", - params: { - force: { - type: "boolean" - }, - owner: { - required: true, - type: "string" - }, - ref: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - sha: { - required: true, - type: 
"string" - } - }, - url: "/repos/:owner/:repo/git/refs/:ref" - } - }, - gitignore: { - getTemplate: { - method: "GET", - params: { - name: { - required: true, - type: "string" - } - }, - url: "/gitignore/templates/:name" - }, - listTemplates: { - method: "GET", - params: {}, - url: "/gitignore/templates" - } - }, - interactions: { - addOrUpdateRestrictionsForOrg: { - headers: { - accept: "application/vnd.github.sombra-preview+json" - }, - method: "PUT", - params: { - limit: { - enum: ["existing_users", "contributors_only", "collaborators_only"], - required: true, - type: "string" - }, - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/interaction-limits" - }, - addOrUpdateRestrictionsForRepo: { - headers: { - accept: "application/vnd.github.sombra-preview+json" - }, - method: "PUT", - params: { - limit: { - enum: ["existing_users", "contributors_only", "collaborators_only"], - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/interaction-limits" - }, - getRestrictionsForOrg: { - headers: { - accept: "application/vnd.github.sombra-preview+json" - }, - method: "GET", - params: { - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/interaction-limits" - }, - getRestrictionsForRepo: { - headers: { - accept: "application/vnd.github.sombra-preview+json" - }, - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/interaction-limits" - }, - removeRestrictionsForOrg: { - headers: { - accept: "application/vnd.github.sombra-preview+json" - }, - method: "DELETE", - params: { - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/interaction-limits" - }, - removeRestrictionsForRepo: { - headers: { - accept: "application/vnd.github.sombra-preview+json" - }, - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/interaction-limits" - } - }, - issues: { - addAssignees: { - method: "POST", - params: { - assignees: { - type: "string[]" - }, - issue_number: { - required: true, - type: "integer" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/assignees" - }, - addLabels: { - method: "POST", - params: { - issue_number: { - required: true, - type: "integer" - }, - labels: { - required: true, - type: "string[]" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/labels" - }, - checkAssignee: { - method: "GET", - params: { - assignee: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/assignees/:assignee" - }, - create: { - method: "POST", - params: { - assignee: { - type: "string" - }, - assignees: { - type: "string[]" - }, - body: { - type: "string" - }, - labels: { - type: "string[]" - }, - milestone: { - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - 
title: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues" - }, - createComment: { - method: "POST", - params: { - body: { - required: true, - type: "string" - }, - issue_number: { - required: true, - type: "integer" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/comments" - }, - createLabel: { - method: "POST", - params: { - color: { - required: true, - type: "string" - }, - description: { - type: "string" - }, - name: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/labels" - }, - createMilestone: { - method: "POST", - params: { - description: { - type: "string" - }, - due_on: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - state: { - enum: ["open", "closed"], - type: "string" - }, - title: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/milestones" - }, - deleteComment: { - method: "DELETE", - params: { - comment_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/comments/:comment_id" - }, - deleteLabel: { - method: "DELETE", - params: { - name: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/labels/:name" - }, - deleteMilestone: { - method: "DELETE", - params: { - milestone_number: { - required: true, - type: "integer" - }, - number: { - alias: "milestone_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/milestones/:milestone_number" - }, - get: { - method: "GET", - params: { - issue_number: { - required: true, - type: "integer" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number" - }, - getComment: { - method: "GET", - params: { - comment_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/comments/:comment_id" - }, - getEvent: { - method: "GET", - params: { - event_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/events/:event_id" - }, - getLabel: { - method: "GET", - params: { - name: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/labels/:name" - }, - getMilestone: { - method: "GET", - params: { - milestone_number: { - required: true, - type: "integer" - }, - number: { - alias: "milestone_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: 
"/repos/:owner/:repo/milestones/:milestone_number" - }, - list: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - filter: { - enum: ["assigned", "created", "mentioned", "subscribed", "all"], - type: "string" - }, - labels: { - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - since: { - type: "string" - }, - sort: { - enum: ["created", "updated", "comments"], - type: "string" - }, - state: { - enum: ["open", "closed", "all"], - type: "string" - } - }, - url: "/issues" - }, - listAssignees: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/assignees" - }, - listComments: { - method: "GET", - params: { - issue_number: { - required: true, - type: "integer" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - since: { - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/comments" - }, - listCommentsForRepo: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - since: { - type: "string" - }, - sort: { - enum: ["created", "updated"], - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/comments" - }, - listEvents: { - method: "GET", - params: { - issue_number: { - required: true, - type: "integer" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/events" - }, - listEventsForRepo: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/events" - }, - listEventsForTimeline: { - headers: { - accept: "application/vnd.github.mockingbird-preview+json" - }, - method: "GET", - params: { - issue_number: { - required: true, - type: "integer" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/timeline" - }, - listForAuthenticatedUser: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - filter: { - enum: ["assigned", "created", "mentioned", "subscribed", "all"], - type: "string" - }, - labels: { - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - since: { - type: "string" - }, - sort: { - enum: ["created", "updated", "comments"], - type: "string" - }, - state: { - enum: ["open", "closed", "all"], - type: "string" - } - }, - url: "/user/issues" - }, - listForOrg: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - filter: { - enum: ["assigned", "created", 
"mentioned", "subscribed", "all"], - type: "string" - }, - labels: { - type: "string" - }, - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - since: { - type: "string" - }, - sort: { - enum: ["created", "updated", "comments"], - type: "string" - }, - state: { - enum: ["open", "closed", "all"], - type: "string" - } - }, - url: "/orgs/:org/issues" - }, - listForRepo: { - method: "GET", - params: { - assignee: { - type: "string" - }, - creator: { - type: "string" - }, - direction: { - enum: ["asc", "desc"], - type: "string" - }, - labels: { - type: "string" - }, - mentioned: { - type: "string" - }, - milestone: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - since: { - type: "string" - }, - sort: { - enum: ["created", "updated", "comments"], - type: "string" - }, - state: { - enum: ["open", "closed", "all"], - type: "string" - } - }, - url: "/repos/:owner/:repo/issues" - }, - listLabelsForMilestone: { - method: "GET", - params: { - milestone_number: { - required: true, - type: "integer" - }, - number: { - alias: "milestone_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/milestones/:milestone_number/labels" - }, - listLabelsForRepo: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/labels" - }, - listLabelsOnIssue: { - method: "GET", - params: { - issue_number: { - required: true, - type: "integer" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/labels" - }, - listMilestonesForRepo: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - sort: { - enum: ["due_on", "completeness"], - type: "string" - }, - state: { - enum: ["open", "closed", "all"], - type: "string" - } - }, - url: "/repos/:owner/:repo/milestones" - }, - lock: { - method: "PUT", - params: { - issue_number: { - required: true, - type: "integer" - }, - lock_reason: { - enum: ["off-topic", "too heated", "resolved", "spam"], - type: "string" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/lock" - }, - removeAssignees: { - method: "DELETE", - params: { - assignees: { - type: "string[]" - }, - issue_number: { - required: true, - type: "integer" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: 
"/repos/:owner/:repo/issues/:issue_number/assignees" - }, - removeLabel: { - method: "DELETE", - params: { - issue_number: { - required: true, - type: "integer" - }, - name: { - required: true, - type: "string" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/labels/:name" - }, - removeLabels: { - method: "DELETE", - params: { - issue_number: { - required: true, - type: "integer" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/labels" - }, - replaceLabels: { - method: "PUT", - params: { - issue_number: { - required: true, - type: "integer" - }, - labels: { - type: "string[]" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/labels" - }, - unlock: { - method: "DELETE", - params: { - issue_number: { - required: true, - type: "integer" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/lock" - }, - update: { - method: "PATCH", - params: { - assignee: { - type: "string" - }, - assignees: { - type: "string[]" - }, - body: { - type: "string" - }, - issue_number: { - required: true, - type: "integer" - }, - labels: { - type: "string[]" - }, - milestone: { - allowNull: true, - type: "integer" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - state: { - enum: ["open", "closed"], - type: "string" - }, - title: { - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number" - }, - updateComment: { - method: "PATCH", - params: { - body: { - required: true, - type: "string" - }, - comment_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/comments/:comment_id" - }, - updateLabel: { - method: "PATCH", - params: { - color: { - type: "string" - }, - current_name: { - required: true, - type: "string" - }, - description: { - type: "string" - }, - name: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/labels/:current_name" - }, - updateMilestone: { - method: "PATCH", - params: { - description: { - type: "string" - }, - due_on: { - type: "string" - }, - milestone_number: { - required: true, - type: "integer" - }, - number: { - alias: "milestone_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - state: { - enum: ["open", "closed"], - type: "string" - }, - title: { - type: "string" - } - }, - url: "/repos/:owner/:repo/milestones/:milestone_number" - } - }, - licenses: { - get: { - method: "GET", - params: { - license: { - required: true, - type: "string" - } - }, - url: 
"/licenses/:license" - }, - getForRepo: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/license" - }, - list: { - deprecated: "octokit.licenses.list() has been renamed to octokit.licenses.listCommonlyUsed() (2019-03-05)", - method: "GET", - params: {}, - url: "/licenses" - }, - listCommonlyUsed: { - method: "GET", - params: {}, - url: "/licenses" - } - }, - markdown: { - render: { - method: "POST", - params: { - context: { - type: "string" - }, - mode: { - enum: ["markdown", "gfm"], - type: "string" - }, - text: { - required: true, - type: "string" - } - }, - url: "/markdown" - }, - renderRaw: { - headers: { - "content-type": "text/plain; charset=utf-8" - }, - method: "POST", - params: { - data: { - mapTo: "data", - required: true, - type: "string" - } - }, - url: "/markdown/raw" - } - }, - meta: { - get: { - method: "GET", - params: {}, - url: "/meta" - } - }, - migrations: { - cancelImport: { - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/import" - }, - deleteArchiveForAuthenticatedUser: { - headers: { - accept: "application/vnd.github.wyandotte-preview+json" - }, - method: "DELETE", - params: { - migration_id: { - required: true, - type: "integer" - } - }, - url: "/user/migrations/:migration_id/archive" - }, - deleteArchiveForOrg: { - headers: { - accept: "application/vnd.github.wyandotte-preview+json" - }, - method: "DELETE", - params: { - migration_id: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/migrations/:migration_id/archive" - }, - downloadArchiveForOrg: { - headers: { - accept: "application/vnd.github.wyandotte-preview+json" - }, - method: "GET", - params: { - migration_id: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/migrations/:migration_id/archive" - }, - getArchiveForAuthenticatedUser: { - headers: { - accept: "application/vnd.github.wyandotte-preview+json" - }, - method: "GET", - params: { - migration_id: { - required: true, - type: "integer" - } - }, - url: "/user/migrations/:migration_id/archive" - }, - getArchiveForOrg: { - deprecated: "octokit.migrations.getArchiveForOrg() has been renamed to octokit.migrations.downloadArchiveForOrg() (2020-01-27)", - headers: { - accept: "application/vnd.github.wyandotte-preview+json" - }, - method: "GET", - params: { - migration_id: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/migrations/:migration_id/archive" - }, - getCommitAuthors: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - since: { - type: "string" - } - }, - url: "/repos/:owner/:repo/import/authors" - }, - getImportProgress: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/import" - }, - getLargeFiles: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/import/large_files" - }, - getStatusForAuthenticatedUser: { - headers: { - accept: "application/vnd.github.wyandotte-preview+json" - }, - method: "GET", - params: { - migration_id: { 
- required: true, - type: "integer" - } - }, - url: "/user/migrations/:migration_id" - }, - getStatusForOrg: { - headers: { - accept: "application/vnd.github.wyandotte-preview+json" - }, - method: "GET", - params: { - migration_id: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/migrations/:migration_id" - }, - listForAuthenticatedUser: { - headers: { - accept: "application/vnd.github.wyandotte-preview+json" - }, - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/migrations" - }, - listForOrg: { - headers: { - accept: "application/vnd.github.wyandotte-preview+json" - }, - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/orgs/:org/migrations" - }, - listReposForOrg: { - headers: { - accept: "application/vnd.github.wyandotte-preview+json" - }, - method: "GET", - params: { - migration_id: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/orgs/:org/migrations/:migration_id/repositories" - }, - listReposForUser: { - headers: { - accept: "application/vnd.github.wyandotte-preview+json" - }, - method: "GET", - params: { - migration_id: { - required: true, - type: "integer" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/:migration_id/repositories" - }, - mapCommitAuthor: { - method: "PATCH", - params: { - author_id: { - required: true, - type: "integer" - }, - email: { - type: "string" - }, - name: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/import/authors/:author_id" - }, - setLfsPreference: { - method: "PATCH", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - use_lfs: { - enum: ["opt_in", "opt_out"], - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/import/lfs" - }, - startForAuthenticatedUser: { - method: "POST", - params: { - exclude_attachments: { - type: "boolean" - }, - lock_repositories: { - type: "boolean" - }, - repositories: { - required: true, - type: "string[]" - } - }, - url: "/user/migrations" - }, - startForOrg: { - method: "POST", - params: { - exclude_attachments: { - type: "boolean" - }, - lock_repositories: { - type: "boolean" - }, - org: { - required: true, - type: "string" - }, - repositories: { - required: true, - type: "string[]" - } - }, - url: "/orgs/:org/migrations" - }, - startImport: { - method: "PUT", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - tfvc_project: { - type: "string" - }, - vcs: { - enum: ["subversion", "git", "mercurial", "tfvc"], - type: "string" - }, - vcs_password: { - type: "string" - }, - vcs_url: { - required: true, - type: "string" - }, - vcs_username: { - type: "string" - } - }, - url: "/repos/:owner/:repo/import" - }, - unlockRepoForAuthenticatedUser: { - headers: { - accept: "application/vnd.github.wyandotte-preview+json" - }, - method: "DELETE", - params: { - migration_id: { - required: true, - type: "integer" - }, - repo_name: { - required: true, - type: "string" - } - }, - url: "/user/migrations/:migration_id/repos/:repo_name/lock" - }, - unlockRepoForOrg: { - headers: { - 
accept: "application/vnd.github.wyandotte-preview+json" - }, - method: "DELETE", - params: { - migration_id: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - repo_name: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/migrations/:migration_id/repos/:repo_name/lock" - }, - updateImport: { - method: "PATCH", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - vcs_password: { - type: "string" - }, - vcs_username: { - type: "string" - } - }, - url: "/repos/:owner/:repo/import" - } - }, - oauthAuthorizations: { - checkAuthorization: { - deprecated: "octokit.oauthAuthorizations.checkAuthorization() has been renamed to octokit.apps.checkAuthorization() (2019-11-05)", - method: "GET", - params: { - access_token: { - required: true, - type: "string" - }, - client_id: { - required: true, - type: "string" - } - }, - url: "/applications/:client_id/tokens/:access_token" - }, - createAuthorization: { - deprecated: "octokit.oauthAuthorizations.createAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#create-a-new-authorization", - method: "POST", - params: { - client_id: { - type: "string" - }, - client_secret: { - type: "string" - }, - fingerprint: { - type: "string" - }, - note: { - required: true, - type: "string" - }, - note_url: { - type: "string" - }, - scopes: { - type: "string[]" - } - }, - url: "/authorizations" - }, - deleteAuthorization: { - deprecated: "octokit.oauthAuthorizations.deleteAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#delete-an-authorization", - method: "DELETE", - params: { - authorization_id: { - required: true, - type: "integer" - } - }, - url: "/authorizations/:authorization_id" - }, - deleteGrant: { - deprecated: "octokit.oauthAuthorizations.deleteGrant() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#delete-a-grant", - method: "DELETE", - params: { - grant_id: { - required: true, - type: "integer" - } - }, - url: "/applications/grants/:grant_id" - }, - getAuthorization: { - deprecated: "octokit.oauthAuthorizations.getAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-a-single-authorization", - method: "GET", - params: { - authorization_id: { - required: true, - type: "integer" - } - }, - url: "/authorizations/:authorization_id" - }, - getGrant: { - deprecated: "octokit.oauthAuthorizations.getGrant() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-a-single-grant", - method: "GET", - params: { - grant_id: { - required: true, - type: "integer" - } - }, - url: "/applications/grants/:grant_id" - }, - getOrCreateAuthorizationForApp: { - deprecated: "octokit.oauthAuthorizations.getOrCreateAuthorizationForApp() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-or-create-an-authorization-for-a-specific-app", - method: "PUT", - params: { - client_id: { - required: true, - type: "string" - }, - client_secret: { - required: true, - type: "string" - }, - fingerprint: { - type: "string" - }, - note: { - type: "string" - }, - note_url: { - type: "string" - }, - scopes: { - type: "string[]" - } - }, - url: "/authorizations/clients/:client_id" - }, - getOrCreateAuthorizationForAppAndFingerprint: { - deprecated: "octokit.oauthAuthorizations.getOrCreateAuthorizationForAppAndFingerprint() is deprecated, see 
https://developer.github.com/v3/oauth_authorizations/#get-or-create-an-authorization-for-a-specific-app-and-fingerprint", - method: "PUT", - params: { - client_id: { - required: true, - type: "string" - }, - client_secret: { - required: true, - type: "string" - }, - fingerprint: { - required: true, - type: "string" - }, - note: { - type: "string" - }, - note_url: { - type: "string" - }, - scopes: { - type: "string[]" - } - }, - url: "/authorizations/clients/:client_id/:fingerprint" - }, - getOrCreateAuthorizationForAppFingerprint: { - deprecated: "octokit.oauthAuthorizations.getOrCreateAuthorizationForAppFingerprint() has been renamed to octokit.oauthAuthorizations.getOrCreateAuthorizationForAppAndFingerprint() (2018-12-27)", - method: "PUT", - params: { - client_id: { - required: true, - type: "string" - }, - client_secret: { - required: true, - type: "string" - }, - fingerprint: { - required: true, - type: "string" - }, - note: { - type: "string" - }, - note_url: { - type: "string" - }, - scopes: { - type: "string[]" - } - }, - url: "/authorizations/clients/:client_id/:fingerprint" - }, - listAuthorizations: { - deprecated: "octokit.oauthAuthorizations.listAuthorizations() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#list-your-authorizations", - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/authorizations" - }, - listGrants: { - deprecated: "octokit.oauthAuthorizations.listGrants() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#list-your-grants", - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/applications/grants" - }, - resetAuthorization: { - deprecated: "octokit.oauthAuthorizations.resetAuthorization() has been renamed to octokit.apps.resetAuthorization() (2019-11-05)", - method: "POST", - params: { - access_token: { - required: true, - type: "string" - }, - client_id: { - required: true, - type: "string" - } - }, - url: "/applications/:client_id/tokens/:access_token" - }, - revokeAuthorizationForApplication: { - deprecated: "octokit.oauthAuthorizations.revokeAuthorizationForApplication() has been renamed to octokit.apps.revokeAuthorizationForApplication() (2019-11-05)", - method: "DELETE", - params: { - access_token: { - required: true, - type: "string" - }, - client_id: { - required: true, - type: "string" - } - }, - url: "/applications/:client_id/tokens/:access_token" - }, - revokeGrantForApplication: { - deprecated: "octokit.oauthAuthorizations.revokeGrantForApplication() has been renamed to octokit.apps.revokeGrantForApplication() (2019-11-05)", - method: "DELETE", - params: { - access_token: { - required: true, - type: "string" - }, - client_id: { - required: true, - type: "string" - } - }, - url: "/applications/:client_id/grants/:access_token" - }, - updateAuthorization: { - deprecated: "octokit.oauthAuthorizations.updateAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#update-an-existing-authorization", - method: "PATCH", - params: { - add_scopes: { - type: "string[]" - }, - authorization_id: { - required: true, - type: "integer" - }, - fingerprint: { - type: "string" - }, - note: { - type: "string" - }, - note_url: { - type: "string" - }, - remove_scopes: { - type: "string[]" - }, - scopes: { - type: "string[]" - } - }, - url: "/authorizations/:authorization_id" - } - }, - orgs: { - addOrUpdateMembership: { - method: "PUT", - params: { - org: { - required: 
true, - type: "string" - }, - role: { - enum: ["admin", "member"], - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/memberships/:username" - }, - blockUser: { - method: "PUT", - params: { - org: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/blocks/:username" - }, - checkBlockedUser: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/blocks/:username" - }, - checkMembership: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/members/:username" - }, - checkPublicMembership: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/public_members/:username" - }, - concealMembership: { - method: "DELETE", - params: { - org: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/public_members/:username" - }, - convertMemberToOutsideCollaborator: { - method: "PUT", - params: { - org: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/outside_collaborators/:username" - }, - createHook: { - method: "POST", - params: { - active: { - type: "boolean" - }, - config: { - required: true, - type: "object" - }, - "config.content_type": { - type: "string" - }, - "config.insecure_ssl": { - type: "string" - }, - "config.secret": { - type: "string" - }, - "config.url": { - required: true, - type: "string" - }, - events: { - type: "string[]" - }, - name: { - required: true, - type: "string" - }, - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/hooks" - }, - createInvitation: { - method: "POST", - params: { - email: { - type: "string" - }, - invitee_id: { - type: "integer" - }, - org: { - required: true, - type: "string" - }, - role: { - enum: ["admin", "direct_member", "billing_manager"], - type: "string" - }, - team_ids: { - type: "integer[]" - } - }, - url: "/orgs/:org/invitations" - }, - deleteHook: { - method: "DELETE", - params: { - hook_id: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/hooks/:hook_id" - }, - get: { - method: "GET", - params: { - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org" - }, - getHook: { - method: "GET", - params: { - hook_id: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/hooks/:hook_id" - }, - getMembership: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/memberships/:username" - }, - getMembershipForAuthenticatedUser: { - method: "GET", - params: { - org: { - required: true, - type: "string" - } - }, - url: "/user/memberships/orgs/:org" - }, - list: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - since: { - type: "integer" - } - }, - url: "/organizations" - }, - listBlockedUsers: { - method: "GET", - params: { - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/blocks" - }, - listForAuthenticatedUser: { - method: "GET", - params: { - page: { - type: "integer" - 
}, - per_page: { - type: "integer" - } - }, - url: "/user/orgs" - }, - listForUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/orgs" - }, - listHooks: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/orgs/:org/hooks" - }, - listInstallations: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/orgs/:org/installations" - }, - listInvitationTeams: { - method: "GET", - params: { - invitation_id: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/orgs/:org/invitations/:invitation_id/teams" - }, - listMembers: { - method: "GET", - params: { - filter: { - enum: ["2fa_disabled", "all"], - type: "string" - }, - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - role: { - enum: ["all", "admin", "member"], - type: "string" - } - }, - url: "/orgs/:org/members" - }, - listMemberships: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - state: { - enum: ["active", "pending"], - type: "string" - } - }, - url: "/user/memberships/orgs" - }, - listOutsideCollaborators: { - method: "GET", - params: { - filter: { - enum: ["2fa_disabled", "all"], - type: "string" - }, - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/orgs/:org/outside_collaborators" - }, - listPendingInvitations: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/orgs/:org/invitations" - }, - listPublicMembers: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/orgs/:org/public_members" - }, - pingHook: { - method: "POST", - params: { - hook_id: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/hooks/:hook_id/pings" - }, - publicizeMembership: { - method: "PUT", - params: { - org: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/public_members/:username" - }, - removeMember: { - method: "DELETE", - params: { - org: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/members/:username" - }, - removeMembership: { - method: "DELETE", - params: { - org: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/memberships/:username" - }, - removeOutsideCollaborator: { - method: "DELETE", - params: { - org: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/outside_collaborators/:username" - }, - unblockUser: { - method: "DELETE", - params: { - org: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - 
url: "/orgs/:org/blocks/:username" - }, - update: { - method: "PATCH", - params: { - billing_email: { - type: "string" - }, - company: { - type: "string" - }, - default_repository_permission: { - enum: ["read", "write", "admin", "none"], - type: "string" - }, - description: { - type: "string" - }, - email: { - type: "string" - }, - has_organization_projects: { - type: "boolean" - }, - has_repository_projects: { - type: "boolean" - }, - location: { - type: "string" - }, - members_allowed_repository_creation_type: { - enum: ["all", "private", "none"], - type: "string" - }, - members_can_create_internal_repositories: { - type: "boolean" - }, - members_can_create_private_repositories: { - type: "boolean" - }, - members_can_create_public_repositories: { - type: "boolean" - }, - members_can_create_repositories: { - type: "boolean" - }, - name: { - type: "string" - }, - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org" - }, - updateHook: { - method: "PATCH", - params: { - active: { - type: "boolean" - }, - config: { - type: "object" - }, - "config.content_type": { - type: "string" - }, - "config.insecure_ssl": { - type: "string" - }, - "config.secret": { - type: "string" - }, - "config.url": { - required: true, - type: "string" - }, - events: { - type: "string[]" - }, - hook_id: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/hooks/:hook_id" - }, - updateMembership: { - method: "PATCH", - params: { - org: { - required: true, - type: "string" - }, - state: { - enum: ["active"], - required: true, - type: "string" - } - }, - url: "/user/memberships/orgs/:org" - } - }, - projects: { - addCollaborator: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "PUT", - params: { - permission: { - enum: ["read", "write", "admin"], - type: "string" - }, - project_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/projects/:project_id/collaborators/:username" - }, - createCard: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "POST", - params: { - column_id: { - required: true, - type: "integer" - }, - content_id: { - type: "integer" - }, - content_type: { - type: "string" - }, - note: { - type: "string" - } - }, - url: "/projects/columns/:column_id/cards" - }, - createColumn: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "POST", - params: { - name: { - required: true, - type: "string" - }, - project_id: { - required: true, - type: "integer" - } - }, - url: "/projects/:project_id/columns" - }, - createForAuthenticatedUser: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "POST", - params: { - body: { - type: "string" - }, - name: { - required: true, - type: "string" - } - }, - url: "/user/projects" - }, - createForOrg: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "POST", - params: { - body: { - type: "string" - }, - name: { - required: true, - type: "string" - }, - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/projects" - }, - createForRepo: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "POST", - params: { - body: { - type: "string" - }, - name: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: 
"/repos/:owner/:repo/projects" - }, - delete: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "DELETE", - params: { - project_id: { - required: true, - type: "integer" - } - }, - url: "/projects/:project_id" - }, - deleteCard: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "DELETE", - params: { - card_id: { - required: true, - type: "integer" - } - }, - url: "/projects/columns/cards/:card_id" - }, - deleteColumn: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "DELETE", - params: { - column_id: { - required: true, - type: "integer" - } - }, - url: "/projects/columns/:column_id" - }, - get: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - project_id: { - required: true, - type: "integer" - } - }, - url: "/projects/:project_id" - }, - getCard: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - card_id: { - required: true, - type: "integer" - } - }, - url: "/projects/columns/cards/:card_id" - }, - getColumn: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - column_id: { - required: true, - type: "integer" - } - }, - url: "/projects/columns/:column_id" - }, - listCards: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - archived_state: { - enum: ["all", "archived", "not_archived"], - type: "string" - }, - column_id: { - required: true, - type: "integer" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/projects/columns/:column_id/cards" - }, - listCollaborators: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - affiliation: { - enum: ["outside", "direct", "all"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - project_id: { - required: true, - type: "integer" - } - }, - url: "/projects/:project_id/collaborators" - }, - listColumns: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - project_id: { - required: true, - type: "integer" - } - }, - url: "/projects/:project_id/columns" - }, - listForOrg: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - state: { - enum: ["open", "closed", "all"], - type: "string" - } - }, - url: "/orgs/:org/projects" - }, - listForRepo: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - state: { - enum: ["open", "closed", "all"], - type: "string" - } - }, - url: "/repos/:owner/:repo/projects" - }, - listForUser: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - state: { - enum: ["open", "closed", "all"], - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/projects" - }, - moveCard: { - headers: { - 
accept: "application/vnd.github.inertia-preview+json" - }, - method: "POST", - params: { - card_id: { - required: true, - type: "integer" - }, - column_id: { - type: "integer" - }, - position: { - required: true, - type: "string", - validation: "^(top|bottom|after:\\d+)$" - } - }, - url: "/projects/columns/cards/:card_id/moves" - }, - moveColumn: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "POST", - params: { - column_id: { - required: true, - type: "integer" - }, - position: { - required: true, - type: "string", - validation: "^(first|last|after:\\d+)$" - } - }, - url: "/projects/columns/:column_id/moves" - }, - removeCollaborator: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "DELETE", - params: { - project_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/projects/:project_id/collaborators/:username" - }, - reviewUserPermissionLevel: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - project_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/projects/:project_id/collaborators/:username/permission" - }, - update: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "PATCH", - params: { - body: { - type: "string" - }, - name: { - type: "string" - }, - organization_permission: { - type: "string" - }, - private: { - type: "boolean" - }, - project_id: { - required: true, - type: "integer" - }, - state: { - enum: ["open", "closed"], - type: "string" - } - }, - url: "/projects/:project_id" - }, - updateCard: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "PATCH", - params: { - archived: { - type: "boolean" - }, - card_id: { - required: true, - type: "integer" - }, - note: { - type: "string" - } - }, - url: "/projects/columns/cards/:card_id" - }, - updateColumn: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "PATCH", - params: { - column_id: { - required: true, - type: "integer" - }, - name: { - required: true, - type: "string" - } - }, - url: "/projects/columns/:column_id" - } - }, - pulls: { - checkIfMerged: { - method: "GET", - params: { - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/merge" - }, - create: { - method: "POST", - params: { - base: { - required: true, - type: "string" - }, - body: { - type: "string" - }, - draft: { - type: "boolean" - }, - head: { - required: true, - type: "string" - }, - maintainer_can_modify: { - type: "boolean" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - title: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls" - }, - createComment: { - method: "POST", - params: { - body: { - required: true, - type: "string" - }, - commit_id: { - required: true, - type: "string" - }, - in_reply_to: { - deprecated: true, - description: "The comment ID to reply to. **Note**: This must be the ID of a top-level comment, not a reply to that comment. 
Replies to replies are not supported.", - type: "integer" - }, - line: { - type: "integer" - }, - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - path: { - required: true, - type: "string" - }, - position: { - type: "integer" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - side: { - enum: ["LEFT", "RIGHT"], - type: "string" - }, - start_line: { - type: "integer" - }, - start_side: { - enum: ["LEFT", "RIGHT", "side"], - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/comments" - }, - createCommentReply: { - deprecated: "octokit.pulls.createCommentReply() has been renamed to octokit.pulls.createComment() (2019-09-09)", - method: "POST", - params: { - body: { - required: true, - type: "string" - }, - commit_id: { - required: true, - type: "string" - }, - in_reply_to: { - deprecated: true, - description: "The comment ID to reply to. **Note**: This must be the ID of a top-level comment, not a reply to that comment. Replies to replies are not supported.", - type: "integer" - }, - line: { - type: "integer" - }, - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - path: { - required: true, - type: "string" - }, - position: { - type: "integer" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - side: { - enum: ["LEFT", "RIGHT"], - type: "string" - }, - start_line: { - type: "integer" - }, - start_side: { - enum: ["LEFT", "RIGHT", "side"], - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/comments" - }, - createFromIssue: { - deprecated: "octokit.pulls.createFromIssue() is deprecated, see https://developer.github.com/v3/pulls/#create-a-pull-request", - method: "POST", - params: { - base: { - required: true, - type: "string" - }, - draft: { - type: "boolean" - }, - head: { - required: true, - type: "string" - }, - issue: { - required: true, - type: "integer" - }, - maintainer_can_modify: { - type: "boolean" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls" - }, - createReview: { - method: "POST", - params: { - body: { - type: "string" - }, - comments: { - type: "object[]" - }, - "comments[].body": { - required: true, - type: "string" - }, - "comments[].path": { - required: true, - type: "string" - }, - "comments[].position": { - required: true, - type: "integer" - }, - commit_id: { - type: "string" - }, - event: { - enum: ["APPROVE", "REQUEST_CHANGES", "COMMENT"], - type: "string" - }, - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/reviews" - }, - createReviewCommentReply: { - method: "POST", - params: { - body: { - required: true, - type: "string" - }, - comment_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/comments/:comment_id/replies" - }, - createReviewRequest: { - method: "POST", - params: { - number: { - alias: "pull_number", 
- deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - reviewers: { - type: "string[]" - }, - team_reviewers: { - type: "string[]" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/requested_reviewers" - }, - deleteComment: { - method: "DELETE", - params: { - comment_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/comments/:comment_id" - }, - deletePendingReview: { - method: "DELETE", - params: { - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - review_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id" - }, - deleteReviewRequest: { - method: "DELETE", - params: { - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - reviewers: { - type: "string[]" - }, - team_reviewers: { - type: "string[]" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/requested_reviewers" - }, - dismissReview: { - method: "PUT", - params: { - message: { - required: true, - type: "string" - }, - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - review_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id/dismissals" - }, - get: { - method: "GET", - params: { - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number" - }, - getComment: { - method: "GET", - params: { - comment_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/comments/:comment_id" - }, - getCommentsForReview: { - method: "GET", - params: { - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - review_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id/comments" - }, - getReview: { - method: "GET", - params: { - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - review_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id" - }, - list: { - method: "GET", - 
params: { - base: { - type: "string" - }, - direction: { - enum: ["asc", "desc"], - type: "string" - }, - head: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - sort: { - enum: ["created", "updated", "popularity", "long-running"], - type: "string" - }, - state: { - enum: ["open", "closed", "all"], - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls" - }, - listComments: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - since: { - type: "string" - }, - sort: { - enum: ["created", "updated"], - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/comments" - }, - listCommentsForRepo: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - since: { - type: "string" - }, - sort: { - enum: ["created", "updated"], - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/comments" - }, - listCommits: { - method: "GET", - params: { - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/commits" - }, - listFiles: { - method: "GET", - params: { - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/files" - }, - listReviewRequests: { - method: "GET", - params: { - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/requested_reviewers" - }, - listReviews: { - method: "GET", - params: { - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/reviews" - }, - merge: { - method: "PUT", - params: { - commit_message: { - type: "string" - }, - commit_title: { - type: "string" - }, - merge_method: { - enum: ["merge", "squash", "rebase"], - type: "string" - }, - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - 
pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - sha: { - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/merge" - }, - submitReview: { - method: "POST", - params: { - body: { - type: "string" - }, - event: { - enum: ["APPROVE", "REQUEST_CHANGES", "COMMENT"], - required: true, - type: "string" - }, - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - review_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id/events" - }, - update: { - method: "PATCH", - params: { - base: { - type: "string" - }, - body: { - type: "string" - }, - maintainer_can_modify: { - type: "boolean" - }, - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - state: { - enum: ["open", "closed"], - type: "string" - }, - title: { - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number" - }, - updateBranch: { - headers: { - accept: "application/vnd.github.lydian-preview+json" - }, - method: "PUT", - params: { - expected_head_sha: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/update-branch" - }, - updateComment: { - method: "PATCH", - params: { - body: { - required: true, - type: "string" - }, - comment_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/comments/:comment_id" - }, - updateReview: { - method: "PUT", - params: { - body: { - required: true, - type: "string" - }, - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - review_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id" - } - }, - rateLimit: { - get: { - method: "GET", - params: {}, - url: "/rate_limit" - } - }, - reactions: { - createForCommitComment: { - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "POST", - params: { - comment_id: { - required: true, - type: "integer" - }, - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/comments/:comment_id/reactions" - }, - createForIssue: { - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "POST", - params: { - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - required: true, - type: "string" - }, - issue_number: { - required: true, - type: "integer" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { 
- required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/reactions" - }, - createForIssueComment: { - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "POST", - params: { - comment_id: { - required: true, - type: "integer" - }, - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/comments/:comment_id/reactions" - }, - createForPullRequestReviewComment: { - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "POST", - params: { - comment_id: { - required: true, - type: "integer" - }, - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/comments/:comment_id/reactions" - }, - createForTeamDiscussion: { - deprecated: "octokit.reactions.createForTeamDiscussion() has been renamed to octokit.reactions.createForTeamDiscussionLegacy() (2020-01-16)", - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "POST", - params: { - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - required: true, - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/reactions" - }, - createForTeamDiscussionComment: { - deprecated: "octokit.reactions.createForTeamDiscussionComment() has been renamed to octokit.reactions.createForTeamDiscussionCommentLegacy() (2020-01-16)", - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "POST", - params: { - comment_number: { - required: true, - type: "integer" - }, - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - required: true, - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions" - }, - createForTeamDiscussionCommentInOrg: { - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "POST", - params: { - comment_number: { - required: true, - type: "integer" - }, - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - required: true, - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number/reactions" - }, - createForTeamDiscussionCommentLegacy: { - deprecated: "octokit.reactions.createForTeamDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/reactions/#create-reaction-for-a-team-discussion-comment-legacy", - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "POST", - params: { - comment_number: { - required: true, - type: "integer" - }, - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", 
"rocket", "eyes"], - required: true, - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions" - }, - createForTeamDiscussionInOrg: { - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "POST", - params: { - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - required: true, - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/reactions" - }, - createForTeamDiscussionLegacy: { - deprecated: "octokit.reactions.createForTeamDiscussionLegacy() is deprecated, see https://developer.github.com/v3/reactions/#create-reaction-for-a-team-discussion-legacy", - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "POST", - params: { - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - required: true, - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/reactions" - }, - delete: { - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "DELETE", - params: { - reaction_id: { - required: true, - type: "integer" - } - }, - url: "/reactions/:reaction_id" - }, - listForCommitComment: { - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "GET", - params: { - comment_id: { - required: true, - type: "integer" - }, - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/comments/:comment_id/reactions" - }, - listForIssue: { - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "GET", - params: { - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - type: "string" - }, - issue_number: { - required: true, - type: "integer" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/reactions" - }, - listForIssueComment: { - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "GET", - params: { - comment_id: { - required: true, - type: "integer" - }, - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/comments/:comment_id/reactions" - }, - listForPullRequestReviewComment: { - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "GET", - params: { - 
comment_id: { - required: true, - type: "integer" - }, - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/comments/:comment_id/reactions" - }, - listForTeamDiscussion: { - deprecated: "octokit.reactions.listForTeamDiscussion() has been renamed to octokit.reactions.listForTeamDiscussionLegacy() (2020-01-16)", - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "GET", - params: { - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/reactions" - }, - listForTeamDiscussionComment: { - deprecated: "octokit.reactions.listForTeamDiscussionComment() has been renamed to octokit.reactions.listForTeamDiscussionCommentLegacy() (2020-01-16)", - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "GET", - params: { - comment_number: { - required: true, - type: "integer" - }, - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions" - }, - listForTeamDiscussionCommentInOrg: { - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "GET", - params: { - comment_number: { - required: true, - type: "integer" - }, - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number/reactions" - }, - listForTeamDiscussionCommentLegacy: { - deprecated: "octokit.reactions.listForTeamDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/reactions/#list-reactions-for-a-team-discussion-comment-legacy", - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "GET", - params: { - comment_number: { - required: true, - type: "integer" - }, - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions" - }, - listForTeamDiscussionInOrg: { - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "GET", - params: { - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - type: "string" 
- }, - discussion_number: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/reactions" - }, - listForTeamDiscussionLegacy: { - deprecated: "octokit.reactions.listForTeamDiscussionLegacy() is deprecated, see https://developer.github.com/v3/reactions/#list-reactions-for-a-team-discussion-legacy", - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "GET", - params: { - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/reactions" - } - }, - repos: { - acceptInvitation: { - method: "PATCH", - params: { - invitation_id: { - required: true, - type: "integer" - } - }, - url: "/user/repository_invitations/:invitation_id" - }, - addCollaborator: { - method: "PUT", - params: { - owner: { - required: true, - type: "string" - }, - permission: { - enum: ["pull", "push", "admin"], - type: "string" - }, - repo: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/collaborators/:username" - }, - addDeployKey: { - method: "POST", - params: { - key: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - read_only: { - type: "boolean" - }, - repo: { - required: true, - type: "string" - }, - title: { - type: "string" - } - }, - url: "/repos/:owner/:repo/keys" - }, - addProtectedBranchAdminEnforcement: { - method: "POST", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/enforce_admins" - }, - addProtectedBranchAppRestrictions: { - method: "POST", - params: { - apps: { - mapTo: "data", - required: true, - type: "string[]" - }, - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps" - }, - addProtectedBranchRequiredSignatures: { - headers: { - accept: "application/vnd.github.zzzax-preview+json" - }, - method: "POST", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/required_signatures" - }, - addProtectedBranchRequiredStatusChecksContexts: { - method: "POST", - params: { - branch: { - required: true, - type: "string" - }, - contexts: { - mapTo: "data", - required: true, - type: "string[]" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts" - }, - addProtectedBranchTeamRestrictions: { - method: "POST", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, 
- teams: { - mapTo: "data", - required: true, - type: "string[]" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams" - }, - addProtectedBranchUserRestrictions: { - method: "POST", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - users: { - mapTo: "data", - required: true, - type: "string[]" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users" - }, - checkCollaborator: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/collaborators/:username" - }, - checkVulnerabilityAlerts: { - headers: { - accept: "application/vnd.github.dorian-preview+json" - }, - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/vulnerability-alerts" - }, - compareCommits: { - method: "GET", - params: { - base: { - required: true, - type: "string" - }, - head: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/compare/:base...:head" - }, - createCommitComment: { - method: "POST", - params: { - body: { - required: true, - type: "string" - }, - commit_sha: { - required: true, - type: "string" - }, - line: { - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - path: { - type: "string" - }, - position: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - sha: { - alias: "commit_sha", - deprecated: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/commits/:commit_sha/comments" - }, - createDeployment: { - method: "POST", - params: { - auto_merge: { - type: "boolean" - }, - description: { - type: "string" - }, - environment: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - payload: { - type: "string" - }, - production_environment: { - type: "boolean" - }, - ref: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - required_contexts: { - type: "string[]" - }, - task: { - type: "string" - }, - transient_environment: { - type: "boolean" - } - }, - url: "/repos/:owner/:repo/deployments" - }, - createDeploymentStatus: { - method: "POST", - params: { - auto_inactive: { - type: "boolean" - }, - deployment_id: { - required: true, - type: "integer" - }, - description: { - type: "string" - }, - environment: { - enum: ["production", "staging", "qa"], - type: "string" - }, - environment_url: { - type: "string" - }, - log_url: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - state: { - enum: ["error", "failure", "inactive", "in_progress", "queued", "pending", "success"], - required: true, - type: "string" - }, - target_url: { - type: "string" - } - }, - url: "/repos/:owner/:repo/deployments/:deployment_id/statuses" - }, - createDispatchEvent: { - method: "POST", - params: { - client_payload: { - type: "object" - }, - event_type: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/dispatches" - }, - createFile: { - deprecated: "octokit.repos.createFile() has been 
renamed to octokit.repos.createOrUpdateFile() (2019-06-07)", - method: "PUT", - params: { - author: { - type: "object" - }, - "author.email": { - required: true, - type: "string" - }, - "author.name": { - required: true, - type: "string" - }, - branch: { - type: "string" - }, - committer: { - type: "object" - }, - "committer.email": { - required: true, - type: "string" - }, - "committer.name": { - required: true, - type: "string" - }, - content: { - required: true, - type: "string" - }, - message: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - path: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - sha: { - type: "string" - } - }, - url: "/repos/:owner/:repo/contents/:path" - }, - createForAuthenticatedUser: { - method: "POST", - params: { - allow_merge_commit: { - type: "boolean" - }, - allow_rebase_merge: { - type: "boolean" - }, - allow_squash_merge: { - type: "boolean" - }, - auto_init: { - type: "boolean" - }, - delete_branch_on_merge: { - type: "boolean" - }, - description: { - type: "string" - }, - gitignore_template: { - type: "string" - }, - has_issues: { - type: "boolean" - }, - has_projects: { - type: "boolean" - }, - has_wiki: { - type: "boolean" - }, - homepage: { - type: "string" - }, - is_template: { - type: "boolean" - }, - license_template: { - type: "string" - }, - name: { - required: true, - type: "string" - }, - private: { - type: "boolean" - }, - team_id: { - type: "integer" - }, - visibility: { - enum: ["public", "private", "visibility", "internal"], - type: "string" - } - }, - url: "/user/repos" - }, - createFork: { - method: "POST", - params: { - organization: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/forks" - }, - createHook: { - method: "POST", - params: { - active: { - type: "boolean" - }, - config: { - required: true, - type: "object" - }, - "config.content_type": { - type: "string" - }, - "config.insecure_ssl": { - type: "string" - }, - "config.secret": { - type: "string" - }, - "config.url": { - required: true, - type: "string" - }, - events: { - type: "string[]" - }, - name: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/hooks" - }, - createInOrg: { - method: "POST", - params: { - allow_merge_commit: { - type: "boolean" - }, - allow_rebase_merge: { - type: "boolean" - }, - allow_squash_merge: { - type: "boolean" - }, - auto_init: { - type: "boolean" - }, - delete_branch_on_merge: { - type: "boolean" - }, - description: { - type: "string" - }, - gitignore_template: { - type: "string" - }, - has_issues: { - type: "boolean" - }, - has_projects: { - type: "boolean" - }, - has_wiki: { - type: "boolean" - }, - homepage: { - type: "string" - }, - is_template: { - type: "boolean" - }, - license_template: { - type: "string" - }, - name: { - required: true, - type: "string" - }, - org: { - required: true, - type: "string" - }, - private: { - type: "boolean" - }, - team_id: { - type: "integer" - }, - visibility: { - enum: ["public", "private", "visibility", "internal"], - type: "string" - } - }, - url: "/orgs/:org/repos" - }, - createOrUpdateFile: { - method: "PUT", - params: { - author: { - type: "object" - }, - "author.email": { - required: true, - type: "string" - }, - "author.name": { - required: true, - type: "string" - }, - branch: { - type: "string" - 
}, - committer: { - type: "object" - }, - "committer.email": { - required: true, - type: "string" - }, - "committer.name": { - required: true, - type: "string" - }, - content: { - required: true, - type: "string" - }, - message: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - path: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - sha: { - type: "string" - } - }, - url: "/repos/:owner/:repo/contents/:path" - }, - createRelease: { - method: "POST", - params: { - body: { - type: "string" - }, - draft: { - type: "boolean" - }, - name: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - prerelease: { - type: "boolean" - }, - repo: { - required: true, - type: "string" - }, - tag_name: { - required: true, - type: "string" - }, - target_commitish: { - type: "string" - } - }, - url: "/repos/:owner/:repo/releases" - }, - createStatus: { - method: "POST", - params: { - context: { - type: "string" - }, - description: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - sha: { - required: true, - type: "string" - }, - state: { - enum: ["error", "failure", "pending", "success"], - required: true, - type: "string" - }, - target_url: { - type: "string" - } - }, - url: "/repos/:owner/:repo/statuses/:sha" - }, - createUsingTemplate: { - headers: { - accept: "application/vnd.github.baptiste-preview+json" - }, - method: "POST", - params: { - description: { - type: "string" - }, - name: { - required: true, - type: "string" - }, - owner: { - type: "string" - }, - private: { - type: "boolean" - }, - template_owner: { - required: true, - type: "string" - }, - template_repo: { - required: true, - type: "string" - } - }, - url: "/repos/:template_owner/:template_repo/generate" - }, - declineInvitation: { - method: "DELETE", - params: { - invitation_id: { - required: true, - type: "integer" - } - }, - url: "/user/repository_invitations/:invitation_id" - }, - delete: { - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo" - }, - deleteCommitComment: { - method: "DELETE", - params: { - comment_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/comments/:comment_id" - }, - deleteDownload: { - method: "DELETE", - params: { - download_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/downloads/:download_id" - }, - deleteFile: { - method: "DELETE", - params: { - author: { - type: "object" - }, - "author.email": { - type: "string" - }, - "author.name": { - type: "string" - }, - branch: { - type: "string" - }, - committer: { - type: "object" - }, - "committer.email": { - type: "string" - }, - "committer.name": { - type: "string" - }, - message: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - path: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - sha: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/contents/:path" - }, - deleteHook: { - method: "DELETE", - params: { - hook_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: 
"string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/hooks/:hook_id" - }, - deleteInvitation: { - method: "DELETE", - params: { - invitation_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/invitations/:invitation_id" - }, - deleteRelease: { - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - release_id: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/releases/:release_id" - }, - deleteReleaseAsset: { - method: "DELETE", - params: { - asset_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/releases/assets/:asset_id" - }, - disableAutomatedSecurityFixes: { - headers: { - accept: "application/vnd.github.london-preview+json" - }, - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/automated-security-fixes" - }, - disablePagesSite: { - headers: { - accept: "application/vnd.github.switcheroo-preview+json" - }, - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pages" - }, - disableVulnerabilityAlerts: { - headers: { - accept: "application/vnd.github.dorian-preview+json" - }, - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/vulnerability-alerts" - }, - enableAutomatedSecurityFixes: { - headers: { - accept: "application/vnd.github.london-preview+json" - }, - method: "PUT", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/automated-security-fixes" - }, - enablePagesSite: { - headers: { - accept: "application/vnd.github.switcheroo-preview+json" - }, - method: "POST", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - source: { - type: "object" - }, - "source.branch": { - enum: ["master", "gh-pages"], - type: "string" - }, - "source.path": { - type: "string" - } - }, - url: "/repos/:owner/:repo/pages" - }, - enableVulnerabilityAlerts: { - headers: { - accept: "application/vnd.github.dorian-preview+json" - }, - method: "PUT", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/vulnerability-alerts" - }, - get: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo" - }, - getAppsWithAccessToProtectedBranch: { - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps" - }, - getArchiveLink: { - method: "GET", - params: { - archive_format: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - ref: { - required: true, - type: "string" - }, - repo: { - required: true, 
- type: "string" - } - }, - url: "/repos/:owner/:repo/:archive_format/:ref" - }, - getBranch: { - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch" - }, - getBranchProtection: { - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection" - }, - getClones: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - per: { - enum: ["day", "week"], - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/traffic/clones" - }, - getCodeFrequencyStats: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/stats/code_frequency" - }, - getCollaboratorPermissionLevel: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/collaborators/:username/permission" - }, - getCombinedStatusForRef: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - ref: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/commits/:ref/status" - }, - getCommit: { - method: "GET", - params: { - commit_sha: { - alias: "ref", - deprecated: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - ref: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - sha: { - alias: "ref", - deprecated: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/commits/:ref" - }, - getCommitActivityStats: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/stats/commit_activity" - }, - getCommitComment: { - method: "GET", - params: { - comment_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/comments/:comment_id" - }, - getCommitRefSha: { - deprecated: "octokit.repos.getCommitRefSha() is deprecated, see https://developer.github.com/v3/repos/commits/#get-a-single-commit", - headers: { - accept: "application/vnd.github.v3.sha" - }, - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - ref: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/commits/:ref" - }, - getContents: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - path: { - required: true, - type: "string" - }, - ref: { - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/contents/:path" - }, - getContributorsStats: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/stats/contributors" - }, - getDeployKey: { - method: "GET", - params: { - key_id: { - required: true, - type: "integer" - }, - 
owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/keys/:key_id" - }, - getDeployment: { - method: "GET", - params: { - deployment_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/deployments/:deployment_id" - }, - getDeploymentStatus: { - method: "GET", - params: { - deployment_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - status_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/deployments/:deployment_id/statuses/:status_id" - }, - getDownload: { - method: "GET", - params: { - download_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/downloads/:download_id" - }, - getHook: { - method: "GET", - params: { - hook_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/hooks/:hook_id" - }, - getLatestPagesBuild: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pages/builds/latest" - }, - getLatestRelease: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/releases/latest" - }, - getPages: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pages" - }, - getPagesBuild: { - method: "GET", - params: { - build_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pages/builds/:build_id" - }, - getParticipationStats: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/stats/participation" - }, - getProtectedBranchAdminEnforcement: { - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/enforce_admins" - }, - getProtectedBranchPullRequestReviewEnforcement: { - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/required_pull_request_reviews" - }, - getProtectedBranchRequiredSignatures: { - headers: { - accept: "application/vnd.github.zzzax-preview+json" - }, - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/required_signatures" - }, - getProtectedBranchRequiredStatusChecks: { - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - 
type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks" - }, - getProtectedBranchRestrictions: { - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions" - }, - getPunchCardStats: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/stats/punch_card" - }, - getReadme: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - ref: { - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/readme" - }, - getRelease: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - release_id: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/releases/:release_id" - }, - getReleaseAsset: { - method: "GET", - params: { - asset_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/releases/assets/:asset_id" - }, - getReleaseByTag: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - tag: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/releases/tags/:tag" - }, - getTeamsWithAccessToProtectedBranch: { - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams" - }, - getTopPaths: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/traffic/popular/paths" - }, - getTopReferrers: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/traffic/popular/referrers" - }, - getUsersWithAccessToProtectedBranch: { - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users" - }, - getViews: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - per: { - enum: ["day", "week"], - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/traffic/views" - }, - list: { - method: "GET", - params: { - affiliation: { - type: "string" - }, - direction: { - enum: ["asc", "desc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - sort: { - enum: ["created", "updated", "pushed", "full_name"], - type: "string" - }, - type: { - enum: ["all", "owner", "public", "private", "member"], - type: "string" - }, - visibility: { - enum: ["all", "public", "private"], - type: "string" - } - }, - url: "/user/repos" - }, - listAppsWithAccessToProtectedBranch: { - deprecated: 
"octokit.repos.listAppsWithAccessToProtectedBranch() has been renamed to octokit.repos.getAppsWithAccessToProtectedBranch() (2019-09-13)", - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps" - }, - listAssetsForRelease: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - release_id: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/releases/:release_id/assets" - }, - listBranches: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - protected: { - type: "boolean" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches" - }, - listBranchesForHeadCommit: { - headers: { - accept: "application/vnd.github.groot-preview+json" - }, - method: "GET", - params: { - commit_sha: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/commits/:commit_sha/branches-where-head" - }, - listCollaborators: { - method: "GET", - params: { - affiliation: { - enum: ["outside", "direct", "all"], - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/collaborators" - }, - listCommentsForCommit: { - method: "GET", - params: { - commit_sha: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - ref: { - alias: "commit_sha", - deprecated: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/commits/:commit_sha/comments" - }, - listCommitComments: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/comments" - }, - listCommits: { - method: "GET", - params: { - author: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - path: { - type: "string" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - sha: { - type: "string" - }, - since: { - type: "string" - }, - until: { - type: "string" - } - }, - url: "/repos/:owner/:repo/commits" - }, - listContributors: { - method: "GET", - params: { - anon: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/contributors" - }, - listDeployKeys: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/keys" - }, - listDeploymentStatuses: { - method: "GET", - params: { - deployment_id: { - required: 
true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/deployments/:deployment_id/statuses" - }, - listDeployments: { - method: "GET", - params: { - environment: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - ref: { - type: "string" - }, - repo: { - required: true, - type: "string" - }, - sha: { - type: "string" - }, - task: { - type: "string" - } - }, - url: "/repos/:owner/:repo/deployments" - }, - listDownloads: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/downloads" - }, - listForOrg: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - sort: { - enum: ["created", "updated", "pushed", "full_name"], - type: "string" - }, - type: { - enum: ["all", "public", "private", "forks", "sources", "member", "internal"], - type: "string" - } - }, - url: "/orgs/:org/repos" - }, - listForUser: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - sort: { - enum: ["created", "updated", "pushed", "full_name"], - type: "string" - }, - type: { - enum: ["all", "owner", "member"], - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/repos" - }, - listForks: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - sort: { - enum: ["newest", "oldest", "stargazers"], - type: "string" - } - }, - url: "/repos/:owner/:repo/forks" - }, - listHooks: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/hooks" - }, - listInvitations: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/invitations" - }, - listInvitationsForAuthenticatedUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/repository_invitations" - }, - listLanguages: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/languages" - }, - listPagesBuilds: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pages/builds" - }, - listProtectedBranchRequiredStatusChecksContexts: { - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - 
required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts" - }, - listProtectedBranchTeamRestrictions: { - deprecated: "octokit.repos.listProtectedBranchTeamRestrictions() has been renamed to octokit.repos.getTeamsWithAccessToProtectedBranch() (2019-09-09)", - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams" - }, - listProtectedBranchUserRestrictions: { - deprecated: "octokit.repos.listProtectedBranchUserRestrictions() has been renamed to octokit.repos.getUsersWithAccessToProtectedBranch() (2019-09-09)", - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users" - }, - listPublic: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - since: { - type: "integer" - } - }, - url: "/repositories" - }, - listPullRequestsAssociatedWithCommit: { - headers: { - accept: "application/vnd.github.groot-preview+json" - }, - method: "GET", - params: { - commit_sha: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/commits/:commit_sha/pulls" - }, - listReleases: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/releases" - }, - listStatusesForRef: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - ref: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/commits/:ref/statuses" - }, - listTags: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/tags" - }, - listTeams: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/teams" - }, - listTeamsWithAccessToProtectedBranch: { - deprecated: "octokit.repos.listTeamsWithAccessToProtectedBranch() has been renamed to octokit.repos.getTeamsWithAccessToProtectedBranch() (2019-09-13)", - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams" - }, - listTopics: { - headers: { - accept: "application/vnd.github.mercy-preview+json" - }, - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/topics" - }, - 
listUsersWithAccessToProtectedBranch: { - deprecated: "octokit.repos.listUsersWithAccessToProtectedBranch() has been renamed to octokit.repos.getUsersWithAccessToProtectedBranch() (2019-09-13)", - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users" - }, - merge: { - method: "POST", - params: { - base: { - required: true, - type: "string" - }, - commit_message: { - type: "string" - }, - head: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/merges" - }, - pingHook: { - method: "POST", - params: { - hook_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/hooks/:hook_id/pings" - }, - removeBranchProtection: { - method: "DELETE", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection" - }, - removeCollaborator: { - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/collaborators/:username" - }, - removeDeployKey: { - method: "DELETE", - params: { - key_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/keys/:key_id" - }, - removeProtectedBranchAdminEnforcement: { - method: "DELETE", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/enforce_admins" - }, - removeProtectedBranchAppRestrictions: { - method: "DELETE", - params: { - apps: { - mapTo: "data", - required: true, - type: "string[]" - }, - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps" - }, - removeProtectedBranchPullRequestReviewEnforcement: { - method: "DELETE", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/required_pull_request_reviews" - }, - removeProtectedBranchRequiredSignatures: { - headers: { - accept: "application/vnd.github.zzzax-preview+json" - }, - method: "DELETE", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/required_signatures" - }, - removeProtectedBranchRequiredStatusChecks: { - method: "DELETE", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: 
"/repos/:owner/:repo/branches/:branch/protection/required_status_checks" - }, - removeProtectedBranchRequiredStatusChecksContexts: { - method: "DELETE", - params: { - branch: { - required: true, - type: "string" - }, - contexts: { - mapTo: "data", - required: true, - type: "string[]" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts" - }, - removeProtectedBranchRestrictions: { - method: "DELETE", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions" - }, - removeProtectedBranchTeamRestrictions: { - method: "DELETE", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - teams: { - mapTo: "data", - required: true, - type: "string[]" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams" - }, - removeProtectedBranchUserRestrictions: { - method: "DELETE", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - users: { - mapTo: "data", - required: true, - type: "string[]" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users" - }, - replaceProtectedBranchAppRestrictions: { - method: "PUT", - params: { - apps: { - mapTo: "data", - required: true, - type: "string[]" - }, - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps" - }, - replaceProtectedBranchRequiredStatusChecksContexts: { - method: "PUT", - params: { - branch: { - required: true, - type: "string" - }, - contexts: { - mapTo: "data", - required: true, - type: "string[]" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts" - }, - replaceProtectedBranchTeamRestrictions: { - method: "PUT", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - teams: { - mapTo: "data", - required: true, - type: "string[]" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams" - }, - replaceProtectedBranchUserRestrictions: { - method: "PUT", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - users: { - mapTo: "data", - required: true, - type: "string[]" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users" - }, - replaceTopics: { - headers: { - accept: "application/vnd.github.mercy-preview+json" - }, - method: "PUT", - params: { - names: { - required: true, - type: "string[]" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/topics" - }, - requestPageBuild: { - method: "POST", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: 
true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pages/builds" - }, - retrieveCommunityProfileMetrics: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/community/profile" - }, - testPushHook: { - method: "POST", - params: { - hook_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/hooks/:hook_id/tests" - }, - transfer: { - method: "POST", - params: { - new_owner: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - team_ids: { - type: "integer[]" - } - }, - url: "/repos/:owner/:repo/transfer" - }, - update: { - method: "PATCH", - params: { - allow_merge_commit: { - type: "boolean" - }, - allow_rebase_merge: { - type: "boolean" - }, - allow_squash_merge: { - type: "boolean" - }, - archived: { - type: "boolean" - }, - default_branch: { - type: "string" - }, - delete_branch_on_merge: { - type: "boolean" - }, - description: { - type: "string" - }, - has_issues: { - type: "boolean" - }, - has_projects: { - type: "boolean" - }, - has_wiki: { - type: "boolean" - }, - homepage: { - type: "string" - }, - is_template: { - type: "boolean" - }, - name: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - private: { - type: "boolean" - }, - repo: { - required: true, - type: "string" - }, - visibility: { - enum: ["public", "private", "visibility", "internal"], - type: "string" - } - }, - url: "/repos/:owner/:repo" - }, - updateBranchProtection: { - method: "PUT", - params: { - allow_deletions: { - type: "boolean" - }, - allow_force_pushes: { - allowNull: true, - type: "boolean" - }, - branch: { - required: true, - type: "string" - }, - enforce_admins: { - allowNull: true, - required: true, - type: "boolean" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - required_linear_history: { - type: "boolean" - }, - required_pull_request_reviews: { - allowNull: true, - required: true, - type: "object" - }, - "required_pull_request_reviews.dismiss_stale_reviews": { - type: "boolean" - }, - "required_pull_request_reviews.dismissal_restrictions": { - type: "object" - }, - "required_pull_request_reviews.dismissal_restrictions.teams": { - type: "string[]" - }, - "required_pull_request_reviews.dismissal_restrictions.users": { - type: "string[]" - }, - "required_pull_request_reviews.require_code_owner_reviews": { - type: "boolean" - }, - "required_pull_request_reviews.required_approving_review_count": { - type: "integer" - }, - required_status_checks: { - allowNull: true, - required: true, - type: "object" - }, - "required_status_checks.contexts": { - required: true, - type: "string[]" - }, - "required_status_checks.strict": { - required: true, - type: "boolean" - }, - restrictions: { - allowNull: true, - required: true, - type: "object" - }, - "restrictions.apps": { - type: "string[]" - }, - "restrictions.teams": { - required: true, - type: "string[]" - }, - "restrictions.users": { - required: true, - type: "string[]" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection" - }, - updateCommitComment: { - method: "PATCH", - params: { - body: { - required: true, - type: "string" - }, - comment_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - 
required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/comments/:comment_id" - }, - updateFile: { - deprecated: "octokit.repos.updateFile() has been renamed to octokit.repos.createOrUpdateFile() (2019-06-07)", - method: "PUT", - params: { - author: { - type: "object" - }, - "author.email": { - required: true, - type: "string" - }, - "author.name": { - required: true, - type: "string" - }, - branch: { - type: "string" - }, - committer: { - type: "object" - }, - "committer.email": { - required: true, - type: "string" - }, - "committer.name": { - required: true, - type: "string" - }, - content: { - required: true, - type: "string" - }, - message: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - path: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - sha: { - type: "string" - } - }, - url: "/repos/:owner/:repo/contents/:path" - }, - updateHook: { - method: "PATCH", - params: { - active: { - type: "boolean" - }, - add_events: { - type: "string[]" - }, - config: { - type: "object" - }, - "config.content_type": { - type: "string" - }, - "config.insecure_ssl": { - type: "string" - }, - "config.secret": { - type: "string" - }, - "config.url": { - required: true, - type: "string" - }, - events: { - type: "string[]" - }, - hook_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - remove_events: { - type: "string[]" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/hooks/:hook_id" - }, - updateInformationAboutPagesSite: { - method: "PUT", - params: { - cname: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - source: { - enum: ['"gh-pages"', '"master"', '"master /docs"'], - type: "string" - } - }, - url: "/repos/:owner/:repo/pages" - }, - updateInvitation: { - method: "PATCH", - params: { - invitation_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - permissions: { - enum: ["read", "write", "admin"], - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/invitations/:invitation_id" - }, - updateProtectedBranchPullRequestReviewEnforcement: { - method: "PATCH", - params: { - branch: { - required: true, - type: "string" - }, - dismiss_stale_reviews: { - type: "boolean" - }, - dismissal_restrictions: { - type: "object" - }, - "dismissal_restrictions.teams": { - type: "string[]" - }, - "dismissal_restrictions.users": { - type: "string[]" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - require_code_owner_reviews: { - type: "boolean" - }, - required_approving_review_count: { - type: "integer" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/required_pull_request_reviews" - }, - updateProtectedBranchRequiredStatusChecks: { - method: "PATCH", - params: { - branch: { - required: true, - type: "string" - }, - contexts: { - type: "string[]" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - strict: { - type: "boolean" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks" - }, - updateRelease: { - method: "PATCH", - params: { - body: { - type: "string" - }, - draft: { - type: "boolean" - }, - name: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - prerelease: { - 
type: "boolean" - }, - release_id: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - tag_name: { - type: "string" - }, - target_commitish: { - type: "string" - } - }, - url: "/repos/:owner/:repo/releases/:release_id" - }, - updateReleaseAsset: { - method: "PATCH", - params: { - asset_id: { - required: true, - type: "integer" - }, - label: { - type: "string" - }, - name: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/releases/assets/:asset_id" - }, - uploadReleaseAsset: { - method: "POST", - params: { - data: { - mapTo: "data", - required: true, - type: "string | object" - }, - file: { - alias: "data", - deprecated: true, - type: "string | object" - }, - headers: { - required: true, - type: "object" - }, - "headers.content-length": { - required: true, - type: "integer" - }, - "headers.content-type": { - required: true, - type: "string" - }, - label: { - type: "string" - }, - name: { - required: true, - type: "string" - }, - url: { - required: true, - type: "string" - } - }, - url: ":url" - } - }, - search: { - code: { - method: "GET", - params: { - order: { - enum: ["desc", "asc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - q: { - required: true, - type: "string" - }, - sort: { - enum: ["indexed"], - type: "string" - } - }, - url: "/search/code" - }, - commits: { - headers: { - accept: "application/vnd.github.cloak-preview+json" - }, - method: "GET", - params: { - order: { - enum: ["desc", "asc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - q: { - required: true, - type: "string" - }, - sort: { - enum: ["author-date", "committer-date"], - type: "string" - } - }, - url: "/search/commits" - }, - issues: { - deprecated: "octokit.search.issues() has been renamed to octokit.search.issuesAndPullRequests() (2018-12-27)", - method: "GET", - params: { - order: { - enum: ["desc", "asc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - q: { - required: true, - type: "string" - }, - sort: { - enum: ["comments", "reactions", "reactions-+1", "reactions--1", "reactions-smile", "reactions-thinking_face", "reactions-heart", "reactions-tada", "interactions", "created", "updated"], - type: "string" - } - }, - url: "/search/issues" - }, - issuesAndPullRequests: { - method: "GET", - params: { - order: { - enum: ["desc", "asc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - q: { - required: true, - type: "string" - }, - sort: { - enum: ["comments", "reactions", "reactions-+1", "reactions--1", "reactions-smile", "reactions-thinking_face", "reactions-heart", "reactions-tada", "interactions", "created", "updated"], - type: "string" - } - }, - url: "/search/issues" - }, - labels: { - method: "GET", - params: { - order: { - enum: ["desc", "asc"], - type: "string" - }, - q: { - required: true, - type: "string" - }, - repository_id: { - required: true, - type: "integer" - }, - sort: { - enum: ["created", "updated"], - type: "string" - } - }, - url: "/search/labels" - }, - repos: { - method: "GET", - params: { - order: { - enum: ["desc", "asc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - q: { - required: true, - type: "string" - }, - sort: { - enum: ["stars", "forks", "help-wanted-issues", "updated"], - type: "string" - } - }, 
- url: "/search/repositories" - }, - topics: { - method: "GET", - params: { - q: { - required: true, - type: "string" - } - }, - url: "/search/topics" - }, - users: { - method: "GET", - params: { - order: { - enum: ["desc", "asc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - q: { - required: true, - type: "string" - }, - sort: { - enum: ["followers", "repositories", "joined"], - type: "string" - } - }, - url: "/search/users" - } - }, - teams: { - addMember: { - deprecated: "octokit.teams.addMember() has been renamed to octokit.teams.addMemberLegacy() (2020-01-16)", - method: "PUT", - params: { - team_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/members/:username" - }, - addMemberLegacy: { - deprecated: "octokit.teams.addMemberLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#add-team-member-legacy", - method: "PUT", - params: { - team_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/members/:username" - }, - addOrUpdateMembership: { - deprecated: "octokit.teams.addOrUpdateMembership() has been renamed to octokit.teams.addOrUpdateMembershipLegacy() (2020-01-16)", - method: "PUT", - params: { - role: { - enum: ["member", "maintainer"], - type: "string" - }, - team_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/memberships/:username" - }, - addOrUpdateMembershipInOrg: { - method: "PUT", - params: { - org: { - required: true, - type: "string" - }, - role: { - enum: ["member", "maintainer"], - type: "string" - }, - team_slug: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/memberships/:username" - }, - addOrUpdateMembershipLegacy: { - deprecated: "octokit.teams.addOrUpdateMembershipLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#add-or-update-team-membership-legacy", - method: "PUT", - params: { - role: { - enum: ["member", "maintainer"], - type: "string" - }, - team_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/memberships/:username" - }, - addOrUpdateProject: { - deprecated: "octokit.teams.addOrUpdateProject() has been renamed to octokit.teams.addOrUpdateProjectLegacy() (2020-01-16)", - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "PUT", - params: { - permission: { - enum: ["read", "write", "admin"], - type: "string" - }, - project_id: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/projects/:project_id" - }, - addOrUpdateProjectInOrg: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "PUT", - params: { - org: { - required: true, - type: "string" - }, - permission: { - enum: ["read", "write", "admin"], - type: "string" - }, - project_id: { - required: true, - type: "integer" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/projects/:project_id" - }, - addOrUpdateProjectLegacy: { - deprecated: "octokit.teams.addOrUpdateProjectLegacy() is deprecated, see https://developer.github.com/v3/teams/#add-or-update-team-project-legacy", - headers: { - accept: 
"application/vnd.github.inertia-preview+json" - }, - method: "PUT", - params: { - permission: { - enum: ["read", "write", "admin"], - type: "string" - }, - project_id: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/projects/:project_id" - }, - addOrUpdateRepo: { - deprecated: "octokit.teams.addOrUpdateRepo() has been renamed to octokit.teams.addOrUpdateRepoLegacy() (2020-01-16)", - method: "PUT", - params: { - owner: { - required: true, - type: "string" - }, - permission: { - enum: ["pull", "push", "admin"], - type: "string" - }, - repo: { - required: true, - type: "string" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/repos/:owner/:repo" - }, - addOrUpdateRepoInOrg: { - method: "PUT", - params: { - org: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - permission: { - enum: ["pull", "push", "admin"], - type: "string" - }, - repo: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/repos/:owner/:repo" - }, - addOrUpdateRepoLegacy: { - deprecated: "octokit.teams.addOrUpdateRepoLegacy() is deprecated, see https://developer.github.com/v3/teams/#add-or-update-team-repository-legacy", - method: "PUT", - params: { - owner: { - required: true, - type: "string" - }, - permission: { - enum: ["pull", "push", "admin"], - type: "string" - }, - repo: { - required: true, - type: "string" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/repos/:owner/:repo" - }, - checkManagesRepo: { - deprecated: "octokit.teams.checkManagesRepo() has been renamed to octokit.teams.checkManagesRepoLegacy() (2020-01-16)", - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/repos/:owner/:repo" - }, - checkManagesRepoInOrg: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/repos/:owner/:repo" - }, - checkManagesRepoLegacy: { - deprecated: "octokit.teams.checkManagesRepoLegacy() is deprecated, see https://developer.github.com/v3/teams/#check-if-a-team-manages-a-repository-legacy", - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/repos/:owner/:repo" - }, - create: { - method: "POST", - params: { - description: { - type: "string" - }, - maintainers: { - type: "string[]" - }, - name: { - required: true, - type: "string" - }, - org: { - required: true, - type: "string" - }, - parent_team_id: { - type: "integer" - }, - permission: { - enum: ["pull", "push", "admin"], - type: "string" - }, - privacy: { - enum: ["secret", "closed"], - type: "string" - }, - repo_names: { - type: "string[]" - } - }, - url: "/orgs/:org/teams" - }, - createDiscussion: { - deprecated: "octokit.teams.createDiscussion() has been renamed to octokit.teams.createDiscussionLegacy() (2020-01-16)", - method: "POST", - params: { - body: { - required: true, - type: "string" - }, - private: { - type: "boolean" - }, - team_id: { - required: true, 
- type: "integer" - }, - title: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/discussions" - }, - createDiscussionComment: { - deprecated: "octokit.teams.createDiscussionComment() has been renamed to octokit.teams.createDiscussionCommentLegacy() (2020-01-16)", - method: "POST", - params: { - body: { - required: true, - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments" - }, - createDiscussionCommentInOrg: { - method: "POST", - params: { - body: { - required: true, - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments" - }, - createDiscussionCommentLegacy: { - deprecated: "octokit.teams.createDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#create-a-comment-legacy", - method: "POST", - params: { - body: { - required: true, - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments" - }, - createDiscussionInOrg: { - method: "POST", - params: { - body: { - required: true, - type: "string" - }, - org: { - required: true, - type: "string" - }, - private: { - type: "boolean" - }, - team_slug: { - required: true, - type: "string" - }, - title: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions" - }, - createDiscussionLegacy: { - deprecated: "octokit.teams.createDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#create-a-discussion-legacy", - method: "POST", - params: { - body: { - required: true, - type: "string" - }, - private: { - type: "boolean" - }, - team_id: { - required: true, - type: "integer" - }, - title: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/discussions" - }, - delete: { - deprecated: "octokit.teams.delete() has been renamed to octokit.teams.deleteLegacy() (2020-01-16)", - method: "DELETE", - params: { - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id" - }, - deleteDiscussion: { - deprecated: "octokit.teams.deleteDiscussion() has been renamed to octokit.teams.deleteDiscussionLegacy() (2020-01-16)", - method: "DELETE", - params: { - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number" - }, - deleteDiscussionComment: { - deprecated: "octokit.teams.deleteDiscussionComment() has been renamed to octokit.teams.deleteDiscussionCommentLegacy() (2020-01-16)", - method: "DELETE", - params: { - comment_number: { - required: true, - type: "integer" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number" - }, - deleteDiscussionCommentInOrg: { - method: "DELETE", - params: { - comment_number: { - required: true, - type: "integer" - }, - discussion_number: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - } 
- }, - url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number" - }, - deleteDiscussionCommentLegacy: { - deprecated: "octokit.teams.deleteDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#delete-a-comment-legacy", - method: "DELETE", - params: { - comment_number: { - required: true, - type: "integer" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number" - }, - deleteDiscussionInOrg: { - method: "DELETE", - params: { - discussion_number: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number" - }, - deleteDiscussionLegacy: { - deprecated: "octokit.teams.deleteDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#delete-a-discussion-legacy", - method: "DELETE", - params: { - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number" - }, - deleteInOrg: { - method: "DELETE", - params: { - org: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug" - }, - deleteLegacy: { - deprecated: "octokit.teams.deleteLegacy() is deprecated, see https://developer.github.com/v3/teams/#delete-team-legacy", - method: "DELETE", - params: { - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id" - }, - get: { - deprecated: "octokit.teams.get() has been renamed to octokit.teams.getLegacy() (2020-01-16)", - method: "GET", - params: { - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id" - }, - getByName: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug" - }, - getDiscussion: { - deprecated: "octokit.teams.getDiscussion() has been renamed to octokit.teams.getDiscussionLegacy() (2020-01-16)", - method: "GET", - params: { - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number" - }, - getDiscussionComment: { - deprecated: "octokit.teams.getDiscussionComment() has been renamed to octokit.teams.getDiscussionCommentLegacy() (2020-01-16)", - method: "GET", - params: { - comment_number: { - required: true, - type: "integer" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number" - }, - getDiscussionCommentInOrg: { - method: "GET", - params: { - comment_number: { - required: true, - type: "integer" - }, - discussion_number: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number" - }, - getDiscussionCommentLegacy: { - deprecated: "octokit.teams.getDiscussionCommentLegacy() is deprecated, see 
https://developer.github.com/v3/teams/discussion_comments/#get-a-single-comment-legacy", - method: "GET", - params: { - comment_number: { - required: true, - type: "integer" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number" - }, - getDiscussionInOrg: { - method: "GET", - params: { - discussion_number: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number" - }, - getDiscussionLegacy: { - deprecated: "octokit.teams.getDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#get-a-single-discussion-legacy", - method: "GET", - params: { - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number" - }, - getLegacy: { - deprecated: "octokit.teams.getLegacy() is deprecated, see https://developer.github.com/v3/teams/#get-team-legacy", - method: "GET", - params: { - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id" - }, - getMember: { - deprecated: "octokit.teams.getMember() has been renamed to octokit.teams.getMemberLegacy() (2020-01-16)", - method: "GET", - params: { - team_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/members/:username" - }, - getMemberLegacy: { - deprecated: "octokit.teams.getMemberLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#get-team-member-legacy", - method: "GET", - params: { - team_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/members/:username" - }, - getMembership: { - deprecated: "octokit.teams.getMembership() has been renamed to octokit.teams.getMembershipLegacy() (2020-01-16)", - method: "GET", - params: { - team_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/memberships/:username" - }, - getMembershipInOrg: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/memberships/:username" - }, - getMembershipLegacy: { - deprecated: "octokit.teams.getMembershipLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#get-team-membership-legacy", - method: "GET", - params: { - team_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/memberships/:username" - }, - list: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/orgs/:org/teams" - }, - listChild: { - deprecated: "octokit.teams.listChild() has been renamed to octokit.teams.listChildLegacy() (2020-01-16)", - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/teams" - }, - listChildInOrg: { - method: "GET", - params: { - org: { - required: true, - type: 
"string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/teams" - }, - listChildLegacy: { - deprecated: "octokit.teams.listChildLegacy() is deprecated, see https://developer.github.com/v3/teams/#list-child-teams-legacy", - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/teams" - }, - listDiscussionComments: { - deprecated: "octokit.teams.listDiscussionComments() has been renamed to octokit.teams.listDiscussionCommentsLegacy() (2020-01-16)", - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments" - }, - listDiscussionCommentsInOrg: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments" - }, - listDiscussionCommentsLegacy: { - deprecated: "octokit.teams.listDiscussionCommentsLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#list-comments-legacy", - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments" - }, - listDiscussions: { - deprecated: "octokit.teams.listDiscussions() has been renamed to octokit.teams.listDiscussionsLegacy() (2020-01-16)", - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions" - }, - listDiscussionsInOrg: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions" - }, - listDiscussionsLegacy: { - deprecated: "octokit.teams.listDiscussionsLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#list-discussions-legacy", - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions" - }, - listForAuthenticatedUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/teams" - }, - listMembers: { - deprecated: "octokit.teams.listMembers() has been renamed to octokit.teams.listMembersLegacy() (2020-01-16)", - method: "GET", - params: { - 
page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - role: { - enum: ["member", "maintainer", "all"], - type: "string" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/members" - }, - listMembersInOrg: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - role: { - enum: ["member", "maintainer", "all"], - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/members" - }, - listMembersLegacy: { - deprecated: "octokit.teams.listMembersLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#list-team-members-legacy", - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - role: { - enum: ["member", "maintainer", "all"], - type: "string" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/members" - }, - listPendingInvitations: { - deprecated: "octokit.teams.listPendingInvitations() has been renamed to octokit.teams.listPendingInvitationsLegacy() (2020-01-16)", - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/invitations" - }, - listPendingInvitationsInOrg: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/invitations" - }, - listPendingInvitationsLegacy: { - deprecated: "octokit.teams.listPendingInvitationsLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#list-pending-team-invitations-legacy", - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/invitations" - }, - listProjects: { - deprecated: "octokit.teams.listProjects() has been renamed to octokit.teams.listProjectsLegacy() (2020-01-16)", - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/projects" - }, - listProjectsInOrg: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/projects" - }, - listProjectsLegacy: { - deprecated: "octokit.teams.listProjectsLegacy() is deprecated, see https://developer.github.com/v3/teams/#list-team-projects-legacy", - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/projects" - }, - listRepos: { - deprecated: "octokit.teams.listRepos() has been renamed to octokit.teams.listReposLegacy() (2020-01-16)", - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: 
"/teams/:team_id/repos" - }, - listReposInOrg: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/repos" - }, - listReposLegacy: { - deprecated: "octokit.teams.listReposLegacy() is deprecated, see https://developer.github.com/v3/teams/#list-team-repos-legacy", - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/repos" - }, - removeMember: { - deprecated: "octokit.teams.removeMember() has been renamed to octokit.teams.removeMemberLegacy() (2020-01-16)", - method: "DELETE", - params: { - team_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/members/:username" - }, - removeMemberLegacy: { - deprecated: "octokit.teams.removeMemberLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#remove-team-member-legacy", - method: "DELETE", - params: { - team_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/members/:username" - }, - removeMembership: { - deprecated: "octokit.teams.removeMembership() has been renamed to octokit.teams.removeMembershipLegacy() (2020-01-16)", - method: "DELETE", - params: { - team_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/memberships/:username" - }, - removeMembershipInOrg: { - method: "DELETE", - params: { - org: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/memberships/:username" - }, - removeMembershipLegacy: { - deprecated: "octokit.teams.removeMembershipLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#remove-team-membership-legacy", - method: "DELETE", - params: { - team_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/memberships/:username" - }, - removeProject: { - deprecated: "octokit.teams.removeProject() has been renamed to octokit.teams.removeProjectLegacy() (2020-01-16)", - method: "DELETE", - params: { - project_id: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/projects/:project_id" - }, - removeProjectInOrg: { - method: "DELETE", - params: { - org: { - required: true, - type: "string" - }, - project_id: { - required: true, - type: "integer" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/projects/:project_id" - }, - removeProjectLegacy: { - deprecated: "octokit.teams.removeProjectLegacy() is deprecated, see https://developer.github.com/v3/teams/#remove-team-project-legacy", - method: "DELETE", - params: { - project_id: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/projects/:project_id" - }, - removeRepo: { - deprecated: "octokit.teams.removeRepo() has been renamed to octokit.teams.removeRepoLegacy() (2020-01-16)", - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - 
type: "string" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/repos/:owner/:repo" - }, - removeRepoInOrg: { - method: "DELETE", - params: { - org: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/repos/:owner/:repo" - }, - removeRepoLegacy: { - deprecated: "octokit.teams.removeRepoLegacy() is deprecated, see https://developer.github.com/v3/teams/#remove-team-repository-legacy", - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/repos/:owner/:repo" - }, - reviewProject: { - deprecated: "octokit.teams.reviewProject() has been renamed to octokit.teams.reviewProjectLegacy() (2020-01-16)", - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - project_id: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/projects/:project_id" - }, - reviewProjectInOrg: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - project_id: { - required: true, - type: "integer" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/projects/:project_id" - }, - reviewProjectLegacy: { - deprecated: "octokit.teams.reviewProjectLegacy() is deprecated, see https://developer.github.com/v3/teams/#review-a-team-project-legacy", - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - project_id: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/projects/:project_id" - }, - update: { - deprecated: "octokit.teams.update() has been renamed to octokit.teams.updateLegacy() (2020-01-16)", - method: "PATCH", - params: { - description: { - type: "string" - }, - name: { - required: true, - type: "string" - }, - parent_team_id: { - type: "integer" - }, - permission: { - enum: ["pull", "push", "admin"], - type: "string" - }, - privacy: { - enum: ["secret", "closed"], - type: "string" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id" - }, - updateDiscussion: { - deprecated: "octokit.teams.updateDiscussion() has been renamed to octokit.teams.updateDiscussionLegacy() (2020-01-16)", - method: "PATCH", - params: { - body: { - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - }, - title: { - type: "string" - } - }, - url: "/teams/:team_id/discussions/:discussion_number" - }, - updateDiscussionComment: { - deprecated: "octokit.teams.updateDiscussionComment() has been renamed to octokit.teams.updateDiscussionCommentLegacy() (2020-01-16)", - method: "PATCH", - params: { - body: { - required: true, - type: "string" - }, - comment_number: { - required: true, - type: "integer" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number" - }, - updateDiscussionCommentInOrg: { - method: "PATCH", - params: { - 
body: { - required: true, - type: "string" - }, - comment_number: { - required: true, - type: "integer" - }, - discussion_number: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number" - }, - updateDiscussionCommentLegacy: { - deprecated: "octokit.teams.updateDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#edit-a-comment-legacy", - method: "PATCH", - params: { - body: { - required: true, - type: "string" - }, - comment_number: { - required: true, - type: "integer" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number" - }, - updateDiscussionInOrg: { - method: "PATCH", - params: { - body: { - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - }, - title: { - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number" - }, - updateDiscussionLegacy: { - deprecated: "octokit.teams.updateDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#edit-a-discussion-legacy", - method: "PATCH", - params: { - body: { - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - }, - title: { - type: "string" - } - }, - url: "/teams/:team_id/discussions/:discussion_number" - }, - updateInOrg: { - method: "PATCH", - params: { - description: { - type: "string" - }, - name: { - required: true, - type: "string" - }, - org: { - required: true, - type: "string" - }, - parent_team_id: { - type: "integer" - }, - permission: { - enum: ["pull", "push", "admin"], - type: "string" - }, - privacy: { - enum: ["secret", "closed"], - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug" - }, - updateLegacy: { - deprecated: "octokit.teams.updateLegacy() is deprecated, see https://developer.github.com/v3/teams/#edit-team-legacy", - method: "PATCH", - params: { - description: { - type: "string" - }, - name: { - required: true, - type: "string" - }, - parent_team_id: { - type: "integer" - }, - permission: { - enum: ["pull", "push", "admin"], - type: "string" - }, - privacy: { - enum: ["secret", "closed"], - type: "string" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id" - } - }, - users: { - addEmails: { - method: "POST", - params: { - emails: { - required: true, - type: "string[]" - } - }, - url: "/user/emails" - }, - block: { - method: "PUT", - params: { - username: { - required: true, - type: "string" - } - }, - url: "/user/blocks/:username" - }, - checkBlocked: { - method: "GET", - params: { - username: { - required: true, - type: "string" - } - }, - url: "/user/blocks/:username" - }, - checkFollowing: { - method: "GET", - params: { - username: { - required: true, - type: "string" - } - }, - url: "/user/following/:username" - }, - checkFollowingForUser: { - method: "GET", - params: { - target_user: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/following/:target_user" - }, - 
createGpgKey: { - method: "POST", - params: { - armored_public_key: { - type: "string" - } - }, - url: "/user/gpg_keys" - }, - createPublicKey: { - method: "POST", - params: { - key: { - type: "string" - }, - title: { - type: "string" - } - }, - url: "/user/keys" - }, - deleteEmails: { - method: "DELETE", - params: { - emails: { - required: true, - type: "string[]" - } - }, - url: "/user/emails" - }, - deleteGpgKey: { - method: "DELETE", - params: { - gpg_key_id: { - required: true, - type: "integer" - } - }, - url: "/user/gpg_keys/:gpg_key_id" - }, - deletePublicKey: { - method: "DELETE", - params: { - key_id: { - required: true, - type: "integer" - } - }, - url: "/user/keys/:key_id" - }, - follow: { - method: "PUT", - params: { - username: { - required: true, - type: "string" - } - }, - url: "/user/following/:username" - }, - getAuthenticated: { - method: "GET", - params: {}, - url: "/user" - }, - getByUsername: { - method: "GET", - params: { - username: { - required: true, - type: "string" - } - }, - url: "/users/:username" - }, - getContextForUser: { - method: "GET", - params: { - subject_id: { - type: "string" - }, - subject_type: { - enum: ["organization", "repository", "issue", "pull_request"], - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/hovercard" - }, - getGpgKey: { - method: "GET", - params: { - gpg_key_id: { - required: true, - type: "integer" - } - }, - url: "/user/gpg_keys/:gpg_key_id" - }, - getPublicKey: { - method: "GET", - params: { - key_id: { - required: true, - type: "integer" - } - }, - url: "/user/keys/:key_id" - }, - list: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - since: { - type: "string" - } - }, - url: "/users" - }, - listBlocked: { - method: "GET", - params: {}, - url: "/user/blocks" - }, - listEmails: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/emails" - }, - listFollowersForAuthenticatedUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/followers" - }, - listFollowersForUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/followers" - }, - listFollowingForAuthenticatedUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/following" - }, - listFollowingForUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/following" - }, - listGpgKeys: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/gpg_keys" - }, - listGpgKeysForUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/gpg_keys" - }, - listPublicEmails: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/public_emails" - }, - listPublicKeys: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/keys" - }, - listPublicKeysForUser: { - method: "GET", - params: { - page: { - 
type: "integer" - }, - per_page: { - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/keys" - }, - togglePrimaryEmailVisibility: { - method: "PATCH", - params: { - email: { - required: true, - type: "string" - }, - visibility: { - required: true, - type: "string" - } - }, - url: "/user/email/visibility" - }, - unblock: { - method: "DELETE", - params: { - username: { - required: true, - type: "string" - } - }, - url: "/user/blocks/:username" - }, - unfollow: { - method: "DELETE", - params: { - username: { - required: true, - type: "string" - } - }, - url: "/user/following/:username" - }, - updateAuthenticated: { - method: "PATCH", - params: { - bio: { - type: "string" - }, - blog: { - type: "string" - }, - company: { - type: "string" - }, - email: { - type: "string" - }, - hireable: { - type: "boolean" - }, - location: { - type: "string" - }, - name: { - type: "string" - } - }, - url: "/user" - } - } -}; - -const VERSION = "2.4.0"; - -function registerEndpoints(octokit, routes) { - Object.keys(routes).forEach(namespaceName => { - if (!octokit[namespaceName]) { - octokit[namespaceName] = {}; - } - - Object.keys(routes[namespaceName]).forEach(apiName => { - const apiOptions = routes[namespaceName][apiName]; - const endpointDefaults = ["method", "url", "headers"].reduce((map, key) => { - if (typeof apiOptions[key] !== "undefined") { - map[key] = apiOptions[key]; - } - - return map; - }, {}); - endpointDefaults.request = { - validate: apiOptions.params - }; - let request = octokit.request.defaults(endpointDefaults); // patch request & endpoint methods to support deprecated parameters. - // Not the most elegant solution, but we don’t want to move deprecation - // logic into octokit/endpoint.js as it’s out of scope - - const hasDeprecatedParam = Object.keys(apiOptions.params || {}).find(key => apiOptions.params[key].deprecated); - - if (hasDeprecatedParam) { - const patch = patchForDeprecation.bind(null, octokit, apiOptions); - request = patch(octokit.request.defaults(endpointDefaults), `.${namespaceName}.${apiName}()`); - request.endpoint = patch(request.endpoint, `.${namespaceName}.${apiName}.endpoint()`); - request.endpoint.merge = patch(request.endpoint.merge, `.${namespaceName}.${apiName}.endpoint.merge()`); - } - - if (apiOptions.deprecated) { - octokit[namespaceName][apiName] = Object.assign(function deprecatedEndpointMethod() { - octokit.log.warn(new deprecation.Deprecation(`[@octokit/rest] ${apiOptions.deprecated}`)); - octokit[namespaceName][apiName] = request; - return request.apply(null, arguments); - }, request); - return; - } - - octokit[namespaceName][apiName] = request; - }); - }); -} - -function patchForDeprecation(octokit, apiOptions, method, methodName) { - const patchedMethod = options => { - options = Object.assign({}, options); - Object.keys(options).forEach(key => { - if (apiOptions.params[key] && apiOptions.params[key].deprecated) { - const aliasKey = apiOptions.params[key].alias; - octokit.log.warn(new deprecation.Deprecation(`[@octokit/rest] "${key}" parameter is deprecated for "${methodName}". Use "${aliasKey}" instead`)); - - if (!(aliasKey in options)) { - options[aliasKey] = options[key]; - } - - delete options[key]; - } - }); - return method(options); - }; - - Object.keys(method).forEach(key => { - patchedMethod[key] = method[key]; - }); - return patchedMethod; -} - -/** - * This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary - * goal is to rebuild @octokit/rest on top of @octokit/core. 
Once that is - * done, we will remove the registerEndpoints methods and return the methods - * directly as with the other plugins. At that point we will also remove the - * legacy workarounds and deprecations. - * - * See the plan at - * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1 - */ - -function restEndpointMethods(octokit) { - // @ts-ignore - octokit.registerEndpoints = registerEndpoints.bind(null, octokit); - registerEndpoints(octokit, endpointsByScope); // Aliasing scopes for backward compatibility - // See https://github.com/octokit/rest.js/pull/1134 - - [["gitdata", "git"], ["authorization", "oauthAuthorizations"], ["pullRequests", "pulls"]].forEach(([deprecatedScope, scope]) => { - Object.defineProperty(octokit, deprecatedScope, { - get() { - octokit.log.warn( // @ts-ignore - new deprecation.Deprecation(`[@octokit/plugin-rest-endpoint-methods] "octokit.${deprecatedScope}.*" methods are deprecated, use "octokit.${scope}.*" instead`)); // @ts-ignore - - return octokit[scope]; - } - - }); - }); - return {}; -} -restEndpointMethods.VERSION = VERSION; - -exports.restEndpointMethods = restEndpointMethods; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 537: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - -var deprecation = __nccwpck_require__(8932); -var once = _interopDefault(__nccwpck_require__(1223)); - -const logOnce = once(deprecation => console.warn(deprecation)); -/** - * Error with extra properties to help with debugging - */ - -class RequestError extends Error { - constructor(message, statusCode, options) { - super(message); // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - - this.name = "HttpError"; - this.status = statusCode; - Object.defineProperty(this, "code", { - get() { - logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); - return statusCode; - } - - }); - this.headers = options.headers || {}; // redact request credentials without mutating original request options - - const requestCopy = Object.assign({}, options.request); - - if (options.request.headers.authorization) { - requestCopy.headers = Object.assign({}, options.request.headers, { - authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") - }); - } - - requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit - // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications - .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended - // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header - .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); - this.request = requestCopy; - } - -} - -exports.RequestError = RequestError; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 6234: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } - -var endpoint = __nccwpck_require__(9440); -var universalUserAgent = __nccwpck_require__(5030); -var isPlainObject = __nccwpck_require__(3287); -var nodeFetch = _interopDefault(__nccwpck_require__(467)); -var requestError = __nccwpck_require__(537); - -const VERSION = "5.4.12"; - -function getBufferResponse(response) { - return response.arrayBuffer(); -} - -function fetchWrapper(requestOptions) { - if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { - requestOptions.body = JSON.stringify(requestOptions.body); - } - - let headers = {}; - let status; - let url; - const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; - return fetch(requestOptions.url, Object.assign({ - method: requestOptions.method, - body: requestOptions.body, - headers: requestOptions.headers, - redirect: requestOptions.redirect - }, requestOptions.request)).then(response => { - url = response.url; - status = response.status; - - for (const keyAndValue of response.headers) { - headers[keyAndValue[0]] = keyAndValue[1]; - } - - if (status === 204 || status === 205) { - return; - } // GitHub API returns 200 for HEAD requests - - - if (requestOptions.method === "HEAD") { - if (status < 400) { - return; - } - - throw new requestError.RequestError(response.statusText, status, { - headers, - request: requestOptions - }); - } - - if (status === 304) { - throw new requestError.RequestError("Not modified", status, { - headers, - request: requestOptions - }); - } - - if (status >= 400) { - return response.text().then(message => { - const error = new requestError.RequestError(message, status, { - headers, - request: requestOptions - }); - - try { - let responseBody = JSON.parse(error.message); - Object.assign(error, responseBody); - let errors = responseBody.errors; // Assumption `errors` would always be in Array format - - error.message = error.message + ": " + errors.map(JSON.stringify).join(", "); - } catch (e) {// ignore, see octokit/rest.js#684 - } - - throw error; - }); - } - - const contentType = response.headers.get("content-type"); - - if (/application\/json/.test(contentType)) { - return response.json(); - } - - if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { - return response.text(); - } - - return getBufferResponse(response); - }).then(data => { - return { - status, - url, - headers, - data - }; - }).catch(error => { - if (error instanceof requestError.RequestError) { - throw error; - } - - throw new requestError.RequestError(error.message, 500, { - headers, - request: requestOptions - }); - }); -} - -function withDefaults(oldEndpoint, newDefaults) { - const endpoint = oldEndpoint.defaults(newDefaults); - - const newApi = function (route, parameters) { - const endpointOptions = endpoint.merge(route, parameters); - - if (!endpointOptions.request || !endpointOptions.request.hook) { - return fetchWrapper(endpoint.parse(endpointOptions)); - } - - const request = (route, parameters) => { - return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); - }; - - Object.assign(request, { - endpoint, - defaults: withDefaults.bind(null, endpoint) - }); - return endpointOptions.request.hook(request, endpointOptions); - }; - - return Object.assign(newApi, { - endpoint, - defaults: withDefaults.bind(null, endpoint) - }); -} - -const request = withDefaults(endpoint.endpoint, { - headers: { - "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` - } -}); - -exports.request = request; -//# 
sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 1150: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const {spawnSync} = __nccwpck_require__(3129); - -const isString = (a) => typeof a === 'string'; - -module.exports = (str, filter = {}) => { - if (!isString(str)) { - filter = str || {}; - str = run(); - } - - const { - added, - modified, - untracked, - deleted, - renamed, - } = filter; - - const files = parse(str); - const picked = pick(files, { - added, - modified, - untracked, - deleted, - renamed, - }); - - const names = getNames(picked); - - return names; -}; - -const getName = ({name}) => name; - -module.exports.getNames = getNames; -function getNames(files) { - return files.map(getName); -} - -module.exports.run = run; -function run() { - const result = spawnSync('git', ['status', '--porcelain']); - return result.stdout.toString(); -} - -module.exports.parse = parse; -function parse(str) { - const result = []; - const lines = str - .split('\n') - .filter(Boolean); - - for (const line of lines) { - const {name, mode} = parseLine(line); - - result.push({ - name, - mode, - }); - } - - return result; -} - -const UNTRACKED = '?'; -const RENAMED = 'R'; -const ARROW = '-> '; - -// "R a -> b" -> "b" -const cutRenameTo = (line) => { - const i = line.indexOf(ARROW); - const count = i + ARROW.length; - - return line.slice(count); -}; - -function parseLine(line) { - const [first] = line; - - if (first === UNTRACKED) - return { - name: line.replace('?? ', ''), - mode: UNTRACKED, - }; - - if (first === RENAMED) - return { - name: cutRenameTo(line), - mode: RENAMED, - }; - - const [mode] = line.match(/^[\sA-Z]{1,}\s/, ''); - const name = line.replace(mode, ''); - - return { - name, - mode, - }; -} - -const isModified = ({mode}) => /M/.test(mode); -const isAdded = ({mode}) => /A/.test(mode); -const isRenamed = ({mode}) => /R/.test(mode); -const isDeleted = ({mode}) => /D/.test(mode); -const isUntracked = ({mode}) => /\?/.test(mode); - -const check = ({added, modified, untracked, deleted, renamed}) => (file) => { - let is = false; - - if (added) - is = is || isAdded(file); - - if (modified) - is = is || isModified(file); - - if (untracked) - is = is || isUntracked(file); - - if (deleted) - is = is || isDeleted(file); - - if (renamed) - is = is || isRenamed(file); - - return is; -}; - -module.exports.pick = pick; -function pick(files, {added, modified, deleted, untracked, renamed}) { - return files.filter(check({ - added, - modified, - untracked, - deleted, - renamed, - })); -} - - - -/***/ }), - -/***/ 4623: -/***/ (function(module, exports, __nccwpck_require__) { - -"use strict"; - -var __assign = (this && this.__assign) || function () { - __assign = Object.assign || function(t) { - for (var s, i = 1, n = arguments.length; i < n; i++) { - s = arguments[i]; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) - t[p] = s[p]; - } - return t; - }; - return __assign.apply(this, arguments); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getInput = void 0; -var dotenv_1 = __importDefault(__nccwpck_require__(2437)); -dotenv_1.default.config(); -var VALID_TYPES = ['string', 'array', 'boolean', 'number']; -var DEFAULT_OPTIONS = { - required: false, - type: 'string', - disableable: false -}; -var getEnvVar = function (key) { - var parsed = process.env["INPUT_" + key.replace(/ /g, '_').toUpperCase()]; - var raw = process.env[key]; - return parsed || raw || undefined; -}; -var parseArray = function (val) { - var array = val.split('\n').join(',').split(','); - var filtered = array.filter(function (n) { return n; }); - return filtered.map(function (n) { return n.trim(); }); -}; -var parseBoolean = function (val) { - var trueValue = ['true', 'True', 'TRUE']; - var falseValue = ['false', 'False', 'FALSE']; - if (trueValue.includes(val)) - return true; - if (falseValue.includes(val)) - return false; - throw new Error('boolean input has to be one of \`true | True | TRUE | false | False | FALSE\`'); -}; -var parseNumber = function (val) { - var parsed = Number(val); - if (isNaN(parsed)) - throw new Error('input has to be a valid number'); - return parsed; -}; -var parseValue = function (val, type) { - if (type === 'array') { - return parseArray(val); - } - if (type === 'boolean') { - return parseBoolean(val); - } - if (type === 'number') { - return parseNumber(val); - } - return val.trim(); -}; -var getInput = function (key, opts) { - var parsedOptions; - if (typeof key === 'string') { - parsedOptions = __assign({ key: key }, opts); - } - else if (typeof key === 'object') { - parsedOptions = key; - } - else { - throw new Error('No key for input specified'); - } - if (!parsedOptions.key) - throw new Error('No key for input specified'); - var options = Object.assign({}, DEFAULT_OPTIONS, parsedOptions); - if (VALID_TYPES.includes(options.type) === false) - throw new Error('option type has to be one of `string | array | boolean | number`'); - var val = getEnvVar(options.key); - if (options.disableable && val === 'false') - return undefined; - var parsed = val !== undefined ? parseValue(val, options.type) : undefined; - if (parsed === undefined) { - if (options.required) - throw new Error("Input `" + options.key + "` is required but was not provided."); - if (options.default !== undefined) - return options.default; - return undefined; - } - if (options.modifier) - return options.modifier(parsed); - return parsed; -}; -exports.getInput = getInput; -module.exports.getInput = exports.getInput; - - -/***/ }), - -/***/ 5224: -/***/ ((module) => { - -module.exports = function atob(str) { - return Buffer.from(str, 'base64').toString('binary') -} - - -/***/ }), - -/***/ 3682: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var register = __nccwpck_require__(4670) -var addHook = __nccwpck_require__(5549) -var removeHook = __nccwpck_require__(6819) - -// bind with array of arguments: https://stackoverflow.com/a/21792913 -var bind = Function.bind -var bindable = bind.bind(bind) - -function bindApi (hook, state, name) { - var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state]) - hook.api = { remove: removeHookRef } - hook.remove = removeHookRef - - ;['before', 'error', 'after', 'wrap'].forEach(function (kind) { - var args = name ? 
[state, kind, name] : [state, kind] - hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args) - }) -} - -function HookSingular () { - var singularHookName = 'h' - var singularHookState = { - registry: {} - } - var singularHook = register.bind(null, singularHookState, singularHookName) - bindApi(singularHook, singularHookState, singularHookName) - return singularHook -} - -function HookCollection () { - var state = { - registry: {} - } - - var hook = register.bind(null, state) - bindApi(hook, state) - - return hook -} - -var collectionHookDeprecationMessageDisplayed = false -function Hook () { - if (!collectionHookDeprecationMessageDisplayed) { - console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4') - collectionHookDeprecationMessageDisplayed = true - } - return HookCollection() -} - -Hook.Singular = HookSingular.bind() -Hook.Collection = HookCollection.bind() - -module.exports = Hook -// expose constructors as a named property for TypeScript -module.exports.Hook = Hook -module.exports.Singular = Hook.Singular -module.exports.Collection = Hook.Collection - - -/***/ }), - -/***/ 5549: -/***/ ((module) => { - -module.exports = addHook - -function addHook (state, kind, name, hook) { - var orig = hook - if (!state.registry[name]) { - state.registry[name] = [] - } - - if (kind === 'before') { - hook = function (method, options) { - return Promise.resolve() - .then(orig.bind(null, options)) - .then(method.bind(null, options)) - } - } - - if (kind === 'after') { - hook = function (method, options) { - var result - return Promise.resolve() - .then(method.bind(null, options)) - .then(function (result_) { - result = result_ - return orig(result, options) - }) - .then(function () { - return result - }) - } - } - - if (kind === 'error') { - hook = function (method, options) { - return Promise.resolve() - .then(method.bind(null, options)) - .catch(function (error) { - return orig(error, options) - }) - } - } - - state.registry[name].push({ - hook: hook, - orig: orig - }) -} - - -/***/ }), - -/***/ 4670: -/***/ ((module) => { - -module.exports = register - -function register (state, name, method, options) { - if (typeof method !== 'function') { - throw new Error('method for before hook must be a function') - } - - if (!options) { - options = {} - } - - if (Array.isArray(name)) { - return name.reverse().reduce(function (callback, name) { - return register.bind(null, state, name, callback, options) - }, method)() - } - - return Promise.resolve() - .then(function () { - if (!state.registry[name]) { - return method(options) - } - - return (state.registry[name]).reduce(function (method, registered) { - return registered.hook.bind(null, method, options) - }, method)() - }) -} - - -/***/ }), - -/***/ 6819: -/***/ ((module) => { - -module.exports = removeHook - -function removeHook (state, name, method) { - if (!state.registry[name]) { - return - } - - var index = state.registry[name] - .map(function (registered) { return registered.orig }) - .indexOf(method) - - if (index === -1) { - return - } - - state.registry[name].splice(index, 1) -} - - -/***/ }), - -/***/ 2358: -/***/ ((module) => { - -module.exports = function btoa(str) { - return new Buffer(str).toString('base64') -} - - -/***/ }), - -/***/ 8932: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -class Deprecation extends Error { - constructor(message) { - 
super(message); // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - - this.name = 'Deprecation'; - } - -} - -exports.Deprecation = Deprecation; - - -/***/ }), - -/***/ 2437: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -/* @flow */ -/*:: - -type DotenvParseOptions = { - debug?: boolean -} - -// keys and values from src -type DotenvParseOutput = { [string]: string } - -type DotenvConfigOptions = { - path?: string, // path to .env file - encoding?: string, // encoding of .env file - debug?: string // turn on logging for debugging purposes -} - -type DotenvConfigOutput = { - parsed?: DotenvParseOutput, - error?: Error -} - -*/ - -const fs = __nccwpck_require__(5747) -const path = __nccwpck_require__(5622) - -function log (message /*: string */) { - console.log(`[dotenv][DEBUG] ${message}`) -} - -const NEWLINE = '\n' -const RE_INI_KEY_VAL = /^\s*([\w.-]+)\s*=\s*(.*)?\s*$/ -const RE_NEWLINES = /\\n/g -const NEWLINES_MATCH = /\n|\r|\r\n/ - -// Parses src into an Object -function parse (src /*: string | Buffer */, options /*: ?DotenvParseOptions */) /*: DotenvParseOutput */ { - const debug = Boolean(options && options.debug) - const obj = {} - - // convert Buffers before splitting into lines and processing - src.toString().split(NEWLINES_MATCH).forEach(function (line, idx) { - // matching "KEY' and 'VAL' in 'KEY=VAL' - const keyValueArr = line.match(RE_INI_KEY_VAL) - // matched? - if (keyValueArr != null) { - const key = keyValueArr[1] - // default undefined or missing values to empty string - let val = (keyValueArr[2] || '') - const end = val.length - 1 - const isDoubleQuoted = val[0] === '"' && val[end] === '"' - const isSingleQuoted = val[0] === "'" && val[end] === "'" - - // if single or double quoted, remove quotes - if (isSingleQuoted || isDoubleQuoted) { - val = val.substring(1, end) - - // if double quoted, expand newlines - if (isDoubleQuoted) { - val = val.replace(RE_NEWLINES, NEWLINE) - } - } else { - // remove surrounding whitespace - val = val.trim() - } - - obj[key] = val - } else if (debug) { - log(`did not match key and value when parsing line ${idx + 1}: ${line}`) - } - }) - - return obj -} - -// Populates process.env from .env file -function config (options /*: ?DotenvConfigOptions */) /*: DotenvConfigOutput */ { - let dotenvPath = path.resolve(process.cwd(), '.env') - let encoding /*: string */ = 'utf8' - let debug = false - - if (options) { - if (options.path != null) { - dotenvPath = options.path - } - if (options.encoding != null) { - encoding = options.encoding - } - if (options.debug != null) { - debug = true - } - } - - try { - // specifying an encoding returns a string instead of a buffer - const parsed = parse(fs.readFileSync(dotenvPath, { encoding }), { debug }) - - Object.keys(parsed).forEach(function (key) { - if (!Object.prototype.hasOwnProperty.call(process.env, key)) { - process.env[key] = parsed[key] - } else if (debug) { - log(`"${key}" is already defined in \`process.env\` and will not be overwritten`) - } - }) - - return { parsed } - } catch (e) { - return { error: e } - } -} - -module.exports.config = config -module.exports.parse = parse - - -/***/ }), - -/***/ 1205: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var once = __nccwpck_require__(1223); - -var noop = function() {}; - -var isRequest = function(stream) { - return stream.setHeader && typeof stream.abort === 'function'; -}; - 
-var isChildProcess = function(stream) { - return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 -}; - -var eos = function(stream, opts, callback) { - if (typeof opts === 'function') return eos(stream, null, opts); - if (!opts) opts = {}; - - callback = once(callback || noop); - - var ws = stream._writableState; - var rs = stream._readableState; - var readable = opts.readable || (opts.readable !== false && stream.readable); - var writable = opts.writable || (opts.writable !== false && stream.writable); - var cancelled = false; - - var onlegacyfinish = function() { - if (!stream.writable) onfinish(); - }; - - var onfinish = function() { - writable = false; - if (!readable) callback.call(stream); - }; - - var onend = function() { - readable = false; - if (!writable) callback.call(stream); - }; - - var onexit = function(exitCode) { - callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null); - }; - - var onerror = function(err) { - callback.call(stream, err); - }; - - var onclose = function() { - process.nextTick(onclosenexttick); - }; - - var onclosenexttick = function() { - if (cancelled) return; - if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); - if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); - }; - - var onrequest = function() { - stream.req.on('finish', onfinish); - }; - - if (isRequest(stream)) { - stream.on('complete', onfinish); - stream.on('abort', onclose); - if (stream.req) onrequest(); - else stream.on('request', onrequest); - } else if (writable && !ws) { // legacy streams - stream.on('end', onlegacyfinish); - stream.on('close', onlegacyfinish); - } - - if (isChildProcess(stream)) stream.on('exit', onexit); - - stream.on('end', onend); - stream.on('finish', onfinish); - if (opts.error !== false) stream.on('error', onerror); - stream.on('close', onclose); - - return function() { - cancelled = true; - stream.removeListener('complete', onfinish); - stream.removeListener('abort', onclose); - stream.removeListener('request', onrequest); - if (stream.req) stream.req.removeListener('finish', onfinish); - stream.removeListener('end', onlegacyfinish); - stream.removeListener('close', onlegacyfinish); - stream.removeListener('finish', onfinish); - stream.removeListener('exit', onexit); - stream.removeListener('end', onend); - stream.removeListener('error', onerror); - stream.removeListener('close', onclose); - }; -}; - -module.exports = eos; - - -/***/ }), - -/***/ 3338: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) -const path = __nccwpck_require__(5622) -const mkdirsSync = __nccwpck_require__(2915).mkdirsSync -const utimesMillisSync = __nccwpck_require__(2548).utimesMillisSync -const stat = __nccwpck_require__(3901) - -function copySync (src, dest, opts) { - if (typeof opts === 'function') { - opts = { filter: opts } - } - - opts = opts || {} - opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now - opts.overwrite = 'overwrite' in opts ? 
!!opts.overwrite : opts.clobber // overwrite falls back to clobber - - // Warn about using preserveTimestamps on 32-bit node - if (opts.preserveTimestamps && process.arch === 'ia32') { - console.warn(`fs-extra: Using the preserveTimestamps option in 32-bit node is not recommended;\n - see https://github.com/jprichardson/node-fs-extra/issues/269`) - } - - const { srcStat, destStat } = stat.checkPathsSync(src, dest, 'copy', opts) - stat.checkParentPathsSync(src, srcStat, dest, 'copy') - return handleFilterAndCopy(destStat, src, dest, opts) -} - -function handleFilterAndCopy (destStat, src, dest, opts) { - if (opts.filter && !opts.filter(src, dest)) return - const destParent = path.dirname(dest) - if (!fs.existsSync(destParent)) mkdirsSync(destParent) - return getStats(destStat, src, dest, opts) -} - -function startCopy (destStat, src, dest, opts) { - if (opts.filter && !opts.filter(src, dest)) return - return getStats(destStat, src, dest, opts) -} - -function getStats (destStat, src, dest, opts) { - const statSync = opts.dereference ? fs.statSync : fs.lstatSync - const srcStat = statSync(src) - - if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts) - else if (srcStat.isFile() || - srcStat.isCharacterDevice() || - srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts) - else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts) - else if (srcStat.isSocket()) throw new Error(`Cannot copy a socket file: ${src}`) - else if (srcStat.isFIFO()) throw new Error(`Cannot copy a FIFO pipe: ${src}`) - throw new Error(`Unknown file: ${src}`) -} - -function onFile (srcStat, destStat, src, dest, opts) { - if (!destStat) return copyFile(srcStat, src, dest, opts) - return mayCopyFile(srcStat, src, dest, opts) -} - -function mayCopyFile (srcStat, src, dest, opts) { - if (opts.overwrite) { - fs.unlinkSync(dest) - return copyFile(srcStat, src, dest, opts) - } else if (opts.errorOnExist) { - throw new Error(`'${dest}' already exists`) - } -} - -function copyFile (srcStat, src, dest, opts) { - fs.copyFileSync(src, dest) - if (opts.preserveTimestamps) handleTimestamps(srcStat.mode, src, dest) - return setDestMode(dest, srcStat.mode) -} - -function handleTimestamps (srcMode, src, dest) { - // Make sure the file is writable before setting the timestamp - // otherwise open fails with EPERM when invoked with 'r+' - // (through utimes call) - if (fileIsNotWritable(srcMode)) makeFileWritable(dest, srcMode) - return setDestTimestamps(src, dest) -} - -function fileIsNotWritable (srcMode) { - return (srcMode & 0o200) === 0 -} - -function makeFileWritable (dest, srcMode) { - return setDestMode(dest, srcMode | 0o200) -} - -function setDestMode (dest, srcMode) { - return fs.chmodSync(dest, srcMode) -} - -function setDestTimestamps (src, dest) { - // The initial srcStat.atime cannot be trusted - // because it is modified by the read(2) system call - // (See https://nodejs.org/api/fs.html#fs_stat_time_values) - const updatedSrcStat = fs.statSync(src) - return utimesMillisSync(dest, updatedSrcStat.atime, updatedSrcStat.mtime) -} - -function onDir (srcStat, destStat, src, dest, opts) { - if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts) - return copyDir(src, dest, opts) -} - -function mkDirAndCopy (srcMode, src, dest, opts) { - fs.mkdirSync(dest) - copyDir(src, dest, opts) - return setDestMode(dest, srcMode) -} - -function copyDir (src, dest, opts) { - fs.readdirSync(src).forEach(item => copyDirItem(item, src, dest, opts)) -} - -function copyDirItem 
(item, src, dest, opts) { - const srcItem = path.join(src, item) - const destItem = path.join(dest, item) - const { destStat } = stat.checkPathsSync(srcItem, destItem, 'copy', opts) - return startCopy(destStat, srcItem, destItem, opts) -} - -function onLink (destStat, src, dest, opts) { - let resolvedSrc = fs.readlinkSync(src) - if (opts.dereference) { - resolvedSrc = path.resolve(process.cwd(), resolvedSrc) - } - - if (!destStat) { - return fs.symlinkSync(resolvedSrc, dest) - } else { - let resolvedDest - try { - resolvedDest = fs.readlinkSync(dest) - } catch (err) { - // dest exists and is a regular file or directory, - // Windows may throw UNKNOWN error. If dest already exists, - // fs throws error anyway, so no need to guard against it here. - if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlinkSync(resolvedSrc, dest) - throw err - } - if (opts.dereference) { - resolvedDest = path.resolve(process.cwd(), resolvedDest) - } - if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) { - throw new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`) - } - - // prevent copy if src is a subdir of dest since unlinking - // dest in this case would result in removing src contents - // and therefore a broken symlink would be created. - if (fs.statSync(dest).isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) { - throw new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`) - } - return copyLink(resolvedSrc, dest) - } -} - -function copyLink (resolvedSrc, dest) { - fs.unlinkSync(dest) - return fs.symlinkSync(resolvedSrc, dest) -} - -module.exports = copySync - - -/***/ }), - -/***/ 1135: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -module.exports = { - copySync: __nccwpck_require__(3338) -} - - -/***/ }), - -/***/ 8834: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) -const path = __nccwpck_require__(5622) -const mkdirs = __nccwpck_require__(2915).mkdirs -const pathExists = __nccwpck_require__(3835).pathExists -const utimesMillis = __nccwpck_require__(2548).utimesMillis -const stat = __nccwpck_require__(3901) - -function copy (src, dest, opts, cb) { - if (typeof opts === 'function' && !cb) { - cb = opts - opts = {} - } else if (typeof opts === 'function') { - opts = { filter: opts } - } - - cb = cb || function () {} - opts = opts || {} - - opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now - opts.overwrite = 'overwrite' in opts ? 
!!opts.overwrite : opts.clobber // overwrite falls back to clobber - - // Warn about using preserveTimestamps on 32-bit node - if (opts.preserveTimestamps && process.arch === 'ia32') { - console.warn(`fs-extra: Using the preserveTimestamps option in 32-bit node is not recommended;\n - see https://github.com/jprichardson/node-fs-extra/issues/269`) - } - - stat.checkPaths(src, dest, 'copy', opts, (err, stats) => { - if (err) return cb(err) - const { srcStat, destStat } = stats - stat.checkParentPaths(src, srcStat, dest, 'copy', err => { - if (err) return cb(err) - if (opts.filter) return handleFilter(checkParentDir, destStat, src, dest, opts, cb) - return checkParentDir(destStat, src, dest, opts, cb) - }) - }) -} - -function checkParentDir (destStat, src, dest, opts, cb) { - const destParent = path.dirname(dest) - pathExists(destParent, (err, dirExists) => { - if (err) return cb(err) - if (dirExists) return getStats(destStat, src, dest, opts, cb) - mkdirs(destParent, err => { - if (err) return cb(err) - return getStats(destStat, src, dest, opts, cb) - }) - }) -} - -function handleFilter (onInclude, destStat, src, dest, opts, cb) { - Promise.resolve(opts.filter(src, dest)).then(include => { - if (include) return onInclude(destStat, src, dest, opts, cb) - return cb() - }, error => cb(error)) -} - -function startCopy (destStat, src, dest, opts, cb) { - if (opts.filter) return handleFilter(getStats, destStat, src, dest, opts, cb) - return getStats(destStat, src, dest, opts, cb) -} - -function getStats (destStat, src, dest, opts, cb) { - const stat = opts.dereference ? fs.stat : fs.lstat - stat(src, (err, srcStat) => { - if (err) return cb(err) - - if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts, cb) - else if (srcStat.isFile() || - srcStat.isCharacterDevice() || - srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts, cb) - else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts, cb) - else if (srcStat.isSocket()) return cb(new Error(`Cannot copy a socket file: ${src}`)) - else if (srcStat.isFIFO()) return cb(new Error(`Cannot copy a FIFO pipe: ${src}`)) - return cb(new Error(`Unknown file: ${src}`)) - }) -} - -function onFile (srcStat, destStat, src, dest, opts, cb) { - if (!destStat) return copyFile(srcStat, src, dest, opts, cb) - return mayCopyFile(srcStat, src, dest, opts, cb) -} - -function mayCopyFile (srcStat, src, dest, opts, cb) { - if (opts.overwrite) { - fs.unlink(dest, err => { - if (err) return cb(err) - return copyFile(srcStat, src, dest, opts, cb) - }) - } else if (opts.errorOnExist) { - return cb(new Error(`'${dest}' already exists`)) - } else return cb() -} - -function copyFile (srcStat, src, dest, opts, cb) { - fs.copyFile(src, dest, err => { - if (err) return cb(err) - if (opts.preserveTimestamps) return handleTimestampsAndMode(srcStat.mode, src, dest, cb) - return setDestMode(dest, srcStat.mode, cb) - }) -} - -function handleTimestampsAndMode (srcMode, src, dest, cb) { - // Make sure the file is writable before setting the timestamp - // otherwise open fails with EPERM when invoked with 'r+' - // (through utimes call) - if (fileIsNotWritable(srcMode)) { - return makeFileWritable(dest, srcMode, err => { - if (err) return cb(err) - return setDestTimestampsAndMode(srcMode, src, dest, cb) - }) - } - return setDestTimestampsAndMode(srcMode, src, dest, cb) -} - -function fileIsNotWritable (srcMode) { - return (srcMode & 0o200) === 0 -} - -function makeFileWritable (dest, srcMode, cb) { - return setDestMode(dest, 
srcMode | 0o200, cb) -} - -function setDestTimestampsAndMode (srcMode, src, dest, cb) { - setDestTimestamps(src, dest, err => { - if (err) return cb(err) - return setDestMode(dest, srcMode, cb) - }) -} - -function setDestMode (dest, srcMode, cb) { - return fs.chmod(dest, srcMode, cb) -} - -function setDestTimestamps (src, dest, cb) { - // The initial srcStat.atime cannot be trusted - // because it is modified by the read(2) system call - // (See https://nodejs.org/api/fs.html#fs_stat_time_values) - fs.stat(src, (err, updatedSrcStat) => { - if (err) return cb(err) - return utimesMillis(dest, updatedSrcStat.atime, updatedSrcStat.mtime, cb) - }) -} - -function onDir (srcStat, destStat, src, dest, opts, cb) { - if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts, cb) - return copyDir(src, dest, opts, cb) -} - -function mkDirAndCopy (srcMode, src, dest, opts, cb) { - fs.mkdir(dest, err => { - if (err) return cb(err) - copyDir(src, dest, opts, err => { - if (err) return cb(err) - return setDestMode(dest, srcMode, cb) - }) - }) -} - -function copyDir (src, dest, opts, cb) { - fs.readdir(src, (err, items) => { - if (err) return cb(err) - return copyDirItems(items, src, dest, opts, cb) - }) -} - -function copyDirItems (items, src, dest, opts, cb) { - const item = items.pop() - if (!item) return cb() - return copyDirItem(items, item, src, dest, opts, cb) -} - -function copyDirItem (items, item, src, dest, opts, cb) { - const srcItem = path.join(src, item) - const destItem = path.join(dest, item) - stat.checkPaths(srcItem, destItem, 'copy', opts, (err, stats) => { - if (err) return cb(err) - const { destStat } = stats - startCopy(destStat, srcItem, destItem, opts, err => { - if (err) return cb(err) - return copyDirItems(items, src, dest, opts, cb) - }) - }) -} - -function onLink (destStat, src, dest, opts, cb) { - fs.readlink(src, (err, resolvedSrc) => { - if (err) return cb(err) - if (opts.dereference) { - resolvedSrc = path.resolve(process.cwd(), resolvedSrc) - } - - if (!destStat) { - return fs.symlink(resolvedSrc, dest, cb) - } else { - fs.readlink(dest, (err, resolvedDest) => { - if (err) { - // dest exists and is a regular file or directory, - // Windows may throw UNKNOWN error. If dest already exists, - // fs throws error anyway, so no need to guard against it here. - if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlink(resolvedSrc, dest, cb) - return cb(err) - } - if (opts.dereference) { - resolvedDest = path.resolve(process.cwd(), resolvedDest) - } - if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) { - return cb(new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`)) - } - - // do not copy if src is a subdir of dest since unlinking - // dest in this case would result in removing src contents - // and therefore a broken symlink would be created. 
- if (destStat.isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) { - return cb(new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`)) - } - return copyLink(resolvedSrc, dest, cb) - }) - } - }) -} - -function copyLink (resolvedSrc, dest, cb) { - fs.unlink(dest, err => { - if (err) return cb(err) - return fs.symlink(resolvedSrc, dest, cb) - }) -} - -module.exports = copy - - -/***/ }), - -/***/ 1335: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(1463).fromCallback -module.exports = { - copy: u(__nccwpck_require__(8834)) -} - - -/***/ }), - -/***/ 6970: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(1463).fromPromise -const fs = __nccwpck_require__(1176) -const path = __nccwpck_require__(5622) -const mkdir = __nccwpck_require__(2915) -const remove = __nccwpck_require__(7357) - -const emptyDir = u(async function emptyDir (dir) { - let items - try { - items = await fs.readdir(dir) - } catch { - return mkdir.mkdirs(dir) - } - - return Promise.all(items.map(item => remove.remove(path.join(dir, item)))) -}) - -function emptyDirSync (dir) { - let items - try { - items = fs.readdirSync(dir) - } catch { - return mkdir.mkdirsSync(dir) - } - - items.forEach(item => { - item = path.join(dir, item) - remove.removeSync(item) - }) -} - -module.exports = { - emptyDirSync, - emptydirSync: emptyDirSync, - emptyDir, - emptydir: emptyDir -} - - -/***/ }), - -/***/ 2164: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(1463).fromCallback -const path = __nccwpck_require__(5622) -const fs = __nccwpck_require__(7758) -const mkdir = __nccwpck_require__(2915) - -function createFile (file, callback) { - function makeFile () { - fs.writeFile(file, '', err => { - if (err) return callback(err) - callback() - }) - } - - fs.stat(file, (err, stats) => { // eslint-disable-line handle-callback-err - if (!err && stats.isFile()) return callback() - const dir = path.dirname(file) - fs.stat(dir, (err, stats) => { - if (err) { - // if the directory doesn't exist, make it - if (err.code === 'ENOENT') { - return mkdir.mkdirs(dir, err => { - if (err) return callback(err) - makeFile() - }) - } - return callback(err) - } - - if (stats.isDirectory()) makeFile() - else { - // parent is not a directory - // This is just to cause an internal ENOTDIR error to be thrown - fs.readdir(dir, err => { - if (err) return callback(err) - }) - } - }) - }) -} - -function createFileSync (file) { - let stats - try { - stats = fs.statSync(file) - } catch {} - if (stats && stats.isFile()) return - - const dir = path.dirname(file) - try { - if (!fs.statSync(dir).isDirectory()) { - // parent is not a directory - // This is just to cause an internal ENOTDIR error to be thrown - fs.readdirSync(dir) - } - } catch (err) { - // If the stat call above failed because the directory doesn't exist, create it - if (err && err.code === 'ENOENT') mkdir.mkdirsSync(dir) - else throw err - } - - fs.writeFileSync(file, '') -} - -module.exports = { - createFile: u(createFile), - createFileSync -} - - -/***/ }), - -/***/ 55: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const file = __nccwpck_require__(2164) -const link = __nccwpck_require__(3797) -const symlink = __nccwpck_require__(2549) - -module.exports = { - // file - createFile: file.createFile, - createFileSync: 
file.createFileSync, - ensureFile: file.createFile, - ensureFileSync: file.createFileSync, - // link - createLink: link.createLink, - createLinkSync: link.createLinkSync, - ensureLink: link.createLink, - ensureLinkSync: link.createLinkSync, - // symlink - createSymlink: symlink.createSymlink, - createSymlinkSync: symlink.createSymlinkSync, - ensureSymlink: symlink.createSymlink, - ensureSymlinkSync: symlink.createSymlinkSync -} - - -/***/ }), - -/***/ 3797: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(1463).fromCallback -const path = __nccwpck_require__(5622) -const fs = __nccwpck_require__(7758) -const mkdir = __nccwpck_require__(2915) -const pathExists = __nccwpck_require__(3835).pathExists -const { areIdentical } = __nccwpck_require__(3901) - -function createLink (srcpath, dstpath, callback) { - function makeLink (srcpath, dstpath) { - fs.link(srcpath, dstpath, err => { - if (err) return callback(err) - callback(null) - }) - } - - fs.lstat(dstpath, (_, dstStat) => { - fs.lstat(srcpath, (err, srcStat) => { - if (err) { - err.message = err.message.replace('lstat', 'ensureLink') - return callback(err) - } - if (dstStat && areIdentical(srcStat, dstStat)) return callback(null) - - const dir = path.dirname(dstpath) - pathExists(dir, (err, dirExists) => { - if (err) return callback(err) - if (dirExists) return makeLink(srcpath, dstpath) - mkdir.mkdirs(dir, err => { - if (err) return callback(err) - makeLink(srcpath, dstpath) - }) - }) - }) - }) -} - -function createLinkSync (srcpath, dstpath) { - let dstStat - try { - dstStat = fs.lstatSync(dstpath) - } catch {} - - try { - const srcStat = fs.lstatSync(srcpath) - if (dstStat && areIdentical(srcStat, dstStat)) return - } catch (err) { - err.message = err.message.replace('lstat', 'ensureLink') - throw err - } - - const dir = path.dirname(dstpath) - const dirExists = fs.existsSync(dir) - if (dirExists) return fs.linkSync(srcpath, dstpath) - mkdir.mkdirsSync(dir) - - return fs.linkSync(srcpath, dstpath) -} - -module.exports = { - createLink: u(createLink), - createLinkSync -} - - -/***/ }), - -/***/ 3727: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const path = __nccwpck_require__(5622) -const fs = __nccwpck_require__(7758) -const pathExists = __nccwpck_require__(3835).pathExists - -/** - * Function that returns two types of paths, one relative to symlink, and one - * relative to the current working directory. Checks if path is absolute or - * relative. If the path is relative, this function checks if the path is - * relative to symlink or relative to current working directory. This is an - * initiative to find a smarter `srcpath` to supply when building symlinks. - * This allows you to determine which path to use out of one of three possible - * types of source paths. The first is an absolute path. This is detected by - * `path.isAbsolute()`. When an absolute path is provided, it is checked to - * see if it exists. If it does it's used, if not an error is returned - * (callback)/ thrown (sync). The other two options for `srcpath` are a - * relative url. By default Node's `fs.symlink` works by creating a symlink - * using `dstpath` and expects the `srcpath` to be relative to the newly - * created symlink. If you provide a `srcpath` that does not exist on the file - * system it results in a broken symlink. 
To minimize this, the function - * checks to see if the 'relative to symlink' source file exists, and if it - * does it will use it. If it does not, it checks if there's a file that - * exists that is relative to the current working directory, if does its used. - * This preserves the expectations of the original fs.symlink spec and adds - * the ability to pass in `relative to current working direcotry` paths. - */ - -function symlinkPaths (srcpath, dstpath, callback) { - if (path.isAbsolute(srcpath)) { - return fs.lstat(srcpath, (err) => { - if (err) { - err.message = err.message.replace('lstat', 'ensureSymlink') - return callback(err) - } - return callback(null, { - toCwd: srcpath, - toDst: srcpath - }) - }) - } else { - const dstdir = path.dirname(dstpath) - const relativeToDst = path.join(dstdir, srcpath) - return pathExists(relativeToDst, (err, exists) => { - if (err) return callback(err) - if (exists) { - return callback(null, { - toCwd: relativeToDst, - toDst: srcpath - }) - } else { - return fs.lstat(srcpath, (err) => { - if (err) { - err.message = err.message.replace('lstat', 'ensureSymlink') - return callback(err) - } - return callback(null, { - toCwd: srcpath, - toDst: path.relative(dstdir, srcpath) - }) - }) - } - }) - } -} - -function symlinkPathsSync (srcpath, dstpath) { - let exists - if (path.isAbsolute(srcpath)) { - exists = fs.existsSync(srcpath) - if (!exists) throw new Error('absolute srcpath does not exist') - return { - toCwd: srcpath, - toDst: srcpath - } - } else { - const dstdir = path.dirname(dstpath) - const relativeToDst = path.join(dstdir, srcpath) - exists = fs.existsSync(relativeToDst) - if (exists) { - return { - toCwd: relativeToDst, - toDst: srcpath - } - } else { - exists = fs.existsSync(srcpath) - if (!exists) throw new Error('relative srcpath does not exist') - return { - toCwd: srcpath, - toDst: path.relative(dstdir, srcpath) - } - } - } -} - -module.exports = { - symlinkPaths, - symlinkPathsSync -} - - -/***/ }), - -/***/ 8254: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) - -function symlinkType (srcpath, type, callback) { - callback = (typeof type === 'function') ? type : callback - type = (typeof type === 'function') ? false : type - if (type) return callback(null, type) - fs.lstat(srcpath, (err, stats) => { - if (err) return callback(null, 'file') - type = (stats && stats.isDirectory()) ? 'dir' : 'file' - callback(null, type) - }) -} - -function symlinkTypeSync (srcpath, type) { - let stats - - if (type) return type - try { - stats = fs.lstatSync(srcpath) - } catch { - return 'file' - } - return (stats && stats.isDirectory()) ? 
'dir' : 'file' -} - -module.exports = { - symlinkType, - symlinkTypeSync -} - - -/***/ }), - -/***/ 2549: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(1463).fromCallback -const path = __nccwpck_require__(5622) -const fs = __nccwpck_require__(1176) -const _mkdirs = __nccwpck_require__(2915) -const mkdirs = _mkdirs.mkdirs -const mkdirsSync = _mkdirs.mkdirsSync - -const _symlinkPaths = __nccwpck_require__(3727) -const symlinkPaths = _symlinkPaths.symlinkPaths -const symlinkPathsSync = _symlinkPaths.symlinkPathsSync - -const _symlinkType = __nccwpck_require__(8254) -const symlinkType = _symlinkType.symlinkType -const symlinkTypeSync = _symlinkType.symlinkTypeSync - -const pathExists = __nccwpck_require__(3835).pathExists - -const { areIdentical } = __nccwpck_require__(3901) - -function createSymlink (srcpath, dstpath, type, callback) { - callback = (typeof type === 'function') ? type : callback - type = (typeof type === 'function') ? false : type - - fs.lstat(dstpath, (err, stats) => { - if (!err && stats.isSymbolicLink()) { - Promise.all([ - fs.stat(srcpath), - fs.stat(dstpath) - ]).then(([srcStat, dstStat]) => { - if (areIdentical(srcStat, dstStat)) return callback(null) - _createSymlink(srcpath, dstpath, type, callback) - }) - } else _createSymlink(srcpath, dstpath, type, callback) - }) -} - -function _createSymlink (srcpath, dstpath, type, callback) { - symlinkPaths(srcpath, dstpath, (err, relative) => { - if (err) return callback(err) - srcpath = relative.toDst - symlinkType(relative.toCwd, type, (err, type) => { - if (err) return callback(err) - const dir = path.dirname(dstpath) - pathExists(dir, (err, dirExists) => { - if (err) return callback(err) - if (dirExists) return fs.symlink(srcpath, dstpath, type, callback) - mkdirs(dir, err => { - if (err) return callback(err) - fs.symlink(srcpath, dstpath, type, callback) - }) - }) - }) - }) -} - -function createSymlinkSync (srcpath, dstpath, type) { - let stats - try { - stats = fs.lstatSync(dstpath) - } catch {} - if (stats && stats.isSymbolicLink()) { - const srcStat = fs.statSync(srcpath) - const dstStat = fs.statSync(dstpath) - if (areIdentical(srcStat, dstStat)) return - } - - const relative = symlinkPathsSync(srcpath, dstpath) - srcpath = relative.toDst - type = symlinkTypeSync(relative.toCwd, type) - const dir = path.dirname(dstpath) - const exists = fs.existsSync(dir) - if (exists) return fs.symlinkSync(srcpath, dstpath, type) - mkdirsSync(dir) - return fs.symlinkSync(srcpath, dstpath, type) -} - -module.exports = { - createSymlink: u(createSymlink), - createSymlinkSync -} - - -/***/ }), - -/***/ 1176: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// This is adapted from https://github.com/normalize/mz -// Copyright (c) 2014-2016 Jonathan Ong me@jongleberry.com and Contributors -const u = __nccwpck_require__(1463).fromCallback -const fs = __nccwpck_require__(7758) - -const api = [ - 'access', - 'appendFile', - 'chmod', - 'chown', - 'close', - 'copyFile', - 'fchmod', - 'fchown', - 'fdatasync', - 'fstat', - 'fsync', - 'ftruncate', - 'futimes', - 'lchmod', - 'lchown', - 'link', - 'lstat', - 'mkdir', - 'mkdtemp', - 'open', - 'opendir', - 'readdir', - 'readFile', - 'readlink', - 'realpath', - 'rename', - 'rm', - 'rmdir', - 'stat', - 'symlink', - 'truncate', - 'unlink', - 'utimes', - 'writeFile' -].filter(key => { - // Some commands are not available on some systems. 
Ex: - // fs.opendir was added in Node.js v12.12.0 - // fs.rm was added in Node.js v14.14.0 - // fs.lchown is not available on at least some Linux - return typeof fs[key] === 'function' -}) - -// Export cloned fs: -Object.assign(exports, fs) - -// Universalify async methods: -api.forEach(method => { - exports[method] = u(fs[method]) -}) -exports.realpath.native = u(fs.realpath.native) - -// We differ from mz/fs in that we still ship the old, broken, fs.exists() -// since we are a drop-in replacement for the native module -exports.exists = function (filename, callback) { - if (typeof callback === 'function') { - return fs.exists(filename, callback) - } - return new Promise(resolve => { - return fs.exists(filename, resolve) - }) -} - -// fs.read(), fs.write(), & fs.writev() need special treatment due to multiple callback args - -exports.read = function (fd, buffer, offset, length, position, callback) { - if (typeof callback === 'function') { - return fs.read(fd, buffer, offset, length, position, callback) - } - return new Promise((resolve, reject) => { - fs.read(fd, buffer, offset, length, position, (err, bytesRead, buffer) => { - if (err) return reject(err) - resolve({ bytesRead, buffer }) - }) - }) -} - -// Function signature can be -// fs.write(fd, buffer[, offset[, length[, position]]], callback) -// OR -// fs.write(fd, string[, position[, encoding]], callback) -// We need to handle both cases, so we use ...args -exports.write = function (fd, buffer, ...args) { - if (typeof args[args.length - 1] === 'function') { - return fs.write(fd, buffer, ...args) - } - - return new Promise((resolve, reject) => { - fs.write(fd, buffer, ...args, (err, bytesWritten, buffer) => { - if (err) return reject(err) - resolve({ bytesWritten, buffer }) - }) - }) -} - -// fs.writev only available in Node v12.9.0+ -if (typeof fs.writev === 'function') { - // Function signature is - // s.writev(fd, buffers[, position], callback) - // We need to handle the optional arg, so we use ...args - exports.writev = function (fd, buffers, ...args) { - if (typeof args[args.length - 1] === 'function') { - return fs.writev(fd, buffers, ...args) - } - - return new Promise((resolve, reject) => { - fs.writev(fd, buffers, ...args, (err, bytesWritten, buffers) => { - if (err) return reject(err) - resolve({ bytesWritten, buffers }) - }) - }) - } -} - - -/***/ }), - -/***/ 5630: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -module.exports = { - // Export promiseified graceful-fs: - ...__nccwpck_require__(1176), - // Export extra methods: - ...__nccwpck_require__(1135), - ...__nccwpck_require__(1335), - ...__nccwpck_require__(6970), - ...__nccwpck_require__(55), - ...__nccwpck_require__(213), - ...__nccwpck_require__(2915), - ...__nccwpck_require__(9665), - ...__nccwpck_require__(1497), - ...__nccwpck_require__(6570), - ...__nccwpck_require__(3835), - ...__nccwpck_require__(7357) -} - - -/***/ }), - -/***/ 213: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(1463).fromPromise -const jsonFile = __nccwpck_require__(8970) - -jsonFile.outputJson = u(__nccwpck_require__(531)) -jsonFile.outputJsonSync = __nccwpck_require__(9421) -// aliases -jsonFile.outputJSON = jsonFile.outputJson -jsonFile.outputJSONSync = jsonFile.outputJsonSync -jsonFile.writeJSON = jsonFile.writeJson -jsonFile.writeJSONSync = jsonFile.writeJsonSync -jsonFile.readJSON = jsonFile.readJson -jsonFile.readJSONSync = jsonFile.readJsonSync - -module.exports = 
jsonFile - - -/***/ }), - -/***/ 8970: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const jsonFile = __nccwpck_require__(6160) - -module.exports = { - // jsonfile exports - readJson: jsonFile.readFile, - readJsonSync: jsonFile.readFileSync, - writeJson: jsonFile.writeFile, - writeJsonSync: jsonFile.writeFileSync -} - - -/***/ }), - -/***/ 9421: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const { stringify } = __nccwpck_require__(5902) -const { outputFileSync } = __nccwpck_require__(6570) - -function outputJsonSync (file, data, options) { - const str = stringify(data, options) - - outputFileSync(file, str, options) -} - -module.exports = outputJsonSync - - -/***/ }), - -/***/ 531: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const { stringify } = __nccwpck_require__(5902) -const { outputFile } = __nccwpck_require__(6570) - -async function outputJson (file, data, options = {}) { - const str = stringify(data, options) - - await outputFile(file, str, options) -} - -module.exports = outputJson - - -/***/ }), - -/***/ 2915: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const u = __nccwpck_require__(1463).fromPromise -const { makeDir: _makeDir, makeDirSync } = __nccwpck_require__(2751) -const makeDir = u(_makeDir) - -module.exports = { - mkdirs: makeDir, - mkdirsSync: makeDirSync, - // alias - mkdirp: makeDir, - mkdirpSync: makeDirSync, - ensureDir: makeDir, - ensureDirSync: makeDirSync -} - - -/***/ }), - -/***/ 2751: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const fs = __nccwpck_require__(1176) -const { checkPath } = __nccwpck_require__(9907) - -const getMode = options => { - const defaults = { mode: 0o777 } - if (typeof options === 'number') return options - return ({ ...defaults, ...options }).mode -} - -module.exports.makeDir = async (dir, options) => { - checkPath(dir) - - return fs.mkdir(dir, { - mode: getMode(options), - recursive: true - }) -} - -module.exports.makeDirSync = (dir, options) => { - checkPath(dir) - - return fs.mkdirSync(dir, { - mode: getMode(options), - recursive: true - }) -} - - -/***/ }), - -/***/ 9907: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; -// Adapted from https://github.com/sindresorhus/make-dir -// Copyright (c) Sindre Sorhus (sindresorhus.com) -// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -const path = __nccwpck_require__(5622) - -// https://github.com/nodejs/node/issues/8987 -// https://github.com/libuv/libuv/pull/1088 -module.exports.checkPath = function checkPath (pth) { - if (process.platform === 'win32') { - const pathHasInvalidWinCharacters = /[<>:"|?*]/.test(pth.replace(path.parse(pth).root, '')) - - if (pathHasInvalidWinCharacters) { - const error = new Error(`Path contains invalid characters: ${pth}`) - error.code = 'EINVAL' - throw error - } - } -} - - -/***/ }), - -/***/ 9665: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -module.exports = { - moveSync: __nccwpck_require__(6445) -} - - -/***/ }), - -/***/ 6445: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) -const path = __nccwpck_require__(5622) -const copySync = __nccwpck_require__(1135).copySync -const removeSync = __nccwpck_require__(7357).removeSync -const mkdirpSync = __nccwpck_require__(2915).mkdirpSync -const stat = __nccwpck_require__(3901) - -function moveSync (src, dest, opts) { - opts = opts || {} - const overwrite = opts.overwrite || opts.clobber || false - - const { srcStat, isChangingCase = false } = stat.checkPathsSync(src, dest, 'move', opts) - stat.checkParentPathsSync(src, srcStat, dest, 'move') - if (!isParentRoot(dest)) mkdirpSync(path.dirname(dest)) - return doRename(src, dest, overwrite, isChangingCase) -} - -function isParentRoot (dest) { - const parent = path.dirname(dest) - const parsedPath = path.parse(parent) - return parsedPath.root === parent -} - -function doRename (src, dest, overwrite, isChangingCase) { - if (isChangingCase) return rename(src, dest, overwrite) - if (overwrite) { - removeSync(dest) - return rename(src, dest, overwrite) - } - if (fs.existsSync(dest)) throw new Error('dest already exists.') - return rename(src, dest, overwrite) -} - -function rename (src, dest, overwrite) { - try { - fs.renameSync(src, dest) - } catch (err) { - if (err.code !== 'EXDEV') throw err - return moveAcrossDevice(src, dest, overwrite) - } -} - -function moveAcrossDevice (src, dest, overwrite) { - const opts = { - overwrite, - errorOnExist: true - } - copySync(src, dest, opts) - return removeSync(src) -} - -module.exports = moveSync - - -/***/ }), - -/***/ 1497: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(1463).fromCallback -module.exports = { - move: u(__nccwpck_require__(2231)) -} - - -/***/ }), - -/***/ 2231: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) -const path = __nccwpck_require__(5622) -const copy = __nccwpck_require__(1335).copy -const remove = __nccwpck_require__(7357).remove -const mkdirp = __nccwpck_require__(2915).mkdirp -const pathExists = __nccwpck_require__(3835).pathExists -const stat = __nccwpck_require__(3901) - -function move (src, dest, opts, cb) { - if (typeof opts === 'function') { - cb = opts - opts = {} - } - - const overwrite = opts.overwrite || opts.clobber || false - - stat.checkPaths(src, dest, 'move', opts, (err, stats) => { - if (err) return cb(err) - const { srcStat, isChangingCase = false } = stats - stat.checkParentPaths(src, srcStat, dest, 'move', err => { - if (err) return cb(err) - if (isParentRoot(dest)) return doRename(src, dest, overwrite, isChangingCase, cb) - mkdirp(path.dirname(dest), err => { - if (err) return cb(err) - return doRename(src, dest, overwrite, 
isChangingCase, cb) - }) - }) - }) -} - -function isParentRoot (dest) { - const parent = path.dirname(dest) - const parsedPath = path.parse(parent) - return parsedPath.root === parent -} - -function doRename (src, dest, overwrite, isChangingCase, cb) { - if (isChangingCase) return rename(src, dest, overwrite, cb) - if (overwrite) { - return remove(dest, err => { - if (err) return cb(err) - return rename(src, dest, overwrite, cb) - }) - } - pathExists(dest, (err, destExists) => { - if (err) return cb(err) - if (destExists) return cb(new Error('dest already exists.')) - return rename(src, dest, overwrite, cb) - }) -} - -function rename (src, dest, overwrite, cb) { - fs.rename(src, dest, err => { - if (!err) return cb() - if (err.code !== 'EXDEV') return cb(err) - return moveAcrossDevice(src, dest, overwrite, cb) - }) -} - -function moveAcrossDevice (src, dest, overwrite, cb) { - const opts = { - overwrite, - errorOnExist: true - } - copy(src, dest, opts, err => { - if (err) return cb(err) - return remove(src, cb) - }) -} - -module.exports = move - - -/***/ }), - -/***/ 6570: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = __nccwpck_require__(1463).fromCallback -const fs = __nccwpck_require__(7758) -const path = __nccwpck_require__(5622) -const mkdir = __nccwpck_require__(2915) -const pathExists = __nccwpck_require__(3835).pathExists - -function outputFile (file, data, encoding, callback) { - if (typeof encoding === 'function') { - callback = encoding - encoding = 'utf8' - } - - const dir = path.dirname(file) - pathExists(dir, (err, itDoes) => { - if (err) return callback(err) - if (itDoes) return fs.writeFile(file, data, encoding, callback) - - mkdir.mkdirs(dir, err => { - if (err) return callback(err) - - fs.writeFile(file, data, encoding, callback) - }) - }) -} - -function outputFileSync (file, ...args) { - const dir = path.dirname(file) - if (fs.existsSync(dir)) { - return fs.writeFileSync(file, ...args) - } - mkdir.mkdirsSync(dir) - fs.writeFileSync(file, ...args) -} - -module.exports = { - outputFile: u(outputFile), - outputFileSync -} - - -/***/ }), - -/***/ 3835: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const u = __nccwpck_require__(1463).fromPromise -const fs = __nccwpck_require__(1176) - -function pathExists (path) { - return fs.access(path).then(() => true).catch(() => false) -} - -module.exports = { - pathExists: u(pathExists), - pathExistsSync: fs.existsSync -} - - -/***/ }), - -/***/ 7357: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) -const u = __nccwpck_require__(1463).fromCallback -const rimraf = __nccwpck_require__(7247) - -function remove (path, callback) { - // Node 14.14.0+ - if (fs.rm) return fs.rm(path, { recursive: true, force: true }, callback) - rimraf(path, callback) -} - -function removeSync (path) { - // Node 14.14.0+ - if (fs.rmSync) return fs.rmSync(path, { recursive: true, force: true }) - rimraf.sync(path) -} - -module.exports = { - remove: u(remove), - removeSync -} - - -/***/ }), - -/***/ 7247: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) -const path = __nccwpck_require__(5622) -const assert = __nccwpck_require__(2357) - -const isWindows = (process.platform === 'win32') - -function defaults (options) { - const methods = [ - 'unlink', - 'chmod', - 'stat', - 'lstat', - 'rmdir', - 'readdir' - ] - 
methods.forEach(m => { - options[m] = options[m] || fs[m] - m = m + 'Sync' - options[m] = options[m] || fs[m] - }) - - options.maxBusyTries = options.maxBusyTries || 3 -} - -function rimraf (p, options, cb) { - let busyTries = 0 - - if (typeof options === 'function') { - cb = options - options = {} - } - - assert(p, 'rimraf: missing path') - assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string') - assert.strictEqual(typeof cb, 'function', 'rimraf: callback function required') - assert(options, 'rimraf: invalid options argument provided') - assert.strictEqual(typeof options, 'object', 'rimraf: options should be object') - - defaults(options) - - rimraf_(p, options, function CB (er) { - if (er) { - if ((er.code === 'EBUSY' || er.code === 'ENOTEMPTY' || er.code === 'EPERM') && - busyTries < options.maxBusyTries) { - busyTries++ - const time = busyTries * 100 - // try again, with the same exact callback as this one. - return setTimeout(() => rimraf_(p, options, CB), time) - } - - // already gone - if (er.code === 'ENOENT') er = null - } - - cb(er) - }) -} - -// Two possible strategies. -// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR -// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR -// -// Both result in an extra syscall when you guess wrong. However, there -// are likely far more normal files in the world than directories. This -// is based on the assumption that a the average number of files per -// directory is >= 1. -// -// If anyone ever complains about this, then I guess the strategy could -// be made configurable somehow. But until then, YAGNI. -function rimraf_ (p, options, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - - // sunos lets the root user unlink directories, which is... weird. - // so we have to lstat here and make sure it's not a dir. - options.lstat(p, (er, st) => { - if (er && er.code === 'ENOENT') { - return cb(null) - } - - // Windows can EPERM on stat. Life is suffering. - if (er && er.code === 'EPERM' && isWindows) { - return fixWinEPERM(p, options, er, cb) - } - - if (st && st.isDirectory()) { - return rmdir(p, options, er, cb) - } - - options.unlink(p, er => { - if (er) { - if (er.code === 'ENOENT') { - return cb(null) - } - if (er.code === 'EPERM') { - return (isWindows) - ? fixWinEPERM(p, options, er, cb) - : rmdir(p, options, er, cb) - } - if (er.code === 'EISDIR') { - return rmdir(p, options, er, cb) - } - } - return cb(er) - }) - }) -} - -function fixWinEPERM (p, options, er, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - - options.chmod(p, 0o666, er2 => { - if (er2) { - cb(er2.code === 'ENOENT' ? null : er) - } else { - options.stat(p, (er3, stats) => { - if (er3) { - cb(er3.code === 'ENOENT' ? 
null : er) - } else if (stats.isDirectory()) { - rmdir(p, options, er, cb) - } else { - options.unlink(p, cb) - } - }) - } - }) -} - -function fixWinEPERMSync (p, options, er) { - let stats - - assert(p) - assert(options) - - try { - options.chmodSync(p, 0o666) - } catch (er2) { - if (er2.code === 'ENOENT') { - return - } else { - throw er - } - } - - try { - stats = options.statSync(p) - } catch (er3) { - if (er3.code === 'ENOENT') { - return - } else { - throw er - } - } - - if (stats.isDirectory()) { - rmdirSync(p, options, er) - } else { - options.unlinkSync(p) - } -} - -function rmdir (p, options, originalEr, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - - // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) - // if we guessed wrong, and it's not a directory, then - // raise the original error. - options.rmdir(p, er => { - if (er && (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM')) { - rmkids(p, options, cb) - } else if (er && er.code === 'ENOTDIR') { - cb(originalEr) - } else { - cb(er) - } - }) -} - -function rmkids (p, options, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - - options.readdir(p, (er, files) => { - if (er) return cb(er) - - let n = files.length - let errState - - if (n === 0) return options.rmdir(p, cb) - - files.forEach(f => { - rimraf(path.join(p, f), options, er => { - if (errState) { - return - } - if (er) return cb(errState = er) - if (--n === 0) { - options.rmdir(p, cb) - } - }) - }) - }) -} - -// this looks simpler, and is strictly *faster*, but will -// tie up the JavaScript thread and fail on excessively -// deep directory trees. -function rimrafSync (p, options) { - let st - - options = options || {} - defaults(options) - - assert(p, 'rimraf: missing path') - assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string') - assert(options, 'rimraf: missing options') - assert.strictEqual(typeof options, 'object', 'rimraf: options should be object') - - try { - st = options.lstatSync(p) - } catch (er) { - if (er.code === 'ENOENT') { - return - } - - // Windows can EPERM on stat. Life is suffering. - if (er.code === 'EPERM' && isWindows) { - fixWinEPERMSync(p, options, er) - } - } - - try { - // sunos lets the root user unlink directories, which is... weird. - if (st && st.isDirectory()) { - rmdirSync(p, options, null) - } else { - options.unlinkSync(p) - } - } catch (er) { - if (er.code === 'ENOENT') { - return - } else if (er.code === 'EPERM') { - return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) - } else if (er.code !== 'EISDIR') { - throw er - } - rmdirSync(p, options, er) - } -} - -function rmdirSync (p, options, originalEr) { - assert(p) - assert(options) - - try { - options.rmdirSync(p) - } catch (er) { - if (er.code === 'ENOTDIR') { - throw originalEr - } else if (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM') { - rmkidsSync(p, options) - } else if (er.code !== 'ENOENT') { - throw er - } - } -} - -function rmkidsSync (p, options) { - assert(p) - assert(options) - options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options)) - - if (isWindows) { - // We only end up here once we got ENOTEMPTY at least once, and - // at this point, we are guaranteed to have removed all the kids. - // So, we know that it won't be ENOENT or ENOTDIR or anything else. 
- // try really hard to delete stuff on windows, because it has a - // PROFOUNDLY annoying habit of not closing handles promptly when - // files are deleted, resulting in spurious ENOTEMPTY errors. - const startTime = Date.now() - do { - try { - const ret = options.rmdirSync(p, options) - return ret - } catch {} - } while (Date.now() - startTime < 500) // give up after 500ms - } else { - const ret = options.rmdirSync(p, options) - return ret - } -} - -module.exports = rimraf -rimraf.sync = rimrafSync - - -/***/ }), - -/***/ 3901: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(1176) -const path = __nccwpck_require__(5622) -const util = __nccwpck_require__(1669) - -function getStats (src, dest, opts) { - const statFunc = opts.dereference - ? (file) => fs.stat(file, { bigint: true }) - : (file) => fs.lstat(file, { bigint: true }) - return Promise.all([ - statFunc(src), - statFunc(dest).catch(err => { - if (err.code === 'ENOENT') return null - throw err - }) - ]).then(([srcStat, destStat]) => ({ srcStat, destStat })) -} - -function getStatsSync (src, dest, opts) { - let destStat - const statFunc = opts.dereference - ? (file) => fs.statSync(file, { bigint: true }) - : (file) => fs.lstatSync(file, { bigint: true }) - const srcStat = statFunc(src) - try { - destStat = statFunc(dest) - } catch (err) { - if (err.code === 'ENOENT') return { srcStat, destStat: null } - throw err - } - return { srcStat, destStat } -} - -function checkPaths (src, dest, funcName, opts, cb) { - util.callbackify(getStats)(src, dest, opts, (err, stats) => { - if (err) return cb(err) - const { srcStat, destStat } = stats - - if (destStat) { - if (areIdentical(srcStat, destStat)) { - const srcBaseName = path.basename(src) - const destBaseName = path.basename(dest) - if (funcName === 'move' && - srcBaseName !== destBaseName && - srcBaseName.toLowerCase() === destBaseName.toLowerCase()) { - return cb(null, { srcStat, destStat, isChangingCase: true }) - } - return cb(new Error('Source and destination must not be the same.')) - } - if (srcStat.isDirectory() && !destStat.isDirectory()) { - return cb(new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`)) - } - if (!srcStat.isDirectory() && destStat.isDirectory()) { - return cb(new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`)) - } - } - - if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { - return cb(new Error(errMsg(src, dest, funcName))) - } - return cb(null, { srcStat, destStat }) - }) -} - -function checkPathsSync (src, dest, funcName, opts) { - const { srcStat, destStat } = getStatsSync(src, dest, opts) - - if (destStat) { - if (areIdentical(srcStat, destStat)) { - const srcBaseName = path.basename(src) - const destBaseName = path.basename(dest) - if (funcName === 'move' && - srcBaseName !== destBaseName && - srcBaseName.toLowerCase() === destBaseName.toLowerCase()) { - return { srcStat, destStat, isChangingCase: true } - } - throw new Error('Source and destination must not be the same.') - } - if (srcStat.isDirectory() && !destStat.isDirectory()) { - throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`) - } - if (!srcStat.isDirectory() && destStat.isDirectory()) { - throw new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`) - } - } - - if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { - throw new Error(errMsg(src, dest, funcName)) - } - return { srcStat, destStat } -} - -// 
recursively check if dest parent is a subdirectory of src. -// It works for all file types including symlinks since it -// checks the src and dest inodes. It starts from the deepest -// parent and stops once it reaches the src parent or the root path. -function checkParentPaths (src, srcStat, dest, funcName, cb) { - const srcParent = path.resolve(path.dirname(src)) - const destParent = path.resolve(path.dirname(dest)) - if (destParent === srcParent || destParent === path.parse(destParent).root) return cb() - fs.stat(destParent, { bigint: true }, (err, destStat) => { - if (err) { - if (err.code === 'ENOENT') return cb() - return cb(err) - } - if (areIdentical(srcStat, destStat)) { - return cb(new Error(errMsg(src, dest, funcName))) - } - return checkParentPaths(src, srcStat, destParent, funcName, cb) - }) -} - -function checkParentPathsSync (src, srcStat, dest, funcName) { - const srcParent = path.resolve(path.dirname(src)) - const destParent = path.resolve(path.dirname(dest)) - if (destParent === srcParent || destParent === path.parse(destParent).root) return - let destStat - try { - destStat = fs.statSync(destParent, { bigint: true }) - } catch (err) { - if (err.code === 'ENOENT') return - throw err - } - if (areIdentical(srcStat, destStat)) { - throw new Error(errMsg(src, dest, funcName)) - } - return checkParentPathsSync(src, srcStat, destParent, funcName) -} - -function areIdentical (srcStat, destStat) { - return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev -} - -// return true if dest is a subdir of src, otherwise false. -// It only checks the path strings. -function isSrcSubdir (src, dest) { - const srcArr = path.resolve(src).split(path.sep).filter(i => i) - const destArr = path.resolve(dest).split(path.sep).filter(i => i) - return srcArr.reduce((acc, cur, i) => acc && destArr[i] === cur, true) -} - -function errMsg (src, dest, funcName) { - return `Cannot ${funcName} '${src}' to a subdirectory of itself, '${dest}'.` -} - -module.exports = { - checkPaths, - checkPathsSync, - checkParentPaths, - checkParentPathsSync, - isSrcSubdir, - areIdentical -} - - -/***/ }), - -/***/ 2548: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(7758) - -function utimesMillis (path, atime, mtime, callback) { - // if (!HAS_MILLIS_RES) return fs.utimes(path, atime, mtime, callback) - fs.open(path, 'r+', (err, fd) => { - if (err) return callback(err) - fs.futimes(fd, atime, mtime, futimesErr => { - fs.close(fd, closeErr => { - if (callback) callback(futimesErr || closeErr) - }) - }) - }) -} - -function utimesMillisSync (path, atime, mtime) { - const fd = fs.openSync(path, 'r+') - fs.futimesSync(fd, atime, mtime) - return fs.closeSync(fd) -} - -module.exports = { - utimesMillis, - utimesMillisSync -} - - -/***/ }), - -/***/ 7356: -/***/ ((module) => { - -"use strict"; - - -module.exports = clone - -var getPrototypeOf = Object.getPrototypeOf || function (obj) { - return obj.__proto__ -} - -function clone (obj) { - if (obj === null || typeof obj !== 'object') - return obj - - if (obj instanceof Object) - var copy = { __proto__: getPrototypeOf(obj) } - else - var copy = Object.create(null) - - Object.getOwnPropertyNames(obj).forEach(function (key) { - Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key)) - }) - - return copy -} - - -/***/ }), - -/***/ 7758: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var fs = __nccwpck_require__(5747) -var 
polyfills = __nccwpck_require__(263) -var legacy = __nccwpck_require__(5162) -var clone = __nccwpck_require__(7356) - -var util = __nccwpck_require__(1669) - -/* istanbul ignore next - node 0.x polyfill */ -var gracefulQueue -var previousSymbol - -/* istanbul ignore else - node 0.x polyfill */ -if (typeof Symbol === 'function' && typeof Symbol.for === 'function') { - gracefulQueue = Symbol.for('graceful-fs.queue') - // This is used in testing by future versions - previousSymbol = Symbol.for('graceful-fs.previous') -} else { - gracefulQueue = '___graceful-fs.queue' - previousSymbol = '___graceful-fs.previous' -} - -function noop () {} - -function publishQueue(context, queue) { - Object.defineProperty(context, gracefulQueue, { - get: function() { - return queue - } - }) -} - -var debug = noop -if (util.debuglog) - debug = util.debuglog('gfs4') -else if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) - debug = function() { - var m = util.format.apply(util, arguments) - m = 'GFS4: ' + m.split(/\n/).join('\nGFS4: ') - console.error(m) - } - -// Once time initialization -if (!fs[gracefulQueue]) { - // This queue can be shared by multiple loaded instances - var queue = global[gracefulQueue] || [] - publishQueue(fs, queue) - - // Patch fs.close/closeSync to shared queue version, because we need - // to retry() whenever a close happens *anywhere* in the program. - // This is essential when multiple graceful-fs instances are - // in play at the same time. - fs.close = (function (fs$close) { - function close (fd, cb) { - return fs$close.call(fs, fd, function (err) { - // This function uses the graceful-fs shared queue - if (!err) { - retry() - } - - if (typeof cb === 'function') - cb.apply(this, arguments) - }) - } - - Object.defineProperty(close, previousSymbol, { - value: fs$close - }) - return close - })(fs.close) - - fs.closeSync = (function (fs$closeSync) { - function closeSync (fd) { - // This function uses the graceful-fs shared queue - fs$closeSync.apply(fs, arguments) - retry() - } - - Object.defineProperty(closeSync, previousSymbol, { - value: fs$closeSync - }) - return closeSync - })(fs.closeSync) - - if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) { - process.on('exit', function() { - debug(fs[gracefulQueue]) - __nccwpck_require__(2357).equal(fs[gracefulQueue].length, 0) - }) - } -} - -if (!global[gracefulQueue]) { - publishQueue(global, fs[gracefulQueue]); -} - -module.exports = patch(clone(fs)) -if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) { - module.exports = patch(fs) - fs.__patched = true; -} - -function patch (fs) { - // Everything that references the open() function needs to be in here - polyfills(fs) - fs.gracefulify = patch - - fs.createReadStream = createReadStream - fs.createWriteStream = createWriteStream - var fs$readFile = fs.readFile - fs.readFile = readFile - function readFile (path, options, cb) { - if (typeof options === 'function') - cb = options, options = null - - return go$readFile(path, options, cb) - - function go$readFile (path, options, cb) { - return fs$readFile(path, options, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$readFile, [path, options, cb]]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - retry() - } - }) - } - } - - var fs$writeFile = fs.writeFile - fs.writeFile = writeFile - function writeFile (path, data, options, cb) { - if (typeof options === 'function') - cb = options, options = null - - return go$writeFile(path, data, options, cb) - - function 
go$writeFile (path, data, options, cb) { - return fs$writeFile(path, data, options, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$writeFile, [path, data, options, cb]]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - retry() - } - }) - } - } - - var fs$appendFile = fs.appendFile - if (fs$appendFile) - fs.appendFile = appendFile - function appendFile (path, data, options, cb) { - if (typeof options === 'function') - cb = options, options = null - - return go$appendFile(path, data, options, cb) - - function go$appendFile (path, data, options, cb) { - return fs$appendFile(path, data, options, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$appendFile, [path, data, options, cb]]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - retry() - } - }) - } - } - - var fs$copyFile = fs.copyFile - if (fs$copyFile) - fs.copyFile = copyFile - function copyFile (src, dest, flags, cb) { - if (typeof flags === 'function') { - cb = flags - flags = 0 - } - return fs$copyFile(src, dest, flags, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([fs$copyFile, [src, dest, flags, cb]]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - retry() - } - }) - } - - var fs$readdir = fs.readdir - fs.readdir = readdir - function readdir (path, options, cb) { - var args = [path] - if (typeof options !== 'function') { - args.push(options) - } else { - cb = options - } - args.push(go$readdir$cb) - - return go$readdir(args) - - function go$readdir$cb (err, files) { - if (files && files.sort) - files.sort() - - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$readdir, [args]]) - - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - retry() - } - } - } - - function go$readdir (args) { - return fs$readdir.apply(fs, args) - } - - if (process.version.substr(0, 4) === 'v0.8') { - var legStreams = legacy(fs) - ReadStream = legStreams.ReadStream - WriteStream = legStreams.WriteStream - } - - var fs$ReadStream = fs.ReadStream - if (fs$ReadStream) { - ReadStream.prototype = Object.create(fs$ReadStream.prototype) - ReadStream.prototype.open = ReadStream$open - } - - var fs$WriteStream = fs.WriteStream - if (fs$WriteStream) { - WriteStream.prototype = Object.create(fs$WriteStream.prototype) - WriteStream.prototype.open = WriteStream$open - } - - Object.defineProperty(fs, 'ReadStream', { - get: function () { - return ReadStream - }, - set: function (val) { - ReadStream = val - }, - enumerable: true, - configurable: true - }) - Object.defineProperty(fs, 'WriteStream', { - get: function () { - return WriteStream - }, - set: function (val) { - WriteStream = val - }, - enumerable: true, - configurable: true - }) - - // legacy names - var FileReadStream = ReadStream - Object.defineProperty(fs, 'FileReadStream', { - get: function () { - return FileReadStream - }, - set: function (val) { - FileReadStream = val - }, - enumerable: true, - configurable: true - }) - var FileWriteStream = WriteStream - Object.defineProperty(fs, 'FileWriteStream', { - get: function () { - return FileWriteStream - }, - set: function (val) { - FileWriteStream = val - }, - enumerable: true, - configurable: true - }) - - function ReadStream (path, options) { - if (this instanceof ReadStream) - return fs$ReadStream.apply(this, arguments), this - else - return ReadStream.apply(Object.create(ReadStream.prototype), arguments) - } - - 
function ReadStream$open () { - var that = this - open(that.path, that.flags, that.mode, function (err, fd) { - if (err) { - if (that.autoClose) - that.destroy() - - that.emit('error', err) - } else { - that.fd = fd - that.emit('open', fd) - that.read() - } - }) - } - - function WriteStream (path, options) { - if (this instanceof WriteStream) - return fs$WriteStream.apply(this, arguments), this - else - return WriteStream.apply(Object.create(WriteStream.prototype), arguments) - } - - function WriteStream$open () { - var that = this - open(that.path, that.flags, that.mode, function (err, fd) { - if (err) { - that.destroy() - that.emit('error', err) - } else { - that.fd = fd - that.emit('open', fd) - } - }) - } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = 'Failed request: (' + statusCode + ')'; + } + let err = new HttpClientError(msg, statusCode); + err.result = response.result; + reject(err); + } + else { + resolve(response); + } + }); + } +} +exports.HttpClient = HttpClient; - function createReadStream (path, options) { - return new fs.ReadStream(path, options) - } - function createWriteStream (path, options) { - return new fs.WriteStream(path, options) - } +/***/ }), - var fs$open = fs.open - fs.open = open - function open (path, flags, mode, cb) { - if (typeof mode === 'function') - cb = mode, mode = null +/***/ 6443: +/***/ ((__unused_webpack_module, exports) => { - return go$open(path, flags, mode, cb) +"use strict"; - function go$open (path, flags, mode, cb) { - return fs$open(path, flags, mode, function (err, fd) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$open, [path, flags, mode, cb]]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - retry() - } - }) +Object.defineProperty(exports, "__esModule", ({ value: true })); +function getProxyUrl(reqUrl) { + let usingSsl = reqUrl.protocol === 'https:'; + let proxyUrl; + if (checkBypass(reqUrl)) { + return proxyUrl; } - } - - return fs -} - -function enqueue (elem) { - debug('ENQUEUE', elem[0].name, elem[1]) - fs[gracefulQueue].push(elem) + let proxyVar; + if (usingSsl) { + proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY']; + } + else { + proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY']; + } + if (proxyVar) { + proxyUrl = new URL(proxyVar); + } + return proxyUrl; } - -function retry () { - var elem = fs[gracefulQueue].shift() - if (elem) { - debug('RETRY', elem[0].name, elem[1]) - elem[0].apply(null, elem[1]) - } +exports.getProxyUrl = getProxyUrl; +function checkBypass(reqUrl) { + if (!reqUrl.hostname) { + return false; + } + let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + if (!noProxy) { + return false; + } + // Determine the request port + let reqPort; + if (reqUrl.port) { + reqPort = Number(reqUrl.port); + } + else if (reqUrl.protocol === 'http:') { + reqPort = 80; + } + else if (reqUrl.protocol === 'https:') { + reqPort = 443; + } + // Format the request hostname and hostname with port + let upperReqHosts = [reqUrl.hostname.toUpperCase()]; + if (typeof reqPort === 'number') { + upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); + } + // Compare request host against noproxy + for (let upperNoProxyItem of noProxy + .split(',') + .map(x => x.trim().toUpperCase()) + .filter(x => x)) { + if (upperReqHosts.some(x => x === upperNoProxyItem)) { + return true; + } + } + return false; } +exports.checkBypass = 
checkBypass; /***/ }), -/***/ 5162: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var Stream = __nccwpck_require__(2413).Stream +/***/ 334: +/***/ ((__unused_webpack_module, exports) => { -module.exports = legacy +"use strict"; -function legacy (fs) { - return { - ReadStream: ReadStream, - WriteStream: WriteStream - } - function ReadStream (path, options) { - if (!(this instanceof ReadStream)) return new ReadStream(path, options); +Object.defineProperty(exports, "__esModule", ({ value: true })); - Stream.call(this); +async function auth(token) { + const tokenType = token.split(/\./).length === 3 ? "app" : /^v\d+\./.test(token) ? "installation" : "oauth"; + return { + type: "token", + token: token, + tokenType + }; +} - var self = this; +/** + * Prefix token for usage in the Authorization header + * + * @param token OAuth token or JSON Web Token + */ +function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; + } - this.path = path; - this.fd = null; - this.readable = true; - this.paused = false; + return `token ${token}`; +} - this.flags = 'r'; - this.mode = 438; /*=0666*/ - this.bufferSize = 64 * 1024; +async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge(route, parameters); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); +} - options = options || {}; +const createTokenAuth = function createTokenAuth(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + } - // Mixin options into this - var keys = Object.keys(options); - for (var index = 0, length = keys.length; index < length; index++) { - var key = keys[index]; - this[key] = options[key]; - } + if (typeof token !== "string") { + throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); + } - if (this.encoding) this.setEncoding(this.encoding); + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token) + }); +}; - if (this.start !== undefined) { - if ('number' !== typeof this.start) { - throw TypeError('start must be a Number'); - } - if (this.end === undefined) { - this.end = Infinity; - } else if ('number' !== typeof this.end) { - throw TypeError('end must be a Number'); - } +exports.createTokenAuth = createTokenAuth; +//# sourceMappingURL=index.js.map - if (this.start > this.end) { - throw new Error('start must be <= end'); - } - this.pos = this.start; - } +/***/ }), - if (this.fd !== null) { - process.nextTick(function() { - self._read(); - }); - return; - } +/***/ 6762: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - fs.open(this.path, this.flags, this.mode, function (err, fd) { - if (err) { - self.emit('error', err); - self.readable = false; - return; - } +"use strict"; - self.fd = fd; - self.emit('open', fd); - self._read(); - }) - } - function WriteStream (path, options) { - if (!(this instanceof WriteStream)) return new WriteStream(path, options); +Object.defineProperty(exports, "__esModule", ({ value: true })); - Stream.call(this); +var universalUserAgent = __nccwpck_require__(5030); +var beforeAfterHook = __nccwpck_require__(6319); +var request = __nccwpck_require__(6234); +var graphql = __nccwpck_require__(8467); +var authToken = __nccwpck_require__(334); - this.path = path; - this.fd = null; - this.writable = true; +function _objectWithoutPropertiesLoose(source, excluded) { + if 
(source == null) return {}; + var target = {}; + var sourceKeys = Object.keys(source); + var key, i; - this.flags = 'w'; - this.encoding = 'binary'; - this.mode = 438; /*=0666*/ - this.bytesWritten = 0; + for (i = 0; i < sourceKeys.length; i++) { + key = sourceKeys[i]; + if (excluded.indexOf(key) >= 0) continue; + target[key] = source[key]; + } - options = options || {}; + return target; +} - // Mixin options into this - var keys = Object.keys(options); - for (var index = 0, length = keys.length; index < length; index++) { - var key = keys[index]; - this[key] = options[key]; - } +function _objectWithoutProperties(source, excluded) { + if (source == null) return {}; - if (this.start !== undefined) { - if ('number' !== typeof this.start) { - throw TypeError('start must be a Number'); - } - if (this.start < 0) { - throw new Error('start must be >= zero'); - } + var target = _objectWithoutPropertiesLoose(source, excluded); - this.pos = this.start; - } + var key, i; - this.busy = false; - this._queue = []; + if (Object.getOwnPropertySymbols) { + var sourceSymbolKeys = Object.getOwnPropertySymbols(source); - if (this.fd === null) { - this._open = fs.open; - this._queue.push([this._open, this.path, this.flags, this.mode, undefined]); - this.flush(); + for (i = 0; i < sourceSymbolKeys.length; i++) { + key = sourceSymbolKeys[i]; + if (excluded.indexOf(key) >= 0) continue; + if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; + target[key] = source[key]; } } + + return target; } +const VERSION = "3.4.0"; -/***/ }), +class Octokit { + constructor(options = {}) { + const hook = new beforeAfterHook.Collection(); + const requestDefaults = { + baseUrl: request.request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, options.request, { + // @ts-ignore internal usage only, no need to type + hook: hook.bind(null, "request") + }), + mediaType: { + previews: [], + format: "" + } + }; // prepend default user agent with `options.userAgent` if set -/***/ 263: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" "); -var constants = __nccwpck_require__(7619) + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; + } -var origCwd = process.cwd -var cwd = null + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; + } -var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; + } -process.cwd = function() { - if (!cwd) - cwd = origCwd.call(process) - return cwd -} -try { - process.cwd() -} catch (er) {} + this.request = request.request.defaults(requestDefaults); + this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults); + this.log = Object.assign({ + debug: () => {}, + info: () => {}, + warn: console.warn.bind(console), + error: console.error.bind(console) + }, options.log); + this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance + // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered. + // (2) If only `options.auth` is set, use the default token authentication strategy. + // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance. 
+ // TODO: type `options.auth` based on `options.authStrategy`. + + if (!options.authStrategy) { + if (!options.auth) { + // (1) + this.auth = async () => ({ + type: "unauthenticated" + }); + } else { + // (2) + const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯ -// This check is needed until node.js 12 is required -if (typeof process.chdir === 'function') { - var chdir = process.chdir - process.chdir = function (d) { - cwd = null - chdir.call(process, d) + hook.wrap("request", auth.hook); + this.auth = auth; + } + } else { + const { + authStrategy + } = options, + otherOptions = _objectWithoutProperties(options, ["authStrategy"]); + + const auth = authStrategy(Object.assign({ + request: this.request, + log: this.log, + // we pass the current octokit instance as well as its constructor options + // to allow for authentication strategies that return a new octokit instance + // that shares the same internal state as the current one. The original + // requirement for this was the "event-octokit" authentication strategy + // of https://github.com/probot/octokit-auth-probot. + octokit: this, + octokitOptions: otherOptions + }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯ + + hook.wrap("request", auth.hook); + this.auth = auth; + } // apply plugins + // https://stackoverflow.com/a/16345172 + + + const classConstructor = this.constructor; + classConstructor.plugins.forEach(plugin => { + Object.assign(this, plugin(this, options)); + }); } - if (Object.setPrototypeOf) Object.setPrototypeOf(process.chdir, chdir) -} -module.exports = patch + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; -function patch (fs) { - // (re-)implement some things that are known busted or missing. + if (typeof defaults === "function") { + super(defaults(options)); + return; + } - // lchmod, broken prior to 0.6.2 - // back-port the fix here. - if (constants.hasOwnProperty('O_SYMLINK') && - process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) { - patchLchmod(fs) + super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? { + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null)); + } + + }; + return OctokitWithDefaults; } + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) + */ - // lutimes implementation, or no-op - if (!fs.lutimes) { - patchLutimes(fs) + + static plugin(...newPlugins) { + var _a; + + const currentPlugins = this.plugins; + const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a); + return NewOctokit; } - // https://github.com/isaacs/node-graceful-fs/issues/4 - // Chown should not fail on einval or eperm if non-root. - // It should not fail on enosys ever, as this just indicates - // that a fs doesn't support the intended operation. 
+} +Octokit.VERSION = VERSION; +Octokit.plugins = []; - fs.chown = chownFix(fs.chown) - fs.fchown = chownFix(fs.fchown) - fs.lchown = chownFix(fs.lchown) +exports.Octokit = Octokit; +//# sourceMappingURL=index.js.map - fs.chmod = chmodFix(fs.chmod) - fs.fchmod = chmodFix(fs.fchmod) - fs.lchmod = chmodFix(fs.lchmod) - fs.chownSync = chownFixSync(fs.chownSync) - fs.fchownSync = chownFixSync(fs.fchownSync) - fs.lchownSync = chownFixSync(fs.lchownSync) +/***/ }), - fs.chmodSync = chmodFixSync(fs.chmodSync) - fs.fchmodSync = chmodFixSync(fs.fchmodSync) - fs.lchmodSync = chmodFixSync(fs.lchmodSync) +/***/ 6319: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - fs.stat = statFix(fs.stat) - fs.fstat = statFix(fs.fstat) - fs.lstat = statFix(fs.lstat) +var register = __nccwpck_require__(7694) +var addHook = __nccwpck_require__(1191) +var removeHook = __nccwpck_require__(6618) - fs.statSync = statFixSync(fs.statSync) - fs.fstatSync = statFixSync(fs.fstatSync) - fs.lstatSync = statFixSync(fs.lstatSync) +// bind with array of arguments: https://stackoverflow.com/a/21792913 +var bind = Function.bind +var bindable = bind.bind(bind) - // if lchmod/lchown do not exist, then make them no-ops - if (!fs.lchmod) { - fs.lchmod = function (path, mode, cb) { - if (cb) process.nextTick(cb) - } - fs.lchmodSync = function () {} - } - if (!fs.lchown) { - fs.lchown = function (path, uid, gid, cb) { - if (cb) process.nextTick(cb) - } - fs.lchownSync = function () {} - } +function bindApi (hook, state, name) { + var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state]) + hook.api = { remove: removeHookRef } + hook.remove = removeHookRef - // on Windows, A/V software can lock the directory, causing this - // to fail with an EACCES or EPERM if the directory contains newly - // created files. Try again on failure, for up to 60 seconds. + ;['before', 'error', 'after', 'wrap'].forEach(function (kind) { + var args = name ? [state, kind, name] : [state, kind] + hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args) + }) +} - // Set the timeout this long because some Windows Anti-Virus, such as Parity - // bit9, may lock files for up to a minute, causing npm package install - // failures. Also, take care to yield the scheduler. Windows scheduling gives - // CPU to a busy looping process, which can cause the program causing the lock - // contention to be starved of CPU by node, so the contention doesn't resolve. - if (platform === "win32") { - fs.rename = (function (fs$rename) { return function (from, to, cb) { - var start = Date.now() - var backoff = 0; - fs$rename(from, to, function CB (er) { - if (er - && (er.code === "EACCES" || er.code === "EPERM") - && Date.now() - start < 60000) { - setTimeout(function() { - fs.stat(to, function (stater, st) { - if (stater && stater.code === "ENOENT") - fs$rename(from, to, CB); - else - cb(er) - }) - }, backoff) - if (backoff < 100) - backoff += 10; - return; - } - if (cb) cb(er) - }) - }})(fs.rename) +function HookSingular () { + var singularHookName = 'h' + var singularHookState = { + registry: {} } + var singularHook = register.bind(null, singularHookState, singularHookName) + bindApi(singularHook, singularHookState, singularHookName) + return singularHook +} - // if read() returns EAGAIN, then just try it again. 
- fs.read = (function (fs$read) { - function read (fd, buffer, offset, length, position, callback_) { - var callback - if (callback_ && typeof callback_ === 'function') { - var eagCounter = 0 - callback = function (er, _, __) { - if (er && er.code === 'EAGAIN' && eagCounter < 10) { - eagCounter ++ - return fs$read.call(fs, fd, buffer, offset, length, position, callback) - } - callback_.apply(this, arguments) - } - } - return fs$read.call(fs, fd, buffer, offset, length, position, callback) - } +function HookCollection () { + var state = { + registry: {} + } - // This ensures `util.promisify` works as it does for native `fs.read`. - if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read) - return read - })(fs.read) + var hook = register.bind(null, state) + bindApi(hook, state) - fs.readSync = (function (fs$readSync) { return function (fd, buffer, offset, length, position) { - var eagCounter = 0 - while (true) { - try { - return fs$readSync.call(fs, fd, buffer, offset, length, position) - } catch (er) { - if (er.code === 'EAGAIN' && eagCounter < 10) { - eagCounter ++ - continue - } - throw er - } - } - }})(fs.readSync) + return hook +} - function patchLchmod (fs) { - fs.lchmod = function (path, mode, callback) { - fs.open( path - , constants.O_WRONLY | constants.O_SYMLINK - , mode - , function (err, fd) { - if (err) { - if (callback) callback(err) - return - } - // prefer to return the chmod error, if one occurs, - // but still try to close, and report closing errors if they occur. - fs.fchmod(fd, mode, function (err) { - fs.close(fd, function(err2) { - if (callback) callback(err || err2) - }) - }) - }) - } +var collectionHookDeprecationMessageDisplayed = false +function Hook () { + if (!collectionHookDeprecationMessageDisplayed) { + console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4') + collectionHookDeprecationMessageDisplayed = true + } + return HookCollection() +} - fs.lchmodSync = function (path, mode) { - var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode) +Hook.Singular = HookSingular.bind() +Hook.Collection = HookCollection.bind() - // prefer to return the chmod error, if one occurs, - // but still try to close, and report closing errors if they occur. 
- var threw = true - var ret - try { - ret = fs.fchmodSync(fd, mode) - threw = false - } finally { - if (threw) { - try { - fs.closeSync(fd) - } catch (er) {} - } else { - fs.closeSync(fd) - } - } - return ret - } - } +module.exports = Hook +// expose constructors as a named property for TypeScript +module.exports.Hook = Hook +module.exports.Singular = Hook.Singular +module.exports.Collection = Hook.Collection - function patchLutimes (fs) { - if (constants.hasOwnProperty("O_SYMLINK")) { - fs.lutimes = function (path, at, mt, cb) { - fs.open(path, constants.O_SYMLINK, function (er, fd) { - if (er) { - if (cb) cb(er) - return - } - fs.futimes(fd, at, mt, function (er) { - fs.close(fd, function (er2) { - if (cb) cb(er || er2) - }) - }) - }) - } - fs.lutimesSync = function (path, at, mt) { - var fd = fs.openSync(path, constants.O_SYMLINK) - var ret - var threw = true - try { - ret = fs.futimesSync(fd, at, mt) - threw = false - } finally { - if (threw) { - try { - fs.closeSync(fd) - } catch (er) {} - } else { - fs.closeSync(fd) - } - } - return ret - } +/***/ }), + +/***/ 1191: +/***/ ((module) => { - } else { - fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) } - fs.lutimesSync = function () {} - } - } +module.exports = addHook; - function chmodFix (orig) { - if (!orig) return orig - return function (target, mode, cb) { - return orig.call(fs, target, mode, function (er) { - if (chownErOk(er)) er = null - if (cb) cb.apply(this, arguments) - }) - } +function addHook(state, kind, name, hook) { + var orig = hook; + if (!state.registry[name]) { + state.registry[name] = []; } - function chmodFixSync (orig) { - if (!orig) return orig - return function (target, mode) { - try { - return orig.call(fs, target, mode) - } catch (er) { - if (!chownErOk(er)) throw er - } - } + if (kind === "before") { + hook = function (method, options) { + return Promise.resolve() + .then(orig.bind(null, options)) + .then(method.bind(null, options)); + }; } - - function chownFix (orig) { - if (!orig) return orig - return function (target, uid, gid, cb) { - return orig.call(fs, target, uid, gid, function (er) { - if (chownErOk(er)) er = null - if (cb) cb.apply(this, arguments) - }) - } + if (kind === "after") { + hook = function (method, options) { + var result; + return Promise.resolve() + .then(method.bind(null, options)) + .then(function (result_) { + result = result_; + return orig(result, options); + }) + .then(function () { + return result; + }); + }; } - function chownFixSync (orig) { - if (!orig) return orig - return function (target, uid, gid) { - try { - return orig.call(fs, target, uid, gid) - } catch (er) { - if (!chownErOk(er)) throw er - } - } + if (kind === "error") { + hook = function (method, options) { + return Promise.resolve() + .then(method.bind(null, options)) + .catch(function (error) { + return orig(error, options); + }); + }; } - function statFix (orig) { - if (!orig) return orig - // Older versions of Node erroneously returned signed integers for - // uid + gid. - return function (target, options, cb) { - if (typeof options === 'function') { - cb = options - options = null - } - function callback (er, stats) { - if (stats) { - if (stats.uid < 0) stats.uid += 0x100000000 - if (stats.gid < 0) stats.gid += 0x100000000 - } - if (cb) cb.apply(this, arguments) - } - return options ? 
orig.call(fs, target, options, callback) - : orig.call(fs, target, callback) - } - } + state.registry[name].push({ + hook: hook, + orig: orig, + }); +} - function statFixSync (orig) { - if (!orig) return orig - // Older versions of Node erroneously returned signed integers for - // uid + gid. - return function (target, options) { - var stats = options ? orig.call(fs, target, options) - : orig.call(fs, target) - if (stats.uid < 0) stats.uid += 0x100000000 - if (stats.gid < 0) stats.gid += 0x100000000 - return stats; - } + +/***/ }), + +/***/ 7694: +/***/ ((module) => { + +module.exports = register; + +function register(state, name, method, options) { + if (typeof method !== "function") { + throw new Error("method for before hook must be a function"); } - // ENOSYS means that the fs doesn't support the op. Just ignore - // that, because it doesn't matter. - // - // if there's no getuid, or if getuid() is something other - // than 0, and the error is EINVAL or EPERM, then just ignore - // it. - // - // This specific case is a silent failure in cp, install, tar, - // and most other unix tools that manage permissions. - // - // When running as root, or if other types of errors are - // encountered, then it's strict. - function chownErOk (er) { - if (!er) - return true + if (!options) { + options = {}; + } - if (er.code === "ENOSYS") - return true + if (Array.isArray(name)) { + return name.reverse().reduce(function (callback, name) { + return register.bind(null, state, name, callback, options); + }, method)(); + } - var nonroot = !process.getuid || process.getuid() !== 0 - if (nonroot) { - if (er.code === "EINVAL" || er.code === "EPERM") - return true + return Promise.resolve().then(function () { + if (!state.registry[name]) { + return method(options); } - return false - } + return state.registry[name].reduce(function (method, registered) { + return registered.hook.bind(null, method, options); + }, method)(); + }); } /***/ }), -/***/ 3287: -/***/ ((__unused_webpack_module, exports) => { +/***/ 6618: +/***/ ((module) => { -"use strict"; +module.exports = removeHook; +function removeHook(state, name, method) { + if (!state.registry[name]) { + return; + } -Object.defineProperty(exports, "__esModule", ({ value: true })); + var index = state.registry[name] + .map(function (registered) { + return registered.orig; + }) + .indexOf(method); -/*! - * is-plain-object - * - * Copyright (c) 2014-2017, Jon Schlinkert. - * Released under the MIT License. 
- */ + if (index === -1) { + return; + } -function isObject(o) { - return Object.prototype.toString.call(o) === '[object Object]'; + state.registry[name].splice(index, 1); } -function isPlainObject(o) { - var ctor,prot; - if (isObject(o) === false) return false; +/***/ }), - // If has modified constructor - ctor = o.constructor; - if (ctor === undefined) return true; +/***/ 9440: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // If has modified prototype - prot = ctor.prototype; - if (isObject(prot) === false) return false; +"use strict"; - // If constructor does not have an Object-specific method - if (prot.hasOwnProperty('isPrototypeOf') === false) { - return false; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +var isPlainObject = __nccwpck_require__(3287); +var universalUserAgent = __nccwpck_require__(5030); + +function lowercaseKeys(object) { + if (!object) { + return {}; } - // Most likely a plain Object - return true; + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); } -exports.isPlainObject = isPlainObject; +function mergeDeep(defaults, options) { + const result = Object.assign({}, defaults); + Object.keys(options).forEach(key => { + if (isPlainObject.isPlainObject(options[key])) { + if (!(key in defaults)) Object.assign(result, { + [key]: options[key] + });else result[key] = mergeDeep(defaults[key], options[key]); + } else { + Object.assign(result, { + [key]: options[key] + }); + } + }); + return result; +} +function removeUndefinedProperties(obj) { + for (const key in obj) { + if (obj[key] === undefined) { + delete obj[key]; + } + } -/***/ }), + return obj; +} -/***/ 7126: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? { + method, + url + } : { + url: method + }, options); + } else { + options = Object.assign({}, route); + } // lowercase header names before merging with defaults to avoid duplicates -var fs = __nccwpck_require__(5747) -var core -if (process.platform === 'win32' || global.TESTING_WINDOWS) { - core = __nccwpck_require__(2001) -} else { - core = __nccwpck_require__(9728) -} -module.exports = isexe -isexe.sync = sync + options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging -function isexe (path, options, cb) { - if (typeof options === 'function') { - cb = options - options = {} + removeUndefinedProperties(options); + removeUndefinedProperties(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten + + if (defaults && defaults.mediaType.previews.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); } - if (!cb) { - if (typeof Promise !== 'function') { - throw new TypeError('callback not provided') - } + mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); + return mergedOptions; +} + +function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? 
"&" : "?"; + const names = Object.keys(parameters); - return new Promise(function (resolve, reject) { - isexe(path, options || {}, function (er, is) { - if (er) { - reject(er) - } else { - resolve(is) - } - }) - }) + if (names.length === 0) { + return url; } - core(path, options || {}, function (er, is) { - // ignore EACCES because that just means we aren't allowed to run it - if (er) { - if (er.code === 'EACCES' || options && options.ignoreErrors) { - er = null - is = false - } + return url + separator + names.map(name => { + if (name === "q") { + return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); } - cb(er, is) - }) -} -function sync (path, options) { - // my kingdom for a filtered catch - try { - return core.sync(path, options || {}) - } catch (er) { - if (options && options.ignoreErrors || er.code === 'EACCES') { - return false - } else { - throw er - } - } + return `${name}=${encodeURIComponent(parameters[name])}`; + }).join("&"); } +const urlVariableRegex = /\{[^}]+\}/g; -/***/ }), - -/***/ 9728: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); +} -module.exports = isexe -isexe.sync = sync +function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); -var fs = __nccwpck_require__(5747) + if (!matches) { + return []; + } -function isexe (path, options, cb) { - fs.stat(path, function (er, stat) { - cb(er, er ? false : checkStat(stat, options)) - }) + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); } -function sync (path, options) { - return checkStat(fs.statSync(path), options) +function omit(object, keysToOmit) { + return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { + obj[key] = object[key]; + return obj; + }, {}); } -function checkStat (stat, options) { - return stat.isFile() && checkMode(stat, options) -} +// Based on https://github.com/bramstein/url-template, licensed under BSD +// TODO: create separate package. +// +// Copyright (c) 2012-2014, Bram Stein +// All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions +// are met: +// 1. Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// 2. Redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution. +// 3. The name of the author may not be used to endorse or promote products +// derived from this software without specific prior written permission. +// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED +// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO +// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, +// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY +// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, +// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -function checkMode (stat, options) { - var mod = stat.mode - var uid = stat.uid - var gid = stat.gid +/* istanbul ignore file */ +function encodeReserved(str) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); + } - var myUid = options.uid !== undefined ? - options.uid : process.getuid && process.getuid() - var myGid = options.gid !== undefined ? - options.gid : process.getgid && process.getgid() + return part; + }).join(""); +} - var u = parseInt('100', 8) - var g = parseInt('010', 8) - var o = parseInt('001', 8) - var ug = u | g +function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} - var ret = (mod & o) || - (mod & g) && gid === myGid || - (mod & u) && uid === myUid || - (mod & ug) && myUid === 0 +function encodeValue(operator, value, key) { + value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); - return ret + if (key) { + return encodeUnreserved(key) + "=" + value; + } else { + return value; + } } +function isDefined(value) { + return value !== undefined && value !== null; +} -/***/ }), +function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; +} -/***/ 2001: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +function getValues(context, operator, key, modifier) { + var value = context[key], + result = []; -module.exports = isexe -isexe.sync = sync + if (isDefined(value) && value !== "") { + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + value = value.toString(); -var fs = __nccwpck_require__(5747) + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); + } -function checkPathExt (path, options) { - var pathext = options.pathExt !== undefined ? - options.pathExt : process.env.PATHEXT + result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); + } else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function (value) { + result.push(encodeValue(operator, value, isKeyOperator(operator) ? 
key : "")); + }); + } else { + Object.keys(value).forEach(function (k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); + } + } else { + const tmp = []; - if (!pathext) { - return true - } + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function (value) { + tmp.push(encodeValue(operator, value)); + }); + } else { + Object.keys(value).forEach(function (k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); + } - pathext = pathext.split(';') - if (pathext.indexOf('') !== -1) { - return true - } - for (var i = 0; i < pathext.length; i++) { - var p = pathext[i].toLowerCase() - if (p && path.substr(-p.length).toLowerCase() === p) { - return true + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } else if (tmp.length !== 0) { + result.push(tmp.join(",")); + } + } + } + } else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); + } + } else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } else if (value === "") { + result.push(""); } } - return false -} -function checkStat (stat, path, options) { - if (!stat.isSymbolicLink() && !stat.isFile()) { - return false - } - return checkPathExt(path, options) + return result; } -function isexe (path, options, cb) { - fs.stat(path, function (er, stat) { - cb(er, er ? false : checkStat(stat, path, options)) - }) +function parseUrl(template) { + return { + expand: expand.bind(null, template) + }; } -function sync (path, options) { - return checkStat(fs.statSync(path), path, options) -} +function expand(template, context) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); + } -/***/ }), + expression.split(/,/g).forEach(function (variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + }); -/***/ 1917: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (operator && operator !== "+") { + var separator = ","; -"use strict"; + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; + } + return (values.length !== 0 ? operator : "") + values.join(separator); + } else { + return values.join(","); + } + } else { + return encodeReserved(literal); + } + }); +} +function parse(options) { + // https://fetch.spec.whatwg.org/#methods + let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible -var loader = __nccwpck_require__(1161); -var dumper = __nccwpck_require__(8866); + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); -function renamed(from, to) { - return function () { - throw new Error('Function yaml.' + from + ' is removed in js-yaml 4. ' + - 'Use yaml.' 
+ to + ' instead, which is now safe by default.'); - }; -} + if (!/^http/.test(url)) { + url = options.baseUrl + url; + } + const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); -module.exports.Type = __nccwpck_require__(6073); -module.exports.Schema = __nccwpck_require__(1082); -module.exports.FAILSAFE_SCHEMA = __nccwpck_require__(8562); -module.exports.JSON_SCHEMA = __nccwpck_require__(1035); -module.exports.CORE_SCHEMA = __nccwpck_require__(2011); -module.exports.DEFAULT_SCHEMA = __nccwpck_require__(8759); -module.exports.load = loader.load; -module.exports.loadAll = loader.loadAll; -module.exports.dump = dumper.dump; -module.exports.YAMLException = __nccwpck_require__(8179); + if (!isBinaryRequest) { + if (options.mediaType.format) { + // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw + headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); + } -// Re-export all types in case user wants to create custom schema -module.exports.types = { - binary: __nccwpck_require__(7900), - float: __nccwpck_require__(2705), - map: __nccwpck_require__(6150), - null: __nccwpck_require__(721), - pairs: __nccwpck_require__(6860), - set: __nccwpck_require__(9548), - timestamp: __nccwpck_require__(9212), - bool: __nccwpck_require__(4993), - int: __nccwpck_require__(1615), - merge: __nccwpck_require__(6104), - omap: __nccwpck_require__(9046), - seq: __nccwpck_require__(7283), - str: __nccwpck_require__(3619) -}; + if (options.mediaType.previews.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { + const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); + } + } // for GET/HEAD requests, set URL query parameters from remaining parameters + // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters -// Removed functions from JS-YAML 3.0.x -module.exports.safeLoad = renamed('safeLoad', 'load'); -module.exports.safeLoadAll = renamed('safeLoadAll', 'loadAll'); -module.exports.safeDump = renamed('safeDump', 'dump'); + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); + } else { + if ("data" in remainingParameters) { + body = remainingParameters.data; + } else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; + } else { + headers["content-length"] = 0; + } + } + } // default content-type for JSON if body is set -/***/ }), -/***/ 6829: -/***/ ((module) => { + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; + } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. 
+ // fetch does not allow to set `content-length` header, but we can set body to an empty string -"use strict"; + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; + } // Only return body/request keys if present -function isNothing(subject) { - return (typeof subject === 'undefined') || (subject === null); + return Object.assign({ + method, + url, + headers + }, typeof body !== "undefined" ? { + body + } : null, options.request ? { + request: options.request + } : null); } +function endpointWithDefaults(defaults, route, options) { + return parse(merge(defaults, route, options)); +} -function isObject(subject) { - return (typeof subject === 'object') && (subject !== null); +function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS = merge(oldDefaults, newDefaults); + const endpoint = endpointWithDefaults.bind(null, DEFAULTS); + return Object.assign(endpoint, { + DEFAULTS, + defaults: withDefaults.bind(null, DEFAULTS), + merge: merge.bind(null, DEFAULTS), + parse + }); } +const VERSION = "6.0.10"; -function toArray(sequence) { - if (Array.isArray(sequence)) return sequence; - else if (isNothing(sequence)) return []; +const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. +// So we use RequestParameters and add method as additional required property. - return [ sequence ]; -} +const DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent + }, + mediaType: { + format: "", + previews: [] + } +}; +const endpoint = withDefaults(null, DEFAULTS); -function extend(target, source) { - var index, length, key, sourceKeys; +exports.endpoint = endpoint; +//# sourceMappingURL=index.js.map - if (source) { - sourceKeys = Object.keys(source); - for (index = 0, length = sourceKeys.length; index < length; index += 1) { - key = sourceKeys[index]; - target[key] = source[key]; - } - } +/***/ }), - return target; -} +/***/ 8467: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +"use strict"; -function repeat(string, count) { - var result = '', cycle; - for (cycle = 0; cycle < count; cycle += 1) { - result += string; - } +Object.defineProperty(exports, "__esModule", ({ value: true })); - return result; -} +var request = __nccwpck_require__(6234); +var universalUserAgent = __nccwpck_require__(5030); +const VERSION = "4.5.8"; + +class GraphqlError extends Error { + constructor(request, response) { + const message = response.data.errors[0].message; + super(message); + Object.assign(this, response.data); + Object.assign(this, { + headers: response.headers + }); + this.name = "GraphqlError"; + this.request = request; // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + } -function isNegativeZero(number) { - return (number === 0) && (Number.NEGATIVE_INFINITY === 1 / number); } +const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"]; +const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; +function graphql(request, query, options) { + if (typeof query === "string" && options && "query" in options) { + return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`)); + } -module.exports.isNothing = isNothing; -module.exports.isObject = isObject; -module.exports.toArray = 
toArray; -module.exports.repeat = repeat; -module.exports.isNegativeZero = isNegativeZero; -module.exports.extend = extend; + const parsedOptions = typeof query === "string" ? Object.assign({ + query + }, options) : query; + const requestOptions = Object.keys(parsedOptions).reduce((result, key) => { + if (NON_VARIABLE_OPTIONS.includes(key)) { + result[key] = parsedOptions[key]; + return result; + } + if (!result.variables) { + result.variables = {}; + } -/***/ }), + result.variables[key] = parsedOptions[key]; + return result; + }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix + // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451 -/***/ 8866: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl; -"use strict"; + if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { + requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); + } + return request(requestOptions).then(response => { + if (response.data.errors) { + const headers = {}; -/*eslint-disable no-use-before-define*/ + for (const key of Object.keys(response.headers)) { + headers[key] = response.headers[key]; + } -var common = __nccwpck_require__(6829); -var YAMLException = __nccwpck_require__(8179); -var DEFAULT_SCHEMA = __nccwpck_require__(8759); + throw new GraphqlError(requestOptions, { + headers, + data: response.data + }); + } -var _toString = Object.prototype.toString; -var _hasOwnProperty = Object.prototype.hasOwnProperty; + return response.data.data; + }); +} -var CHAR_BOM = 0xFEFF; -var CHAR_TAB = 0x09; /* Tab */ -var CHAR_LINE_FEED = 0x0A; /* LF */ -var CHAR_CARRIAGE_RETURN = 0x0D; /* CR */ -var CHAR_SPACE = 0x20; /* Space */ -var CHAR_EXCLAMATION = 0x21; /* ! */ -var CHAR_DOUBLE_QUOTE = 0x22; /* " */ -var CHAR_SHARP = 0x23; /* # */ -var CHAR_PERCENT = 0x25; /* % */ -var CHAR_AMPERSAND = 0x26; /* & */ -var CHAR_SINGLE_QUOTE = 0x27; /* ' */ -var CHAR_ASTERISK = 0x2A; /* * */ -var CHAR_COMMA = 0x2C; /* , */ -var CHAR_MINUS = 0x2D; /* - */ -var CHAR_COLON = 0x3A; /* : */ -var CHAR_EQUALS = 0x3D; /* = */ -var CHAR_GREATER_THAN = 0x3E; /* > */ -var CHAR_QUESTION = 0x3F; /* ? 
*/ -var CHAR_COMMERCIAL_AT = 0x40; /* @ */ -var CHAR_LEFT_SQUARE_BRACKET = 0x5B; /* [ */ -var CHAR_RIGHT_SQUARE_BRACKET = 0x5D; /* ] */ -var CHAR_GRAVE_ACCENT = 0x60; /* ` */ -var CHAR_LEFT_CURLY_BRACKET = 0x7B; /* { */ -var CHAR_VERTICAL_LINE = 0x7C; /* | */ -var CHAR_RIGHT_CURLY_BRACKET = 0x7D; /* } */ +function withDefaults(request$1, newDefaults) { + const newRequest = request$1.defaults(newDefaults); -var ESCAPE_SEQUENCES = {}; + const newApi = (query, options) => { + return graphql(newRequest, query, options); + }; -ESCAPE_SEQUENCES[0x00] = '\\0'; -ESCAPE_SEQUENCES[0x07] = '\\a'; -ESCAPE_SEQUENCES[0x08] = '\\b'; -ESCAPE_SEQUENCES[0x09] = '\\t'; -ESCAPE_SEQUENCES[0x0A] = '\\n'; -ESCAPE_SEQUENCES[0x0B] = '\\v'; -ESCAPE_SEQUENCES[0x0C] = '\\f'; -ESCAPE_SEQUENCES[0x0D] = '\\r'; -ESCAPE_SEQUENCES[0x1B] = '\\e'; -ESCAPE_SEQUENCES[0x22] = '\\"'; -ESCAPE_SEQUENCES[0x5C] = '\\\\'; -ESCAPE_SEQUENCES[0x85] = '\\N'; -ESCAPE_SEQUENCES[0xA0] = '\\_'; -ESCAPE_SEQUENCES[0x2028] = '\\L'; -ESCAPE_SEQUENCES[0x2029] = '\\P'; + return Object.assign(newApi, { + defaults: withDefaults.bind(null, newRequest), + endpoint: request.request.endpoint + }); +} -var DEPRECATED_BOOLEANS_SYNTAX = [ - 'y', 'Y', 'yes', 'Yes', 'YES', 'on', 'On', 'ON', - 'n', 'N', 'no', 'No', 'NO', 'off', 'Off', 'OFF' -]; +const graphql$1 = withDefaults(request.request, { + headers: { + "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}` + }, + method: "POST", + url: "/graphql" +}); +function withCustomRequest(customRequest) { + return withDefaults(customRequest, { + method: "POST", + url: "/graphql" + }); +} -var DEPRECATED_BASE60_SYNTAX = /^[-+]?[0-9_]+(?::[0-9_]+)+(?:\.[0-9_]*)?$/; +exports.graphql = graphql$1; +exports.withCustomRequest = withCustomRequest; +//# sourceMappingURL=index.js.map -function compileStyleMap(schema, map) { - var result, keys, index, length, tag, style, type; - if (map === null) return {}; +/***/ }), - result = {}; - keys = Object.keys(map); +/***/ 4193: +/***/ ((__unused_webpack_module, exports) => { - for (index = 0, length = keys.length; index < length; index += 1) { - tag = keys[index]; - style = String(map[tag]); +"use strict"; - if (tag.slice(0, 2) === '!!') { - tag = 'tag:yaml.org,2002:' + tag.slice(2); - } - type = schema.compiledTypeMap['fallback'][tag]; - if (type && _hasOwnProperty.call(type.styleAliases, style)) { - style = type.styleAliases[style]; - } +Object.defineProperty(exports, "__esModule", ({ value: true })); - result[tag] = style; - } +const VERSION = "2.13.3"; - return result; -} +/** + * Some “list” response that can be paginated have a different response structure + * + * They have a `total_count` key in the response (search also has `incomplete_results`, + * /installation/repositories also has `repository_selection`), as well as a key with + * the list of the items which name varies from endpoint to endpoint. + * + * Octokit normalizes these responses so that paginated results are always returned following + * the same structure. One challenge is that if the list response has only one page, no Link + * header is provided, so this header alone is not sufficient to check wether a response is + * paginated or not. 
+ * + * We check if a "total_count" key is present in the response data, but also make sure that + * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would + * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref + */ +function normalizePaginatedListResponse(response) { + const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); + if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way + // to retrieve the same information. -function encodeHex(character) { - var string, handle, length; + const incompleteResults = response.data.incomplete_results; + const repositorySelection = response.data.repository_selection; + const totalCount = response.data.total_count; + delete response.data.incomplete_results; + delete response.data.repository_selection; + delete response.data.total_count; + const namespaceKey = Object.keys(response.data)[0]; + const data = response.data[namespaceKey]; + response.data = data; - string = character.toString(16).toUpperCase(); + if (typeof incompleteResults !== "undefined") { + response.data.incomplete_results = incompleteResults; + } - if (character <= 0xFF) { - handle = 'x'; - length = 2; - } else if (character <= 0xFFFF) { - handle = 'u'; - length = 4; - } else if (character <= 0xFFFFFFFF) { - handle = 'U'; - length = 8; - } else { - throw new YAMLException('code point within a string may not be greater than 0xFFFFFFFF'); + if (typeof repositorySelection !== "undefined") { + response.data.repository_selection = repositorySelection; } - return '\\' + handle + common.repeat('0', length - string.length) + string; + response.data.total_count = totalCount; + return response; } +function iterator(octokit, route, parameters) { + const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); + const requestMethod = typeof route === "function" ? route : octokit.request; + const method = options.method; + const headers = options.headers; + let url = options.url; + return { + [Symbol.asyncIterator]: () => ({ + async next() { + if (!url) return { + done: true + }; + const response = await requestMethod({ + method, + url, + headers + }); + const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format: + // '; rel="next", ; rel="last"' + // sets `url` to undefined if "next" URL is not present or `link` header is not set -var QUOTING_TYPE_SINGLE = 1, - QUOTING_TYPE_DOUBLE = 2; - -function State(options) { - this.schema = options['schema'] || DEFAULT_SCHEMA; - this.indent = Math.max(1, (options['indent'] || 2)); - this.noArrayIndent = options['noArrayIndent'] || false; - this.skipInvalid = options['skipInvalid'] || false; - this.flowLevel = (common.isNothing(options['flowLevel']) ? -1 : options['flowLevel']); - this.styleMap = compileStyleMap(this.schema, options['styles'] || null); - this.sortKeys = options['sortKeys'] || false; - this.lineWidth = options['lineWidth'] || 80; - this.noRefs = options['noRefs'] || false; - this.noCompatMode = options['noCompatMode'] || false; - this.condenseFlow = options['condenseFlow'] || false; - this.quotingType = options['quotingType'] === '"' ? QUOTING_TYPE_DOUBLE : QUOTING_TYPE_SINGLE; - this.forceQuotes = options['forceQuotes'] || false; - this.replacer = typeof options['replacer'] === 'function' ? 
options['replacer'] : null; - - this.implicitTypes = this.schema.compiledImplicit; - this.explicitTypes = this.schema.compiledExplicit; - - this.tag = null; - this.result = ''; + url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; + return { + value: normalizedResponse + }; + } - this.duplicates = []; - this.usedDuplicates = null; + }) + }; } -// Indents every line in a string. Empty lines (\n only) are not indented. -function indentString(string, spaces) { - var ind = common.repeat(' ', spaces), - position = 0, - next = -1, - result = '', - line, - length = string.length; - - while (position < length) { - next = string.indexOf('\n', position); - if (next === -1) { - line = string.slice(position); - position = length; - } else { - line = string.slice(position, next + 1); - position = next + 1; - } - - if (line.length && line !== '\n') result += ind; - - result += line; +function paginate(octokit, route, parameters, mapFn) { + if (typeof parameters === "function") { + mapFn = parameters; + parameters = undefined; } - return result; -} - -function generateNextLine(state, level) { - return '\n' + common.repeat(' ', state.indent * level); + return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); } -function testImplicitResolving(state, str) { - var index, length, type; +function gather(octokit, results, iterator, mapFn) { + return iterator.next().then(result => { + if (result.done) { + return results; + } - for (index = 0, length = state.implicitTypes.length; index < length; index += 1) { - type = state.implicitTypes[index]; + let earlyExit = false; - if (type.resolve(str)) { - return true; + function done() { + earlyExit = true; } - } - return false; -} + results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data); -// [33] s-white ::= s-space | s-tab -function isWhitespace(c) { - return c === CHAR_SPACE || c === CHAR_TAB; -} + if (earlyExit) { + return results; + } -// Returns true if the character can be printed without escaping. -// From YAML 1.2: "any allowed characters known to be non-printable -// should also be escaped. [However,] This isn’t mandatory" -// Derived from nb-char - \t - #x85 - #xA0 - #x2028 - #x2029. 
-function isPrintable(c) { - return (0x00020 <= c && c <= 0x00007E) - || ((0x000A1 <= c && c <= 0x00D7FF) && c !== 0x2028 && c !== 0x2029) - || ((0x0E000 <= c && c <= 0x00FFFD) && c !== CHAR_BOM) - || (0x10000 <= c && c <= 0x10FFFF); + return gather(octokit, results, iterator, mapFn); + }); } -// [34] ns-char ::= nb-char - s-white -// [27] nb-char ::= c-printable - b-char - c-byte-order-mark -// [26] b-char ::= b-line-feed | b-carriage-return -// Including s-white (for some reason, examples doesn't match specs in this aspect) -// ns-char ::= c-printable - b-line-feed - b-carriage-return - c-byte-order-mark -function isNsCharOrWhitespace(c) { - return isPrintable(c) - && c !== CHAR_BOM - // - b-char - && c !== CHAR_CARRIAGE_RETURN - && c !== CHAR_LINE_FEED; -} +const composePaginateRest = Object.assign(paginate, { + iterator +}); -// [127] ns-plain-safe(c) ::= c = flow-out ⇒ ns-plain-safe-out -// c = flow-in ⇒ ns-plain-safe-in -// c = block-key ⇒ ns-plain-safe-out -// c = flow-key ⇒ ns-plain-safe-in -// [128] ns-plain-safe-out ::= ns-char -// [129] ns-plain-safe-in ::= ns-char - c-flow-indicator -// [130] ns-plain-char(c) ::= ( ns-plain-safe(c) - “:” - “#” ) -// | ( /* An ns-char preceding */ “#” ) -// | ( “:” /* Followed by an ns-plain-safe(c) */ ) -function isPlainSafe(c, prev, inblock) { - var cIsNsCharOrWhitespace = isNsCharOrWhitespace(c); - var cIsNsChar = cIsNsCharOrWhitespace && !isWhitespace(c); - return ( - // ns-plain-safe - inblock ? // c = flow-in - cIsNsCharOrWhitespace - : cIsNsCharOrWhitespace - // - c-flow-indicator - && c !== CHAR_COMMA - && c !== CHAR_LEFT_SQUARE_BRACKET - && c !== CHAR_RIGHT_SQUARE_BRACKET - && c !== CHAR_LEFT_CURLY_BRACKET - && c !== CHAR_RIGHT_CURLY_BRACKET - ) - // ns-plain-char - && c !== CHAR_SHARP // false on '#' - && !(prev === CHAR_COLON && !cIsNsChar) // false on ': ' - || (isNsCharOrWhitespace(prev) && !isWhitespace(prev) && c === CHAR_SHARP) // change to true on '[^ ]#' - || (prev === CHAR_COLON && cIsNsChar); // change to true on ':[^ ]' +const paginatingEndpoints = ["GET /app/installations", "GET /applications/grants", "GET /authorizations", "GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/actions/runners/downloads", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/runners/downloads", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/blocks", "GET /orgs/{org}/credential-authorizations", "GET /orgs/{org}/events", "GET 
/orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/projects", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/team-sync/group-mappings", "GET /orgs/{org}/teams/{team_slug}/teams", "GET /projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET /repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runners/downloads", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET 
/repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET /repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET /repos/{owner}/{repo}/teams", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /scim/v2/enterprises/{enterprise}/Groups", "GET /scim/v2/enterprises/{enterprise}/Users", "GET /scim/v2/organizations/{org}/Users", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/team-sync/group-mappings", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/starred", "GET /users/{username}/subscriptions"]; + +function isPaginatingEndpoint(arg) { + if (typeof arg === "string") { + return paginatingEndpoints.includes(arg); + } else { + return false; + } } -// Simplified test for values allowed as the first character in plain style. -function isPlainSafeFirst(c) { - // Uses a subset of ns-char - c-indicator - // where ns-char = nb-char - s-white. 
- // No support of ( ( “?” | “:” | “-” ) /* Followed by an ns-plain-safe(c)) */ ) part - return isPrintable(c) && c !== CHAR_BOM - && !isWhitespace(c) // - s-white - // - (c-indicator ::= - // “-” | “?” | “:” | “,” | “[” | “]” | “{” | “}” - && c !== CHAR_MINUS - && c !== CHAR_QUESTION - && c !== CHAR_COLON - && c !== CHAR_COMMA - && c !== CHAR_LEFT_SQUARE_BRACKET - && c !== CHAR_RIGHT_SQUARE_BRACKET - && c !== CHAR_LEFT_CURLY_BRACKET - && c !== CHAR_RIGHT_CURLY_BRACKET - // | “#” | “&” | “*” | “!” | “|” | “=” | “>” | “'” | “"” - && c !== CHAR_SHARP - && c !== CHAR_AMPERSAND - && c !== CHAR_ASTERISK - && c !== CHAR_EXCLAMATION - && c !== CHAR_VERTICAL_LINE - && c !== CHAR_EQUALS - && c !== CHAR_GREATER_THAN - && c !== CHAR_SINGLE_QUOTE - && c !== CHAR_DOUBLE_QUOTE - // | “%” | “@” | “`”) - && c !== CHAR_PERCENT - && c !== CHAR_COMMERCIAL_AT - && c !== CHAR_GRAVE_ACCENT; +/** + * @param octokit Octokit instance + * @param options Options passed to Octokit constructor + */ + +function paginateRest(octokit) { + return { + paginate: Object.assign(paginate.bind(null, octokit), { + iterator: iterator.bind(null, octokit) + }) + }; } +paginateRest.VERSION = VERSION; -// Simplified test for values allowed as the last character in plain style. -function isPlainSafeLast(c) { - // just not whitespace or colon, it will be checked to be plain character later - return !isWhitespace(c) && c !== CHAR_COLON; +exports.composePaginateRest = composePaginateRest; +exports.isPaginatingEndpoint = isPaginatingEndpoint; +exports.paginateRest = paginateRest; +exports.paginatingEndpoints = paginatingEndpoints; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 3044: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +function ownKeys(object, enumerableOnly) { + var keys = Object.keys(object); + + if (Object.getOwnPropertySymbols) { + var symbols = Object.getOwnPropertySymbols(object); + + if (enumerableOnly) { + symbols = symbols.filter(function (sym) { + return Object.getOwnPropertyDescriptor(object, sym).enumerable; + }); + } + + keys.push.apply(keys, symbols); + } + + return keys; } -// Same as 'string'.codePointAt(pos), but works in older browsers. -function codePointAt(string, pos) { - var first = string.charCodeAt(pos), second; - if (first >= 0xD800 && first <= 0xDBFF && pos + 1 < string.length) { - second = string.charCodeAt(pos + 1); - if (second >= 0xDC00 && second <= 0xDFFF) { - // https://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae - return (first - 0xD800) * 0x400 + second - 0xDC00 + 0x10000; +function _objectSpread2(target) { + for (var i = 1; i < arguments.length; i++) { + var source = arguments[i] != null ? arguments[i] : {}; + + if (i % 2) { + ownKeys(Object(source), true).forEach(function (key) { + _defineProperty(target, key, source[key]); + }); + } else if (Object.getOwnPropertyDescriptors) { + Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); + } else { + ownKeys(Object(source)).forEach(function (key) { + Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); + }); } } - return first; + + return target; } -// Determines whether block indentation indicator is required. 
-function needIndentIndicator(string) { - var leadingSpaceRe = /^\n* /; - return leadingSpaceRe.test(string); +function _defineProperty(obj, key, value) { + if (key in obj) { + Object.defineProperty(obj, key, { + value: value, + enumerable: true, + configurable: true, + writable: true + }); + } else { + obj[key] = value; + } + + return obj; } -var STYLE_PLAIN = 1, - STYLE_SINGLE = 2, - STYLE_LITERAL = 3, - STYLE_FOLDED = 4, - STYLE_DOUBLE = 5; +const Endpoints = { + actions: { + addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], + approveWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"], + cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], + createOrUpdateEnvironmentSecret: ["PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], + createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], + createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"], + createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"], + createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], + createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], + createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"], + deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + deleteEnvironmentSecret: ["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], + deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], + deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"], + deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"], + deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], + deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], + disableSelectedRepositoryGithubActionsOrganization: ["DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"], + disableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"], + downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"], + downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"], + downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], + enableSelectedRepositoryGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"], + enableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"], + getAllowedActionsOrganization: ["GET /orgs/{org}/actions/permissions/selected-actions"], + getAllowedActionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"], + getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + getEnvironmentPublicKey: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"], + getEnvironmentSecret: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], + getGithubActionsPermissionsOrganization: ["GET /orgs/{org}/actions/permissions"], + getGithubActionsPermissionsRepository: 
["GET /repos/{owner}/{repo}/actions/permissions"], + getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], + getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], + getPendingDeploymentsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], + getRepoPermissions: ["GET /repos/{owner}/{repo}/actions/permissions", {}, { + renamed: ["actions", "getGithubActionsPermissionsRepository"] + }], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], + getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + getReviewsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"], + getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"], + getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], + getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], + getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], + getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"], + listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], + listEnvironmentSecrets: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets"], + listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], + listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], + listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], + listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"], + listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"], + listSelectedRepositoriesEnabledGithubActionsOrganization: ["GET /orgs/{org}/actions/permissions/repositories"], + listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], + listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], + listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], + listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], + listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], + reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], + removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], + reviewPendingDeploymentsForRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], + setAllowedActionsOrganization: ["PUT /orgs/{org}/actions/permissions/selected-actions"], + setAllowedActionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"], + setGithubActionsPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions"], + setGithubActionsPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions"], + setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"], + setSelectedRepositoriesEnabledGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories"] + }, + activity: { + checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], + deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], + deleteThreadSubscription: 
["DELETE /notifications/threads/{thread_id}/subscription"], + getFeeds: ["GET /feeds"], + getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], + getThread: ["GET /notifications/threads/{thread_id}"], + getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"], + listEventsForAuthenticatedUser: ["GET /users/{username}/events"], + listNotificationsForAuthenticatedUser: ["GET /notifications"], + listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"], + listPublicEvents: ["GET /events"], + listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], + listPublicEventsForUser: ["GET /users/{username}/events/public"], + listPublicOrgEvents: ["GET /orgs/{org}/events"], + listReceivedEventsForUser: ["GET /users/{username}/received_events"], + listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"], + listRepoEvents: ["GET /repos/{owner}/{repo}/events"], + listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"], + listReposStarredByAuthenticatedUser: ["GET /user/starred"], + listReposStarredByUser: ["GET /users/{username}/starred"], + listReposWatchedByUser: ["GET /users/{username}/subscriptions"], + listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], + listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], + listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], + markNotificationsAsRead: ["PUT /notifications"], + markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], + markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], + setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], + setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"], + starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], + unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] + }, + apps: { + addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"], + checkToken: ["POST /applications/{client_id}/token"], + createContentAttachment: ["POST /content_references/{content_reference_id}/attachments", { + mediaType: { + previews: ["corsair"] + } + }], + createContentAttachmentForRepo: ["POST /repos/{owner}/{repo}/content_references/{content_reference_id}/attachments", { + mediaType: { + previews: ["corsair"] + } + }], + createFromManifest: ["POST /app-manifests/{code}/conversions"], + createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"], + deleteAuthorization: ["DELETE /applications/{client_id}/grant"], + deleteInstallation: ["DELETE /app/installations/{installation_id}"], + deleteToken: ["DELETE /applications/{client_id}/token"], + getAuthenticated: ["GET /app"], + getBySlug: ["GET /apps/{app_slug}"], + getInstallation: ["GET /app/installations/{installation_id}"], + getOrgInstallation: ["GET /orgs/{org}/installation"], + getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], + getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"], + getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"], + getUserInstallation: ["GET /users/{username}/installation"], + getWebhookConfigForApp: ["GET /app/hook/config"], + listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], + listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"], + 
listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories"], + listInstallations: ["GET /app/installations"], + listInstallationsForAuthenticatedUser: ["GET /user/installations"], + listPlans: ["GET /marketplace_listing/plans"], + listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], + listReposAccessibleToInstallation: ["GET /installation/repositories"], + listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], + listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"], + removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}"], + resetToken: ["PATCH /applications/{client_id}/token"], + revokeInstallationAccessToken: ["DELETE /installation/token"], + scopeToken: ["POST /applications/{client_id}/token/scoped"], + suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], + unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"], + updateWebhookConfigForApp: ["PATCH /app/hook/config"] + }, + billing: { + getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], + getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"], + getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], + getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"], + getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"], + getSharedStorageBillingUser: ["GET /users/{username}/settings/billing/shared-storage"] + }, + checks: { + create: ["POST /repos/{owner}/{repo}/check-runs"], + createSuite: ["POST /repos/{owner}/{repo}/check-suites"], + get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], + getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], + listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"], + listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], + listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"], + listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], + rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"], + setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences"], + update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] + }, + codeScanning: { + deleteAnalysis: ["DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"], + getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", {}, { + renamedParameters: { + alert_id: "alert_number" + } + }], + getAnalysis: ["GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"], + getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], + listAlertInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], + listAlertsInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", {}, { + renamed: ["codeScanning", "listAlertInstances"] + }], + listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], + updateAlert: ["PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"], + uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] + }, + codesOfConduct: { + getAllCodesOfConduct: ["GET /codes_of_conduct", { + mediaType: { + previews: ["scarlet-witch"] + } + 
}], + getConductCode: ["GET /codes_of_conduct/{key}", { + mediaType: { + previews: ["scarlet-witch"] + } + }], + getForRepo: ["GET /repos/{owner}/{repo}/community/code_of_conduct", { + mediaType: { + previews: ["scarlet-witch"] + } + }] + }, + emojis: { + get: ["GET /emojis"] + }, + enterpriseAdmin: { + disableSelectedOrganizationGithubActionsEnterprise: ["DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], + enableSelectedOrganizationGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], + getAllowedActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/selected-actions"], + getGithubActionsPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions"], + listSelectedOrganizationsEnabledGithubActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/organizations"], + setAllowedActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/selected-actions"], + setGithubActionsPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions"], + setSelectedOrganizationsEnabledGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations"] + }, + gists: { + checkIsStarred: ["GET /gists/{gist_id}/star"], + create: ["POST /gists"], + createComment: ["POST /gists/{gist_id}/comments"], + delete: ["DELETE /gists/{gist_id}"], + deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], + fork: ["POST /gists/{gist_id}/forks"], + get: ["GET /gists/{gist_id}"], + getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], + getRevision: ["GET /gists/{gist_id}/{sha}"], + list: ["GET /gists"], + listComments: ["GET /gists/{gist_id}/comments"], + listCommits: ["GET /gists/{gist_id}/commits"], + listForUser: ["GET /users/{username}/gists"], + listForks: ["GET /gists/{gist_id}/forks"], + listPublic: ["GET /gists/public"], + listStarred: ["GET /gists/starred"], + star: ["PUT /gists/{gist_id}/star"], + unstar: ["DELETE /gists/{gist_id}/star"], + update: ["PATCH /gists/{gist_id}"], + updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] + }, + git: { + createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], + createCommit: ["POST /repos/{owner}/{repo}/git/commits"], + createRef: ["POST /repos/{owner}/{repo}/git/refs"], + createTag: ["POST /repos/{owner}/{repo}/git/tags"], + createTree: ["POST /repos/{owner}/{repo}/git/trees"], + deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], + getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], + getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], + getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], + getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], + getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], + listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], + updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] + }, + gitignore: { + getAllTemplates: ["GET /gitignore/templates"], + getTemplate: ["GET /gitignore/templates/{name}"] + }, + interactions: { + getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], + getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], + getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], + getRestrictionsForYourPublicRepos: ["GET /user/interaction-limits", {}, { + renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] + }], + removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], + removeRestrictionsForOrg: 
["DELETE /orgs/{org}/interaction-limits"], + removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits"], + removeRestrictionsForYourPublicRepos: ["DELETE /user/interaction-limits", {}, { + renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] + }], + setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], + setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], + setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], + setRestrictionsForYourPublicRepos: ["PUT /user/interaction-limits", {}, { + renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] + }] + }, + issues: { + addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"], + addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], + checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], + create: ["POST /repos/{owner}/{repo}/issues"], + createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"], + createLabel: ["POST /repos/{owner}/{repo}/labels"], + createMilestone: ["POST /repos/{owner}/{repo}/milestones"], + deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"], + deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], + deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"], + get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], + getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], + getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], + getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], + getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], + list: ["GET /issues"], + listAssignees: ["GET /repos/{owner}/{repo}/assignees"], + listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], + listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], + listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], + listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], + listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", { + mediaType: { + previews: ["mockingbird"] + } + }], + listForAuthenticatedUser: ["GET /user/issues"], + listForOrg: ["GET /orgs/{org}/issues"], + listForRepo: ["GET /repos/{owner}/{repo}/issues"], + listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"], + listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], + listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"], + listMilestones: ["GET /repos/{owner}/{repo}/milestones"], + lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], + removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"], + removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"], + removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"], + setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], + unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], + update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], + updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], + updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], + updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"] + }, + licenses: { + get: ["GET /licenses/{license}"], + getAllCommonlyUsed: ["GET /licenses"], + getForRepo: ["GET /repos/{owner}/{repo}/license"] + }, 
+ markdown: { + render: ["POST /markdown"], + renderRaw: ["POST /markdown/raw", { + headers: { + "content-type": "text/plain; charset=utf-8" + } + }] + }, + meta: { + get: ["GET /meta"], + getOctocat: ["GET /octocat"], + getZen: ["GET /zen"], + root: ["GET /"] + }, + migrations: { + cancelImport: ["DELETE /repos/{owner}/{repo}/import"], + deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive", { + mediaType: { + previews: ["wyandotte"] + } + }], + deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive", { + mediaType: { + previews: ["wyandotte"] + } + }], + downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive", { + mediaType: { + previews: ["wyandotte"] + } + }], + getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive", { + mediaType: { + previews: ["wyandotte"] + } + }], + getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], + getImportStatus: ["GET /repos/{owner}/{repo}/import"], + getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], + getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}", { + mediaType: { + previews: ["wyandotte"] + } + }], + getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}", { + mediaType: { + previews: ["wyandotte"] + } + }], + listForAuthenticatedUser: ["GET /user/migrations", { + mediaType: { + previews: ["wyandotte"] + } + }], + listForOrg: ["GET /orgs/{org}/migrations", { + mediaType: { + previews: ["wyandotte"] + } + }], + listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories", { + mediaType: { + previews: ["wyandotte"] + } + }], + listReposForUser: ["GET /user/migrations/{migration_id}/repositories", { + mediaType: { + previews: ["wyandotte"] + } + }], + mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], + setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], + startForAuthenticatedUser: ["POST /user/migrations"], + startForOrg: ["POST /orgs/{org}/migrations"], + startImport: ["PUT /repos/{owner}/{repo}/import"], + unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock", { + mediaType: { + previews: ["wyandotte"] + } + }], + unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", { + mediaType: { + previews: ["wyandotte"] + } + }], + updateImport: ["PATCH /repos/{owner}/{repo}/import"] + }, + orgs: { + blockUser: ["PUT /orgs/{org}/blocks/{username}"], + cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], + checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], + checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], + checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], + convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"], + createInvitation: ["POST /orgs/{org}/invitations"], + createWebhook: ["POST /orgs/{org}/hooks"], + deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], + get: ["GET /orgs/{org}"], + getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], + getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], + getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], + getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], + list: ["GET /organizations"], + listAppInstallations: ["GET /orgs/{org}/installations"], + listBlockedUsers: ["GET /orgs/{org}/blocks"], + listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], + listForAuthenticatedUser: ["GET 
/user/orgs"], + listForUser: ["GET /users/{username}/orgs"], + listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], + listMembers: ["GET /orgs/{org}/members"], + listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], + listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], + listPendingInvitations: ["GET /orgs/{org}/invitations"], + listPublicMembers: ["GET /orgs/{org}/public_members"], + listWebhooks: ["GET /orgs/{org}/hooks"], + pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], + removeMember: ["DELETE /orgs/{org}/members/{username}"], + removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], + removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"], + removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"], + setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], + setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"], + unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], + update: ["PATCH /orgs/{org}"], + updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"], + updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], + updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] + }, + packages: { + deletePackageForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}"], + deletePackageForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}"], + deletePackageVersionForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"], + deletePackageVersionForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"], + getAllPackageVersionsForAPackageOwnedByAnOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions", {}, { + renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] + }], + getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions", {}, { + renamed: ["packages", "getAllPackageVersionsForPackageOwnedByAuthenticatedUser"] + }], + getAllPackageVersionsForPackageOwnedByAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions"], + getAllPackageVersionsForPackageOwnedByOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"], + getAllPackageVersionsForPackageOwnedByUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions"], + getPackageForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}"], + getPackageForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}"], + getPackageForUser: ["GET /users/{username}/packages/{package_type}/{package_name}"], + getPackageVersionForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"], + getPackageVersionForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"], + getPackageVersionForUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"], + restorePackageForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/restore{?token}"], + restorePackageForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"], + restorePackageVersionForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"], + 
restorePackageVersionForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"] + }, + projects: { + addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}", { + mediaType: { + previews: ["inertia"] + } + }], + createCard: ["POST /projects/columns/{column_id}/cards", { + mediaType: { + previews: ["inertia"] + } + }], + createColumn: ["POST /projects/{project_id}/columns", { + mediaType: { + previews: ["inertia"] + } + }], + createForAuthenticatedUser: ["POST /user/projects", { + mediaType: { + previews: ["inertia"] + } + }], + createForOrg: ["POST /orgs/{org}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + createForRepo: ["POST /repos/{owner}/{repo}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + delete: ["DELETE /projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + deleteCard: ["DELETE /projects/columns/cards/{card_id}", { + mediaType: { + previews: ["inertia"] + } + }], + deleteColumn: ["DELETE /projects/columns/{column_id}", { + mediaType: { + previews: ["inertia"] + } + }], + get: ["GET /projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + getCard: ["GET /projects/columns/cards/{card_id}", { + mediaType: { + previews: ["inertia"] + } + }], + getColumn: ["GET /projects/columns/{column_id}", { + mediaType: { + previews: ["inertia"] + } + }], + getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission", { + mediaType: { + previews: ["inertia"] + } + }], + listCards: ["GET /projects/columns/{column_id}/cards", { + mediaType: { + previews: ["inertia"] + } + }], + listCollaborators: ["GET /projects/{project_id}/collaborators", { + mediaType: { + previews: ["inertia"] + } + }], + listColumns: ["GET /projects/{project_id}/columns", { + mediaType: { + previews: ["inertia"] + } + }], + listForOrg: ["GET /orgs/{org}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + listForRepo: ["GET /repos/{owner}/{repo}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + listForUser: ["GET /users/{username}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + moveCard: ["POST /projects/columns/cards/{card_id}/moves", { + mediaType: { + previews: ["inertia"] + } + }], + moveColumn: ["POST /projects/columns/{column_id}/moves", { + mediaType: { + previews: ["inertia"] + } + }], + removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}", { + mediaType: { + previews: ["inertia"] + } + }], + update: ["PATCH /projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + updateCard: ["PATCH /projects/columns/cards/{card_id}", { + mediaType: { + previews: ["inertia"] + } + }], + updateColumn: ["PATCH /projects/columns/{column_id}", { + mediaType: { + previews: ["inertia"] + } + }] + }, + pulls: { + checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + create: ["POST /repos/{owner}/{repo}/pulls"], + createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"], + createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"], + deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"], + get: ["GET 
/repos/{owner}/{repo}/pulls/{pull_number}"], + getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + list: ["GET /repos/{owner}/{repo}/pulls"], + listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"], + listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], + listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], + listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], + listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"], + listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], + listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], + requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], + submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"], + update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], + updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch", { + mediaType: { + previews: ["lydian"] + } + }], + updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"] + }, + rateLimit: { + get: ["GET /rate_limit"] + }, + reactions: { + createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForRelease: ["POST /repos/{owner}/{repo}/releases/{release_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] 
+ } + }], + deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteLegacy: ["DELETE /reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }, { + deprecated: "octokit.rest.reactions.deleteLegacy() is deprecated, see https://docs.github.com/rest/reference/reactions/#delete-a-reaction-legacy" + }], + listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }] + }, + repos: { + acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}"], + addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps" + }], + addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], + addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts" + }], + addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams" + }], + addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users" + }], + checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], + checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts", { + mediaType: { + previews: ["dorian"] + } + }], + compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], + compareCommitsWithBasehead: ["GET /repos/{owner}/{repo}/compare/{basehead}"], + createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"], + createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }], + createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], + createDeployKey: ["POST /repos/{owner}/{repo}/keys"], + createDeployment: ["POST /repos/{owner}/{repo}/deployments"], + createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], + createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], + createForAuthenticatedUser: ["POST /user/repos"], + createFork: ["POST /repos/{owner}/{repo}/forks"], + createInOrg: ["POST /orgs/{org}/repos"], + createOrUpdateEnvironment: ["PUT /repos/{owner}/{repo}/environments/{environment_name}"], + createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], + createPagesSite: ["POST /repos/{owner}/{repo}/pages", { + mediaType: { + previews: 
["switcheroo"] + } + }], + createRelease: ["POST /repos/{owner}/{repo}/releases"], + createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate", { + mediaType: { + previews: ["baptiste"] + } + }], + createWebhook: ["POST /repos/{owner}/{repo}/hooks"], + declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}"], + delete: ["DELETE /repos/{owner}/{repo}"], + deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], + deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + deleteAnEnvironment: ["DELETE /repos/{owner}/{repo}/environments/{environment_name}"], + deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"], + deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], + deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }], + deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], + deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"], + deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], + deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"], + deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages", { + mediaType: { + previews: ["switcheroo"] + } + }], + deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], + deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"], + deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], + disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes", { + mediaType: { + previews: ["london"] + } + }], + disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts", { + mediaType: { + previews: ["dorian"] + } + }], + downloadArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}", {}, { + renamed: ["repos", "downloadZipballArchive"] + }], + downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], + downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], + enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes", { + mediaType: { + previews: ["london"] + } + }], + enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts", { + mediaType: { + previews: ["dorian"] + } + }], + get: ["GET /repos/{owner}/{repo}"], + getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], + getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], + getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"], + getAllTopics: ["GET /repos/{owner}/{repo}/topics", { + mediaType: { + previews: ["mercy"] + } + }], + getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"], + getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], + getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"], + getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], + getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], + getCollaboratorPermissionLevel: 
["GET /repos/{owner}/{repo}/collaborators/{username}/permission"], + getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], + getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], + getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], + getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], + getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }], + getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], + getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], + getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], + getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], + getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], + getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"], + getEnvironment: ["GET /repos/{owner}/{repo}/environments/{environment_name}"], + getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], + getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], + getPages: ["GET /repos/{owner}/{repo}/pages"], + getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], + getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], + getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], + getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], + getReadme: ["GET /repos/{owner}/{repo}/readme"], + getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], + getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], + getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], + getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], + getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"], + getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], + getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], + getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"], + getViews: ["GET /repos/{owner}/{repo}/traffic/views"], + getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], + getWebhookConfigForRepo: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/config"], + listBranches: ["GET /repos/{owner}/{repo}/branches"], + listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", { + mediaType: { + previews: ["groot"] + } + }], + listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], + listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"], + listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], + listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"], + listCommits: ["GET /repos/{owner}/{repo}/commits"], + listContributors: ["GET /repos/{owner}/{repo}/contributors"], + listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], + listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], + listDeployments: ["GET /repos/{owner}/{repo}/deployments"], + listForAuthenticatedUser: ["GET 
/user/repos"], + listForOrg: ["GET /orgs/{org}/repos"], + listForUser: ["GET /users/{username}/repos"], + listForks: ["GET /repos/{owner}/{repo}/forks"], + listInvitations: ["GET /repos/{owner}/{repo}/invitations"], + listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], + listLanguages: ["GET /repos/{owner}/{repo}/languages"], + listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], + listPublic: ["GET /repositories"], + listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", { + mediaType: { + previews: ["groot"] + } + }], + listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"], + listReleases: ["GET /repos/{owner}/{repo}/releases"], + listTags: ["GET /repos/{owner}/{repo}/tags"], + listTeams: ["GET /repos/{owner}/{repo}/teams"], + listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], + merge: ["POST /repos/{owner}/{repo}/merges"], + pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], + removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps" + }], + removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"], + removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts" + }], + removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + removeTeamAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams" + }], + removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users" + }], + renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], + replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics", { + mediaType: { + previews: ["mercy"] + } + }], + requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], + setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps" + }], + setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts" + }], + setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams" + }], + setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users" + }], + testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], + transfer: ["POST /repos/{owner}/{repo}/transfer"], + update: ["PATCH /repos/{owner}/{repo}"], + updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"], + updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], + updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], + updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"], + updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], + updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"], + updateStatusCheckPotection: ["PATCH 
/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { + renamed: ["repos", "updateStatusCheckProtection"] + }], + updateStatusCheckProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], + updateWebhookConfigForRepo: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"], + uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", { + baseUrl: "https://uploads.github.com" + }] + }, + search: { + code: ["GET /search/code"], + commits: ["GET /search/commits", { + mediaType: { + previews: ["cloak"] + } + }], + issuesAndPullRequests: ["GET /search/issues"], + labels: ["GET /search/labels"], + repos: ["GET /search/repositories"], + topics: ["GET /search/topics", { + mediaType: { + previews: ["mercy"] + } + }], + users: ["GET /search/users"] + }, + secretScanning: { + getAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], + updateAlert: ["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"] + }, + teams: { + addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"], + addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + create: ["POST /orgs/{org}/teams"], + createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], + createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], + deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], + deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], + getByName: ["GET /orgs/{org}/teams/{team_slug}"], + getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], + getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"], + list: ["GET /orgs/{org}/teams"], + listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], + listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], + listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], + listForAuthenticatedUser: ["GET /user/teams"], + listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], + listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"], + listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], + removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"], + removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"], + removeRepoInOrg: ["DELETE 
/orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], + updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] + }, + users: { + addEmailForAuthenticated: ["POST /user/emails"], + block: ["PUT /user/blocks/{username}"], + checkBlocked: ["GET /user/blocks/{username}"], + checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], + checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], + createGpgKeyForAuthenticated: ["POST /user/gpg_keys"], + createPublicSshKeyForAuthenticated: ["POST /user/keys"], + deleteEmailForAuthenticated: ["DELETE /user/emails"], + deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}"], + deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}"], + follow: ["PUT /user/following/{username}"], + getAuthenticated: ["GET /user"], + getByUsername: ["GET /users/{username}"], + getContextForUser: ["GET /users/{username}/hovercard"], + getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}"], + getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}"], + list: ["GET /users"], + listBlockedByAuthenticated: ["GET /user/blocks"], + listEmailsForAuthenticated: ["GET /user/emails"], + listFollowedByAuthenticated: ["GET /user/following"], + listFollowersForAuthenticatedUser: ["GET /user/followers"], + listFollowersForUser: ["GET /users/{username}/followers"], + listFollowingForUser: ["GET /users/{username}/following"], + listGpgKeysForAuthenticated: ["GET /user/gpg_keys"], + listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], + listPublicEmailsForAuthenticated: ["GET /user/public_emails"], + listPublicKeysForUser: ["GET /users/{username}/keys"], + listPublicSshKeysForAuthenticated: ["GET /user/keys"], + setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility"], + unblock: ["DELETE /user/blocks/{username}"], + unfollow: ["DELETE /user/following/{username}"], + updateAuthenticated: ["PATCH /user"] + } +}; -// Determines which scalar styles are possible and returns the preferred style. -// lineWidth = -1 => no limit. -// Pre-conditions: str.length > 0. -// Post-conditions: -// STYLE_PLAIN or STYLE_SINGLE => no \n are in the string. -// STYLE_LITERAL => no lines are suitable for folding (or lineWidth is -1). -// STYLE_FOLDED => a line > lineWidth and can be folded (and lineWidth != -1). -function chooseScalarStyle(string, singleLineOnly, indentPerLevel, lineWidth, - testAmbiguousType, quotingType, forceQuotes, inblock) { +const VERSION = "5.3.1"; - var i; - var char = 0; - var prevChar = null; - var hasLineBreak = false; - var hasFoldableLine = false; // only checked if shouldTrackWidth - var shouldTrackWidth = lineWidth !== -1; - var previousLineBreak = -1; // count the first line correctly - var plain = isPlainSafeFirst(codePointAt(string, 0)) - && isPlainSafeLast(codePointAt(string, string.length - 1)); +function endpointsToMethods(octokit, endpointsMap) { + const newMethods = {}; - if (singleLineOnly || forceQuotes) { - // Case: no block styles. - // Check for disallowed characters to rule out plain and single. - for (i = 0; i < string.length; char >= 0x10000 ? 
i += 2 : i++) { - char = codePointAt(string, i); - if (!isPrintable(char)) { - return STYLE_DOUBLE; + for (const [scope, endpoints] of Object.entries(endpointsMap)) { + for (const [methodName, endpoint] of Object.entries(endpoints)) { + const [route, defaults, decorations] = endpoint; + const [method, url] = route.split(/ /); + const endpointDefaults = Object.assign({ + method, + url + }, defaults); + + if (!newMethods[scope]) { + newMethods[scope] = {}; } - plain = plain && isPlainSafe(char, prevChar, inblock); - prevChar = char; - } - } else { - // Case: block styles permitted. - for (i = 0; i < string.length; char >= 0x10000 ? i += 2 : i++) { - char = codePointAt(string, i); - if (char === CHAR_LINE_FEED) { - hasLineBreak = true; - // Check if any line can be folded. - if (shouldTrackWidth) { - hasFoldableLine = hasFoldableLine || - // Foldable line = too long, and not more-indented. - (i - previousLineBreak - 1 > lineWidth && - string[previousLineBreak + 1] !== ' '); - previousLineBreak = i; - } - } else if (!isPrintable(char)) { - return STYLE_DOUBLE; + + const scopeMethods = newMethods[scope]; + + if (decorations) { + scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations); + continue; } - plain = plain && isPlainSafe(char, prevChar, inblock); - prevChar = char; - } - // in case the end is missing a \n - hasFoldableLine = hasFoldableLine || (shouldTrackWidth && - (i - previousLineBreak - 1 > lineWidth && - string[previousLineBreak + 1] !== ' ')); - } - // Although every style can represent \n without escaping, prefer block styles - // for multiline, since they're more readable and they don't add empty lines. - // Also prefer folding a super-long line. - if (!hasLineBreak && !hasFoldableLine) { - // Strings interpretable as another type have to be quoted; - // e.g. the string 'true' vs. the boolean true. - if (plain && !forceQuotes && !testAmbiguousType(string)) { - return STYLE_PLAIN; + + scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); } - return quotingType === QUOTING_TYPE_DOUBLE ? STYLE_DOUBLE : STYLE_SINGLE; - } - // Edge case: block indentation indicator can only have one digit. - if (indentPerLevel > 9 && needIndentIndicator(string)) { - return STYLE_DOUBLE; - } - // At this point we know block styles are valid. - // Prefer literal style unless we want to fold. - if (!forceQuotes) { - return hasFoldableLine ? STYLE_FOLDED : STYLE_LITERAL; } - return quotingType === QUOTING_TYPE_DOUBLE ? STYLE_DOUBLE : STYLE_SINGLE; + + return newMethods; } -// Note: line breaking/folding is implemented for only the folded style. -// NB. We drop the last trailing newline (if any) of a returned block scalar -// since the dumper adds its own newline. This always works: -// • No ending newline => unaffected; already using strip "-" chomping. -// • Ending newline => removed then restored. -// Importantly, this keeps the "+" chomp indicator from gaining an extra line. -function writeScalar(state, string, level, iskey, inblock) { - state.dump = (function () { - if (string.length === 0) { - return state.quotingType === QUOTING_TYPE_DOUBLE ? '""' : "''"; - } - if (!state.noCompatMode) { - if (DEPRECATED_BOOLEANS_SYNTAX.indexOf(string) !== -1 || DEPRECATED_BASE60_SYNTAX.test(string)) { - return state.quotingType === QUOTING_TYPE_DOUBLE ? 
('"' + string + '"') : ("'" + string + "'"); - } - } +function decorate(octokit, scope, methodName, defaults, decorations) { + const requestWithDefaults = octokit.request.defaults(defaults); + /* istanbul ignore next */ - var indent = state.indent * Math.max(1, level); // no 0-indent scalars - // As indentation gets deeper, let the width decrease monotonically - // to the lower bound min(state.lineWidth, 40). - // Note that this implies - // state.lineWidth ≤ 40 + state.indent: width is fixed at the lower bound. - // state.lineWidth > 40 + state.indent: width decreases until the lower bound. - // This behaves better than a constant minimum width which disallows narrower options, - // or an indent threshold which causes the width to suddenly increase. - var lineWidth = state.lineWidth === -1 - ? -1 : Math.max(Math.min(state.lineWidth, 40), state.lineWidth - indent); + function withDecorations(...args) { + // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData` - // Without knowing if keys are implicit/explicit, assume implicit for safety. - var singleLineOnly = iskey - // No block styles in flow mode. - || (state.flowLevel > -1 && level >= state.flowLevel); - function testAmbiguity(string) { - return testImplicitResolving(state, string); + if (decorations.mapToData) { + options = Object.assign({}, options, { + data: options[decorations.mapToData], + [decorations.mapToData]: undefined + }); + return requestWithDefaults(options); } - switch (chooseScalarStyle(string, singleLineOnly, state.indent, lineWidth, - testAmbiguity, state.quotingType, state.forceQuotes && !iskey, inblock)) { + if (decorations.renamed) { + const [newScope, newMethodName] = decorations.renamed; + octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`); + } - case STYLE_PLAIN: - return string; - case STYLE_SINGLE: - return "'" + string.replace(/'/g, "''") + "'"; - case STYLE_LITERAL: - return '|' + blockHeader(string, state.indent) - + dropEndingNewline(indentString(string, indent)); - case STYLE_FOLDED: - return '>' + blockHeader(string, state.indent) - + dropEndingNewline(indentString(foldString(string, lineWidth), indent)); - case STYLE_DOUBLE: - return '"' + escapeString(string, lineWidth) + '"'; - default: - throw new YAMLException('impossible error: invalid scalar style'); + if (decorations.deprecated) { + octokit.log.warn(decorations.deprecated); } - }()); -} -// Pre-conditions: string is valid for a block scalar, 1 <= indentPerLevel <= 9. -function blockHeader(string, indentPerLevel) { - var indentIndicator = needIndentIndicator(string) ? String(indentPerLevel) : ''; + if (decorations.renamedParameters) { + // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + const options = requestWithDefaults.endpoint.merge(...args); + + for (const [name, alias] of Object.entries(decorations.renamedParameters)) { + if (name in options) { + octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`); + + if (!(alias in options)) { + options[alias] = options[name]; + } + + delete options[name]; + } + } - // note the special case: the string '\n' counts as a "trailing" empty line. - var clip = string[string.length - 1] === '\n'; - var keep = clip && (string[string.length - 2] === '\n' || string === '\n'); - var chomp = keep ? '+' : (clip ? 
'' : '-'); + return requestWithDefaults(options); + } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - return indentIndicator + chomp + '\n'; + + return requestWithDefaults(...args); + } + + return Object.assign(withDecorations, requestWithDefaults); } -// (See the note for writeScalar.) -function dropEndingNewline(string) { - return string[string.length - 1] === '\n' ? string.slice(0, -1) : string; +function restEndpointMethods(octokit) { + const api = endpointsToMethods(octokit, Endpoints); + return { + rest: api + }; } +restEndpointMethods.VERSION = VERSION; +function legacyRestEndpointMethods(octokit) { + const api = endpointsToMethods(octokit, Endpoints); + return _objectSpread2(_objectSpread2({}, api), {}, { + rest: api + }); +} +legacyRestEndpointMethods.VERSION = VERSION; -// Note: a long line without a suitable break point will exceed the width limit. -// Pre-conditions: every char in str isPrintable, str.length > 0, width > 0. -function foldString(string, width) { - // In folded style, $k$ consecutive newlines output as $k+1$ newlines— - // unless they're before or after a more-indented line, or at the very - // beginning or end, in which case $k$ maps to $k$. - // Therefore, parse each chunk as newline(s) followed by a content line. - var lineRe = /(\n+)([^\n]*)/g; +exports.legacyRestEndpointMethods = legacyRestEndpointMethods; +exports.restEndpointMethods = restEndpointMethods; +//# sourceMappingURL=index.js.map - // first line (possibly an empty line) - var result = (function () { - var nextLF = string.indexOf('\n'); - nextLF = nextLF !== -1 ? nextLF : string.length; - lineRe.lastIndex = nextLF; - return foldLine(string.slice(0, nextLF), width); - }()); - // If we haven't reached the first content line yet, don't add an extra \n. - var prevMoreIndented = string[0] === '\n' || string[0] === ' '; - var moreIndented; - // rest of the lines - var match; - while ((match = lineRe.exec(string))) { - var prefix = match[1], line = match[2]; - moreIndented = (line[0] === ' '); - result += prefix - + (!prevMoreIndented && !moreIndented && line !== '' - ? '\n' : '') - + foldLine(line, width); - prevMoreIndented = moreIndented; - } +/***/ }), - return result; -} +/***/ 9968: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -// Greedy line breaking. -// Picks the longest line under the limit each time, -// otherwise settles for the shortest line over the limit. -// NB. More-indented lines *cannot* be folded, as that would add an extra \n. -function foldLine(line, width) { - if (line === '' || line[0] === ' ') return line; +"use strict"; - // Since a more-indented line adds a \n, breaks can't be followed by a space. - var breakRe = / [^ ]/g; // note: the match index will always be <= length-2. - var match; - // start is an inclusive index. end, curr, and next are exclusive. - var start = 0, end, curr = 0, next = 0; - var result = ''; - // Invariants: 0 <= start <= length-1. - // 0 <= curr <= next <= max(0, length-2). curr - start <= width. - // Inside the loop: - // A match implies length >= 2, so curr and next are <= length-2. - while ((match = breakRe.exec(line))) { - next = match.index; - // maintain invariant: curr - start <= width - if (next - start > width) { - end = (curr > start) ? 
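
The route map above is what `endpointsToMethods` walks to build the namespaced API: each `[route, defaults, decorations]` tuple becomes a preset `octokit.request`, and `decorate` layers the `renamed`, `deprecated`, `renamedParameters` and `mapToData` behaviours on top. The generated method and the raw route call are therefore interchangeable. A minimal consumer-side sketch, assuming an authenticated `@octokit/rest` instance (owner, repo and token are illustrative values, not taken from this repository):

const { Octokit } = require("@octokit/rest");

async function main() {
  const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

  // Generated from the ["GET /repos/{owner}/{repo}/pulls"] entry in the map above ...
  const viaMethod = await octokit.rest.pulls.list({ owner: "octocat", repo: "hello-world" });

  // ... and equivalent to calling the route directly with the same defaults.
  const viaRoute = await octokit.request("GET /repos/{owner}/{repo}/pulls", {
    owner: "octocat",
    repo: "hello-world",
  });

  console.log(viaMethod.data.length === viaRoute.data.length); // same endpoint, same result shape
}

main().catch(console.error);
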
curr : next; // derive end <= length-2 - result += '\n' + line.slice(start, end); - // skip the space that was output as \n - start = end + 1; // derive start <= length-1 - } - curr = next; - } +Object.defineProperty(exports, "__esModule", ({ value: true })); - // By the invariants, start <= length-1, so there is something left over. - // It is either the whole string or a part starting from non-whitespace. - result += '\n'; - // Insert a break if the remainder is too long and there is a break available. - if (line.length - start > width && curr > start) { - result += line.slice(start, curr) + '\n' + line.slice(curr + 1); +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var BottleneckLight = _interopDefault(__nccwpck_require__(1174)); + +function _defineProperty(obj, key, value) { + if (key in obj) { + Object.defineProperty(obj, key, { + value: value, + enumerable: true, + configurable: true, + writable: true + }); } else { - result += line.slice(start); + obj[key] = value; } - return result.slice(1); // drop extra \n joiner + return obj; } -// Escapes a double-quoted string. -function escapeString(string) { - var result = ''; - var char = 0; - var escapeSeq; +function ownKeys(object, enumerableOnly) { + var keys = Object.keys(object); - for (var i = 0; i < string.length; char >= 0x10000 ? i += 2 : i++) { - char = codePointAt(string, i); - escapeSeq = ESCAPE_SEQUENCES[char]; + if (Object.getOwnPropertySymbols) { + var symbols = Object.getOwnPropertySymbols(object); + if (enumerableOnly) symbols = symbols.filter(function (sym) { + return Object.getOwnPropertyDescriptor(object, sym).enumerable; + }); + keys.push.apply(keys, symbols); + } - if (!escapeSeq && isPrintable(char)) { - result += string[i]; - if (char >= 0x10000) result += string[i + 1]; + return keys; +} + +function _objectSpread2(target) { + for (var i = 1; i < arguments.length; i++) { + var source = arguments[i] != null ? arguments[i] : {}; + + if (i % 2) { + ownKeys(Object(source), true).forEach(function (key) { + _defineProperty(target, key, source[key]); + }); + } else if (Object.getOwnPropertyDescriptors) { + Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { - result += escapeSeq || encodeHex(char); + ownKeys(Object(source)).forEach(function (key) { + Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); + }); } } - return result; + return target; } -function writeFlowSequence(state, level, object) { - var _result = '', - _tag = state.tag, - index, - length, - value; +const VERSION = "3.4.1"; - for (index = 0, length = object.length; index < length; index += 1) { - value = object[index]; +const noop = () => Promise.resolve(); // @ts-ignore - if (state.replacer) { - value = state.replacer.call(object, String(index), value); - } - // Write only valid elements, put null instead of invalid elements. - if (writeNode(state, level, value, false, false) || - (typeof value === 'undefined' && - writeNode(state, level, null, false, false))) { +function wrapRequest(state, request, options) { + return state.retryLimiter.schedule(doRequest, state, request, options); +} // @ts-ignore - if (_result !== '') _result += ',' + (!state.condenseFlow ? 
' ' : ''); - _result += state.dump; - } - } +async function doRequest(state, request, options) { + const isWrite = options.method !== "GET" && options.method !== "HEAD"; + const isSearch = options.method === "GET" && options.url.startsWith("/search/"); + const isGraphQL = options.url.startsWith("/graphql"); + const retryCount = ~~options.request.retryCount; + const jobOptions = retryCount > 0 ? { + priority: 0, + weight: 0 + } : {}; - state.tag = _tag; - state.dump = '[' + _result + ']'; -} + if (state.clustering) { + // Remove a job from Redis if it has not completed or failed within 60s + // Examples: Node process terminated, client disconnected, etc. + // @ts-ignore + jobOptions.expiration = 1000 * 60; + } // Guarantee at least 1000ms between writes + // GraphQL can also trigger writes -function writeBlockSequence(state, level, object, compact) { - var _result = '', - _tag = state.tag, - index, - length, - value; - for (index = 0, length = object.length; index < length; index += 1) { - value = object[index]; + if (isWrite || isGraphQL) { + await state.write.key(state.id).schedule(jobOptions, noop); + } // Guarantee at least 3000ms between requests that trigger notifications - if (state.replacer) { - value = state.replacer.call(object, String(index), value); - } - // Write only valid elements, put null instead of invalid elements. - if (writeNode(state, level + 1, value, true, true, false, true) || - (typeof value === 'undefined' && - writeNode(state, level + 1, null, true, true, false, true))) { + if (isWrite && state.triggersNotification(options.url)) { + await state.notifications.key(state.id).schedule(jobOptions, noop); + } // Guarantee at least 2000ms between search requests - if (!compact || _result !== '') { - _result += generateNextLine(state, level); - } - if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) { - _result += '-'; - } else { - _result += '- '; - } + if (isSearch) { + await state.search.key(state.id).schedule(jobOptions, noop); + } - _result += state.dump; + const req = state.global.key(state.id).schedule(jobOptions, request, options); + + if (isGraphQL) { + const res = await req; + + if (res.data.errors != null && // @ts-ignore + res.data.errors.some(error => error.type === "RATE_LIMITED")) { + const error = Object.assign(new Error("GraphQL Rate Limit Exceeded"), { + headers: res.headers, + data: res.data + }); + throw error; } } - state.tag = _tag; - state.dump = _result || '[]'; // Empty sequence if no valid values. 
+ return req; } -function writeFlowMapping(state, level, object) { - var _result = '', - _tag = state.tag, - objectKeyList = Object.keys(object), - index, - length, - objectKey, - objectValue, - pairBuffer; +var triggersNotificationPaths = ["/orgs/{org}/invitations", "/orgs/{org}/invitations/{invitation_id}", "/orgs/{org}/teams/{team_slug}/discussions", "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "/repos/{owner}/{repo}/collaborators/{username}", "/repos/{owner}/{repo}/commits/{commit_sha}/comments", "/repos/{owner}/{repo}/issues", "/repos/{owner}/{repo}/issues/{issue_number}/comments", "/repos/{owner}/{repo}/pulls", "/repos/{owner}/{repo}/pulls/{pull_number}/comments", "/repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies", "/repos/{owner}/{repo}/pulls/{pull_number}/merge", "/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "/repos/{owner}/{repo}/pulls/{pull_number}/reviews", "/repos/{owner}/{repo}/releases", "/teams/{team_id}/discussions", "/teams/{team_id}/discussions/{discussion_number}/comments"]; - for (index = 0, length = objectKeyList.length; index < length; index += 1) { +// @ts-ignore +function routeMatcher(paths) { + // EXAMPLE. For the following paths: - pairBuffer = ''; - if (_result !== '') pairBuffer += ', '; + /* [ + "/orgs/{org}/invitations", + "/repos/{owner}/{repo}/collaborators/{username}" + ] */ + // @ts-ignore + const regexes = paths.map(path => path.split("/") // @ts-ignore + .map(c => c.startsWith("{") ? "(?:.+?)" : c).join("/")); // 'regexes' would contain: - if (state.condenseFlow) pairBuffer += '"'; + /* [ + '/orgs/(?:.+?)/invitations', + '/repos/(?:.+?)/(?:.+?)/collaborators/(?:.+?)' + ] */ + // @ts-ignore - objectKey = objectKeyList[index]; - objectValue = object[objectKey]; + const regex = `^(?:${regexes.map(r => `(?:${r})`).join("|")})[^/]*$`; // 'regex' would contain: - if (state.replacer) { - objectValue = state.replacer.call(object, objectKey, objectValue); - } + /* + ^(?:(?:\/orgs\/(?:.+?)\/invitations)|(?:\/repos\/(?:.+?)\/(?:.+?)\/collaborators\/(?:.+?)))[^\/]*$ + It may look scary, but paste it into https://www.debuggex.com/ + and it will make a lot more sense! + */ - if (!writeNode(state, level, objectKey, false, false)) { - continue; // Skip this pair because of invalid key; - } + return new RegExp(regex, "i"); +} - if (state.dump.length > 1024) pairBuffer += '? '; +const regex = routeMatcher(triggersNotificationPaths); +const triggersNotification = regex.test.bind(regex); +const groups = {}; // @ts-ignore - pairBuffer += state.dump + (state.condenseFlow ? '"' : '') + ':' + (state.condenseFlow ? '' : ' '); +const createGroups = function (Bottleneck, common) { + // @ts-ignore + groups.global = new Bottleneck.Group(_objectSpread2({ + id: "octokit-global", + maxConcurrent: 10 + }, common)); // @ts-ignore + + groups.search = new Bottleneck.Group(_objectSpread2({ + id: "octokit-search", + maxConcurrent: 1, + minTime: 2000 + }, common)); // @ts-ignore + + groups.write = new Bottleneck.Group(_objectSpread2({ + id: "octokit-write", + maxConcurrent: 1, + minTime: 1000 + }, common)); // @ts-ignore + + groups.notifications = new Bottleneck.Group(_objectSpread2({ + id: "octokit-notifications", + maxConcurrent: 1, + minTime: 3000 + }, common)); +}; - if (!writeNode(state, level, objectValue, false, false)) { - continue; // Skip this pair because of invalid value. 
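
The four Bottleneck groups above encode the plugin's pacing rules (at most 10 concurrent requests globally, at least 1000 ms between writes, 2000 ms between searches, 3000 ms between notification-triggering calls), and `routeMatcher` compiles `triggersNotificationPaths` into one regex so `doRequest` can decide which URLs must also pass through the slower notifications queue. A standalone sketch of the same matching idea, not the plugin's export (the two paths are a hand-picked subset for illustration):

// Templated segments become non-greedy wildcards, then all paths are OR-ed together.
const paths = ["/repos/{owner}/{repo}/pulls", "/orgs/{org}/invitations"];

const pattern = paths
  .map((p) => p.split("/").map((c) => (c.startsWith("{") ? "(?:.+?)" : c)).join("/"))
  .map((r) => `(?:${r})`)
  .join("|");

const triggersNotification = new RegExp(`^(?:${pattern})[^/]*$`, "i");

console.log(triggersNotification.test("/repos/octocat/hello-world/pulls"));         // true  -> extra 3s spacing
console.log(triggersNotification.test("/repos/octocat/hello-world/pulls/1/files")); // false -> normal queue
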
- } +function throttling(octokit, octokitOptions = {}) { + const { + enabled = true, + Bottleneck = BottleneckLight, + id = "no-id", + timeout = 1000 * 60 * 2, + // Redis TTL: 2 minutes + connection + } = octokitOptions.throttle || {}; + + if (!enabled) { + return; + } - pairBuffer += state.dump; + const common = { + connection, + timeout + }; // @ts-ignore - // Both key and value are valid. - _result += pairBuffer; + if (groups.global == null) { + createGroups(Bottleneck, common); } - state.tag = _tag; - state.dump = '{' + _result + '}'; -} + const state = Object.assign(_objectSpread2({ + clustering: connection != null, + triggersNotification, + minimumAbuseRetryAfter: 5, + retryAfterBaseValue: 1000, + retryLimiter: new Bottleneck(), + id + }, groups), // @ts-ignore + octokitOptions.throttle); -function writeBlockMapping(state, level, object, compact) { - var _result = '', - _tag = state.tag, - objectKeyList = Object.keys(object), - index, - length, - objectKey, - objectValue, - explicitPair, - pairBuffer; + if (typeof state.onAbuseLimit !== "function" || typeof state.onRateLimit !== "function") { + throw new Error(`octokit/plugin-throttling error: + You must pass the onAbuseLimit and onRateLimit error handlers. + See https://github.com/octokit/rest.js#throttling - // Allow sorting keys so that the output file is deterministic - if (state.sortKeys === true) { - // Default sorting - objectKeyList.sort(); - } else if (typeof state.sortKeys === 'function') { - // Custom sort function - objectKeyList.sort(state.sortKeys); - } else if (state.sortKeys) { - // Something is wrong - throw new YAMLException('sortKeys must be a boolean or a function'); + const octokit = new Octokit({ + throttle: { + onAbuseLimit: (retryAfter, options) => {/* ... */}, + onRateLimit: (retryAfter, options) => {/* ... */} + } + }) + `); } - for (index = 0, length = objectKeyList.length; index < length; index += 1) { - pairBuffer = ''; + const events = {}; + const emitter = new Bottleneck.Events(events); // @ts-ignore - if (!compact || _result !== '') { - pairBuffer += generateNextLine(state, level); + events.on("abuse-limit", state.onAbuseLimit); // @ts-ignore + + events.on("rate-limit", state.onRateLimit); // @ts-ignore + + events.on("error", e => console.warn("Error in throttling-plugin limit handler", e)); // @ts-ignore + + state.retryLimiter.on("failed", async function (error, info) { + const options = info.args[info.args.length - 1]; + const shouldRetryGraphQL = options.url.startsWith("/graphql") && error.status !== 401; + + if (!(shouldRetryGraphQL || error.status === 403)) { + return; } - objectKey = objectKeyList[index]; - objectValue = object[objectKey]; + const retryCount = ~~options.request.retryCount; + options.request.retryCount = retryCount; + const { + wantRetry, + retryAfter + } = await async function () { + if (/\babuse\b/i.test(error.message)) { + // The user has hit the abuse rate limit. (REST and GraphQL) + // https://docs.github.com/en/rest/overview/resources-in-the-rest-api#abuse-rate-limits + // The Retry-After header can sometimes be blank when hitting an abuse limit, + // but is always present after 2-3s, so make sure to set `retryAfter` to at least 5s by default. 
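
`throttling()` refuses to run without `onRateLimit` and `onAbuseLimit` handlers because their boolean return value is what tells the `retryLimiter` whether a throttled request should be retried after `retryAfter` seconds. A consumer-side sketch of that wiring, assuming `@octokit/core` plus this plugin; the retry-once policy shown is illustrative and not taken from this repository's source:

const { Octokit } = require("@octokit/core");
const { throttling } = require("@octokit/plugin-throttling");

const ThrottledOctokit = Octokit.plugin(throttling);

const octokit = new ThrottledOctokit({
  auth: process.env.GITHUB_TOKEN,
  throttle: {
    onRateLimit: (retryAfter, options) => {
      console.warn(`Rate limit hit for ${options.method} ${options.url}, retrying in ${retryAfter}s`);
      return options.request.retryCount === 0; // true => the plugin re-queues the request once
    },
    onAbuseLimit: (retryAfter, options) => {
      console.warn(`Abuse limit hit for ${options.method} ${options.url}`);
      return false; // never retry abuse-limited requests
    },
  },
});
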
+ const retryAfter = Math.max(~~error.headers["retry-after"], state.minimumAbuseRetryAfter); + const wantRetry = await emitter.trigger("abuse-limit", retryAfter, options, octokit); + return { + wantRetry, + retryAfter + }; + } + + if (error.headers != null && error.headers["x-ratelimit-remaining"] === "0") { + // The user has used all their allowed calls for the current time period (REST and GraphQL) + // https://docs.github.com/en/rest/reference/rate-limit (REST) + // https://docs.github.com/en/graphql/overview/resource-limitations#rate-limit (GraphQL) + const rateLimitReset = new Date(~~error.headers["x-ratelimit-reset"] * 1000).getTime(); + const retryAfter = Math.max(Math.ceil((rateLimitReset - Date.now()) / 1000), 0); + const wantRetry = await emitter.trigger("rate-limit", retryAfter, options, octokit); + return { + wantRetry, + retryAfter + }; + } - if (state.replacer) { - objectValue = state.replacer.call(object, objectKey, objectValue); - } + return {}; + }(); - if (!writeNode(state, level + 1, objectKey, true, true, true)) { - continue; // Skip this pair because of invalid key. + if (wantRetry) { + options.request.retryCount++; // @ts-ignore + + return retryAfter * state.retryAfterBaseValue; } + }); + octokit.hook.wrap("request", wrapRequest.bind(null, state)); +} +throttling.VERSION = VERSION; +throttling.triggersNotification = triggersNotification; - explicitPair = (state.tag !== null && state.tag !== '?') || - (state.dump && state.dump.length > 1024); +exports.throttling = throttling; +//# sourceMappingURL=index.js.map - if (explicitPair) { - if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) { - pairBuffer += '?'; - } else { - pairBuffer += '? '; - } - } - pairBuffer += state.dump; +/***/ }), - if (explicitPair) { - pairBuffer += generateNextLine(state, level); - } +/***/ 537: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (!writeNode(state, level + 1, objectValue, true, explicitPair)) { - continue; // Skip this pair because of invalid value. - } +"use strict"; - if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) { - pairBuffer += ':'; - } else { - pairBuffer += ': '; - } - pairBuffer += state.dump; +Object.defineProperty(exports, "__esModule", ({ value: true })); - // Both key and value are valid. - _result += pairBuffer; - } +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - state.tag = _tag; - state.dump = _result || '{}'; // Empty mapping if no valid pairs. -} +var deprecation = __nccwpck_require__(8932); +var once = _interopDefault(__nccwpck_require__(1223)); -function detectType(state, object, explicit) { - var _result, typeList, index, length, type, style; +const logOnce = once(deprecation => console.warn(deprecation)); +/** + * Error with extra properties to help with debugging + */ - typeList = explicit ? 
state.explicitTypes : state.implicitTypes; +class RequestError extends Error { + constructor(message, statusCode, options) { + super(message); // Maintains proper stack trace (only available on V8) - for (index = 0, length = typeList.length; index < length; index += 1) { - type = typeList[index]; + /* istanbul ignore next */ - if ((type.instanceOf || type.predicate) && - (!type.instanceOf || ((typeof object === 'object') && (object instanceof type.instanceOf))) && - (!type.predicate || type.predicate(object))) { + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } - if (explicit) { - if (type.multi && type.representName) { - state.tag = type.representName(object); - } else { - state.tag = type.tag; - } - } else { - state.tag = '?'; + this.name = "HttpError"; + this.status = statusCode; + Object.defineProperty(this, "code", { + get() { + logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); + return statusCode; } - if (type.represent) { - style = state.styleMap[type.tag] || type.defaultStyle; - - if (_toString.call(type.represent) === '[object Function]') { - _result = type.represent(object, style); - } else if (_hasOwnProperty.call(type.represent, style)) { - _result = type.represent[style](object, style); - } else { - throw new YAMLException('!<' + type.tag + '> tag resolver accepts not "' + style + '" style'); - } + }); + this.headers = options.headers || {}; // redact request credentials without mutating original request options - state.dump = _result; - } + const requestCopy = Object.assign({}, options.request); - return true; + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") + }); } + + requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit + // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications + .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended + // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header + .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; } - return false; } -// Serializes `object` and writes it to global `result`. -// Returns true on success, or false on invalid object. 
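
`RequestError` keeps a copy of the failing request on the error object but scrubs credentials first: the `Authorization` header and any `client_secret`/`access_token` query parameters are replaced with `[REDACTED]`, and the legacy `error.code` getter simply aliases `error.status` behind a deprecation warning. A small sketch of that behaviour, assuming the standalone `@octokit/request-error` package (the token and URL are made-up values):

const { RequestError } = require("@octokit/request-error");

const error = new RequestError("Not Found", 404, {
  request: {
    method: "GET",
    url: "https://api.github.com/repos/octocat/hello-world?access_token=abc123",
    headers: { authorization: "token ghp_secret" },
  },
});

console.log(error.status);                        // 404
console.log(error.request.headers.authorization); // "token [REDACTED]"
console.log(error.request.url);                   // ends with "?access_token=[REDACTED]"
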
-// -function writeNode(state, level, object, block, compact, iskey, isblockseq) { - state.tag = null; - state.dump = object; +exports.RequestError = RequestError; +//# sourceMappingURL=index.js.map - if (!detectType(state, object, false)) { - detectType(state, object, true); - } - var type = _toString.call(state.dump); - var inblock = block; - var tagStr; +/***/ }), - if (block) { - block = (state.flowLevel < 0 || state.flowLevel > level); - } +/***/ 6234: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - var objectOrArray = type === '[object Object]' || type === '[object Array]', - duplicateIndex, - duplicate; +"use strict"; - if (objectOrArray) { - duplicateIndex = state.duplicates.indexOf(object); - duplicate = duplicateIndex !== -1; - } - if ((state.tag !== null && state.tag !== '?') || duplicate || (state.indent !== 2 && level > 0)) { - compact = false; +Object.defineProperty(exports, "__esModule", ({ value: true })); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var endpoint = __nccwpck_require__(9440); +var universalUserAgent = __nccwpck_require__(5030); +var isPlainObject = __nccwpck_require__(3287); +var nodeFetch = _interopDefault(__nccwpck_require__(467)); +var requestError = __nccwpck_require__(537); + +const VERSION = "5.4.12"; + +function getBufferResponse(response) { + return response.arrayBuffer(); +} + +function fetchWrapper(requestOptions) { + if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { + requestOptions.body = JSON.stringify(requestOptions.body); } - if (duplicate && state.usedDuplicates[duplicateIndex]) { - state.dump = '*ref_' + duplicateIndex; - } else { - if (objectOrArray && duplicate && !state.usedDuplicates[duplicateIndex]) { - state.usedDuplicates[duplicateIndex] = true; - } - if (type === '[object Object]') { - if (block && (Object.keys(state.dump).length !== 0)) { - writeBlockMapping(state, level, state.dump, compact); - if (duplicate) { - state.dump = '&ref_' + duplicateIndex + state.dump; - } - } else { - writeFlowMapping(state, level, state.dump); - if (duplicate) { - state.dump = '&ref_' + duplicateIndex + ' ' + state.dump; - } - } - } else if (type === '[object Array]') { - if (block && (state.dump.length !== 0)) { - if (state.noArrayIndent && !isblockseq && level > 0) { - writeBlockSequence(state, level - 1, state.dump, compact); - } else { - writeBlockSequence(state, level, state.dump, compact); - } - if (duplicate) { - state.dump = '&ref_' + duplicateIndex + state.dump; - } - } else { - writeFlowSequence(state, level, state.dump); - if (duplicate) { - state.dump = '&ref_' + duplicateIndex + ' ' + state.dump; - } - } - } else if (type === '[object String]') { - if (state.tag !== '?') { - writeScalar(state, state.dump, level, iskey, inblock); - } - } else if (type === '[object Undefined]') { - return false; - } else { - if (state.skipInvalid) return false; - throw new YAMLException('unacceptable kind of an object to dump ' + type); + let headers = {}; + let status; + let url; + const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; + return fetch(requestOptions.url, Object.assign({ + method: requestOptions.method, + body: requestOptions.body, + headers: requestOptions.headers, + redirect: requestOptions.redirect + }, requestOptions.request)).then(response => { + url = response.url; + status = response.status; + + for (const keyAndValue of response.headers) { + headers[keyAndValue[0]] 
= keyAndValue[1]; } - if (state.tag !== null && state.tag !== '?') { - // Need to encode all characters except those allowed by the spec: - // - // [35] ns-dec-digit ::= [#x30-#x39] /* 0-9 */ - // [36] ns-hex-digit ::= ns-dec-digit - // | [#x41-#x46] /* A-F */ | [#x61-#x66] /* a-f */ - // [37] ns-ascii-letter ::= [#x41-#x5A] /* A-Z */ | [#x61-#x7A] /* a-z */ - // [38] ns-word-char ::= ns-dec-digit | ns-ascii-letter | “-” - // [39] ns-uri-char ::= “%” ns-hex-digit ns-hex-digit | ns-word-char | “#” - // | “;” | “/” | “?” | “:” | “@” | “&” | “=” | “+” | “$” | “,” - // | “_” | “.” | “!” | “~” | “*” | “'” | “(” | “)” | “[” | “]” - // - // Also need to encode '!' because it has special meaning (end of tag prefix). - // - tagStr = encodeURI( - state.tag[0] === '!' ? state.tag.slice(1) : state.tag - ).replace(/!/g, '%21'); + if (status === 204 || status === 205) { + return; + } // GitHub API returns 200 for HEAD requests - if (state.tag[0] === '!') { - tagStr = '!' + tagStr; - } else if (tagStr.slice(0, 18) === 'tag:yaml.org,2002:') { - tagStr = '!!' + tagStr.slice(18); - } else { - tagStr = '!<' + tagStr + '>'; + + if (requestOptions.method === "HEAD") { + if (status < 400) { + return; } - state.dump = tagStr + ' ' + state.dump; + throw new requestError.RequestError(response.statusText, status, { + headers, + request: requestOptions + }); + } + + if (status === 304) { + throw new requestError.RequestError("Not modified", status, { + headers, + request: requestOptions + }); } - } - return true; -} + if (status >= 400) { + return response.text().then(message => { + const error = new requestError.RequestError(message, status, { + headers, + request: requestOptions + }); -function getDuplicateReferences(object, state) { - var objects = [], - duplicatesIndexes = [], - index, - length; + try { + let responseBody = JSON.parse(error.message); + Object.assign(error, responseBody); + let errors = responseBody.errors; // Assumption `errors` would always be in Array format - inspectNode(object, objects, duplicatesIndexes); + error.message = error.message + ": " + errors.map(JSON.stringify).join(", "); + } catch (e) {// ignore, see octokit/rest.js#684 + } - for (index = 0, length = duplicatesIndexes.length; index < length; index += 1) { - state.duplicates.push(objects[duplicatesIndexes[index]]); - } - state.usedDuplicates = new Array(length); -} + throw error; + }); + } -function inspectNode(object, objects, duplicatesIndexes) { - var objectKeyList, - index, - length; + const contentType = response.headers.get("content-type"); - if (object !== null && typeof object === 'object') { - index = objects.indexOf(object); - if (index !== -1) { - if (duplicatesIndexes.indexOf(index) === -1) { - duplicatesIndexes.push(index); - } - } else { - objects.push(object); + if (/application\/json/.test(contentType)) { + return response.json(); + } - if (Array.isArray(object)) { - for (index = 0, length = object.length; index < length; index += 1) { - inspectNode(object[index], objects, duplicatesIndexes); - } - } else { - objectKeyList = Object.keys(object); + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } - for (index = 0, length = objectKeyList.length; index < length; index += 1) { - inspectNode(object[objectKeyList[index]], objects, duplicatesIndexes); - } - } + return getBufferResponse(response); + }).then(data => { + return { + status, + url, + headers, + data + }; + }).catch(error => { + if (error instanceof requestError.RequestError) { + throw error; } - } -} -function 
dump(input, options) { - options = options || {}; + throw new requestError.RequestError(error.message, 500, { + headers, + request: requestOptions + }); + }); +} - var state = new State(options); +function withDefaults(oldEndpoint, newDefaults) { + const endpoint = oldEndpoint.defaults(newDefaults); - if (!state.noRefs) getDuplicateReferences(input, state); + const newApi = function (route, parameters) { + const endpointOptions = endpoint.merge(route, parameters); - var value = input; + if (!endpointOptions.request || !endpointOptions.request.hook) { + return fetchWrapper(endpoint.parse(endpointOptions)); + } - if (state.replacer) { - value = state.replacer.call({ '': value }, '', value); - } + const request = (route, parameters) => { + return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); + }; - if (writeNode(state, 0, value, true, true)) return state.dump + '\n'; + Object.assign(request, { + endpoint, + defaults: withDefaults.bind(null, endpoint) + }); + return endpointOptions.request.hook(request, endpointOptions); + }; - return ''; + return Object.assign(newApi, { + endpoint, + defaults: withDefaults.bind(null, endpoint) + }); } -module.exports.dump = dump; +const request = withDefaults(endpoint.endpoint, { + headers: { + "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` + } +}); + +exports.request = request; +//# sourceMappingURL=index.js.map /***/ }), -/***/ 8179: -/***/ ((module) => { +/***/ 1150: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// YAML error class. http://stackoverflow.com/questions/8458984 -// +const {spawnSync} = __nccwpck_require__(3129); -function formatError(exception, compact) { - var where = '', message = exception.reason || '(unknown reason)'; +const isString = (a) => typeof a === 'string'; - if (!exception.mark) return message; +module.exports = (str, filter = {}) => { + if (!isString(str)) { + filter = str || {}; + str = run(); + } + + const { + added, + modified, + untracked, + deleted, + renamed, + } = filter; + + const files = parse(str); + const picked = pick(files, { + added, + modified, + untracked, + deleted, + renamed, + }); + + const names = getNames(picked); + + return names; +}; - if (exception.mark.name) { - where += 'in "' + exception.mark.name + '" '; - } +const getName = ({name}) => name; - where += '(' + (exception.mark.line + 1) + ':' + (exception.mark.column + 1) + ')'; +module.exports.getNames = getNames; +function getNames(files) { + return files.map(getName); +} - if (!compact && exception.mark.snippet) { - where += '\n\n' + exception.mark.snippet; - } +module.exports.run = run; +function run() { + const result = spawnSync('git', ['status', '--porcelain']); + return result.stdout.toString(); +} - return message + ' ' + where; +module.exports.parse = parse; +function parse(str) { + const result = []; + const lines = str + .split('\n') + .filter(Boolean); + + for (const line of lines) { + const {name, mode} = parseLine(line); + + result.push({ + name, + mode, + }); + } + + return result; } +const UNTRACKED = '?'; +const RENAMED = 'R'; +const ARROW = '-> '; + +// "R a -> b" -> "b" +const cutRenameTo = (line) => { + const i = line.indexOf(ARROW); + const count = i + ARROW.length; + + return line.slice(count); +}; -function YAMLException(reason, mark) { - // Super constructor - Error.call(this); +function parseLine(line) { + const [first] = line; + + if (first === UNTRACKED) + return { + name: line.replace('?? 
', ''), + mode: UNTRACKED, + }; + + if (first === RENAMED) + return { + name: cutRenameTo(line), + mode: RENAMED, + }; + + const [mode] = line.match(/^[\sA-Z]{1,}\s/, ''); + const name = line.replace(mode, ''); + + return { + name, + mode, + }; +} - this.name = 'YAMLException'; - this.reason = reason; - this.mark = mark; - this.message = formatError(this, false); +const isModified = ({mode}) => /M/.test(mode); +const isAdded = ({mode}) => /A/.test(mode); +const isRenamed = ({mode}) => /R/.test(mode); +const isDeleted = ({mode}) => /D/.test(mode); +const isUntracked = ({mode}) => /\?/.test(mode); + +const check = ({added, modified, untracked, deleted, renamed}) => (file) => { + let is = false; + + if (added) + is = is || isAdded(file); + + if (modified) + is = is || isModified(file); + + if (untracked) + is = is || isUntracked(file); + + if (deleted) + is = is || isDeleted(file); + + if (renamed) + is = is || isRenamed(file); + + return is; +}; - // Include stack trace in error object - if (Error.captureStackTrace) { - // Chrome and NodeJS - Error.captureStackTrace(this, this.constructor); - } else { - // FF, IE 10+ and Safari 6+. Fallback for others - this.stack = (new Error()).stack || ''; - } +module.exports.pick = pick; +function pick(files, {added, modified, deleted, untracked, renamed}) { + return files.filter(check({ + added, + modified, + untracked, + deleted, + renamed, + })); } -// Inherit from Error -YAMLException.prototype = Object.create(Error.prototype); -YAMLException.prototype.constructor = YAMLException; +/***/ }), -YAMLException.prototype.toString = function toString(compact) { - return this.name + ': ' + formatError(this, compact); -}; +/***/ 4623: +/***/ (function(module, exports, __nccwpck_require__) { +"use strict"; -module.exports = YAMLException; +var __assign = (this && this.__assign) || function () { + __assign = Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getInput = void 0; +var dotenv_1 = __importDefault(__nccwpck_require__(2437)); +dotenv_1.default.config(); +var VALID_TYPES = ['string', 'array', 'boolean', 'number']; +var DEFAULT_OPTIONS = { + required: false, + type: 'string', + disableable: false +}; +var getEnvVar = function (key) { + var parsed = process.env["INPUT_" + key.replace(/ /g, '_').toUpperCase()]; + var raw = process.env[key]; + return parsed || raw || undefined; +}; +var parseArray = function (val) { + var array = val.split('\n').join(',').split(','); + var filtered = array.filter(function (n) { return n; }); + return filtered.map(function (n) { return n.trim(); }); +}; +var parseBoolean = function (val) { + var trueValue = ['true', 'True', 'TRUE']; + var falseValue = ['false', 'False', 'FALSE']; + if (trueValue.includes(val)) + return true; + if (falseValue.includes(val)) + return false; + throw new Error('boolean input has to be one of \`true | True | TRUE | false | False | FALSE\`'); +}; +var parseNumber = function (val) { + var parsed = Number(val); + if (isNaN(parsed)) + throw new Error('input has to be a valid number'); + return parsed; +}; +var parseValue = function (val, type) { + if (type === 'array') { + return parseArray(val); + } + if (type === 'boolean') { + return parseBoolean(val); + } + if (type === 'number') { + return parseNumber(val); + } + return val.trim(); +}; +var getInput = function (key, opts) { + var parsedOptions; + if (typeof key === 'string' || Array.isArray(key)) { + parsedOptions = __assign({ key: key }, opts); + } + else if (typeof key === 'object') { + parsedOptions = key; + } + else { + throw new Error('No key for input specified'); + } + if (!parsedOptions.key) + throw new Error('No key for input specified'); + var options = Object.assign({}, DEFAULT_OPTIONS, parsedOptions); + if (VALID_TYPES.includes(options.type) === false) + throw new Error('option type has to be one of `string | array | boolean | number`'); + var val = typeof options.key === 'string' ? getEnvVar(options.key) : options.key.map(function (key) { return getEnvVar(key); }).filter(function (item) { return item; })[0]; + if (options.disableable && val === 'false') + return undefined; + var parsed = val !== undefined ? parseValue(val, options.type) : undefined; + if (parsed === undefined) { + if (options.required) + throw new Error("Input `" + options.key + "` is required but was not provided."); + if (options.default !== undefined) + return options.default; + return undefined; + } + if (options.modifier) + return options.modifier(parsed); + return parsed; +}; +exports.getInput = getInput; +module.exports.getInput = exports.getInput; /***/ }), -/***/ 1161: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 1174: +/***/ (function(module) { -"use strict"; +/** + * This file contains the Bottleneck library (MIT), compiled to ES2017, and without Clustering support. + * https://github.com/SGrondin/bottleneck + */ +(function (global, factory) { + true ? module.exports = factory() : + 0; +}(this, (function () { 'use strict'; + var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? 
self : {}; -/*eslint-disable max-len,no-use-before-define*/ + function getCjsExportFromNamespace (n) { + return n && n['default'] || n; + } -var common = __nccwpck_require__(6829); -var YAMLException = __nccwpck_require__(8179); -var makeSnippet = __nccwpck_require__(6975); -var DEFAULT_SCHEMA = __nccwpck_require__(8759); + var load = function(received, defaults, onto = {}) { + var k, ref, v; + for (k in defaults) { + v = defaults[k]; + onto[k] = (ref = received[k]) != null ? ref : v; + } + return onto; + }; + var overwrite = function(received, defaults, onto = {}) { + var k, v; + for (k in received) { + v = received[k]; + if (defaults[k] !== void 0) { + onto[k] = v; + } + } + return onto; + }; -var _hasOwnProperty = Object.prototype.hasOwnProperty; + var parser = { + load: load, + overwrite: overwrite + }; + var DLList; + + DLList = class DLList { + constructor(incr, decr) { + this.incr = incr; + this.decr = decr; + this._first = null; + this._last = null; + this.length = 0; + } + + push(value) { + var node; + this.length++; + if (typeof this.incr === "function") { + this.incr(); + } + node = { + value, + prev: this._last, + next: null + }; + if (this._last != null) { + this._last.next = node; + this._last = node; + } else { + this._first = this._last = node; + } + return void 0; + } + + shift() { + var value; + if (this._first == null) { + return; + } else { + this.length--; + if (typeof this.decr === "function") { + this.decr(); + } + } + value = this._first.value; + if ((this._first = this._first.next) != null) { + this._first.prev = null; + } else { + this._last = null; + } + return value; + } + + first() { + if (this._first != null) { + return this._first.value; + } + } + + getArray() { + var node, ref, results; + node = this._first; + results = []; + while (node != null) { + results.push((ref = node, node = node.next, ref.value)); + } + return results; + } + + forEachShift(cb) { + var node; + node = this.shift(); + while (node != null) { + (cb(node), node = this.shift()); + } + return void 0; + } + + debug() { + var node, ref, ref1, ref2, results; + node = this._first; + results = []; + while (node != null) { + results.push((ref = node, node = node.next, { + value: ref.value, + prev: (ref1 = ref.prev) != null ? ref1.value : void 0, + next: (ref2 = ref.next) != null ? 
ref2.value : void 0 + })); + } + return results; + } -var CONTEXT_FLOW_IN = 1; -var CONTEXT_FLOW_OUT = 2; -var CONTEXT_BLOCK_IN = 3; -var CONTEXT_BLOCK_OUT = 4; + }; + var DLList_1 = DLList; + + var Events; + + Events = class Events { + constructor(instance) { + this.instance = instance; + this._events = {}; + if ((this.instance.on != null) || (this.instance.once != null) || (this.instance.removeAllListeners != null)) { + throw new Error("An Emitter already exists for this object"); + } + this.instance.on = (name, cb) => { + return this._addListener(name, "many", cb); + }; + this.instance.once = (name, cb) => { + return this._addListener(name, "once", cb); + }; + this.instance.removeAllListeners = (name = null) => { + if (name != null) { + return delete this._events[name]; + } else { + return this._events = {}; + } + }; + } + + _addListener(name, status, cb) { + var base; + if ((base = this._events)[name] == null) { + base[name] = []; + } + this._events[name].push({cb, status}); + return this.instance; + } + + listenerCount(name) { + if (this._events[name] != null) { + return this._events[name].length; + } else { + return 0; + } + } + + async trigger(name, ...args) { + var e, promises; + try { + if (name !== "debug") { + this.trigger("debug", `Event triggered: ${name}`, args); + } + if (this._events[name] == null) { + return; + } + this._events[name] = this._events[name].filter(function(listener) { + return listener.status !== "none"; + }); + promises = this._events[name].map(async(listener) => { + var e, returned; + if (listener.status === "none") { + return; + } + if (listener.status === "once") { + listener.status = "none"; + } + try { + returned = typeof listener.cb === "function" ? listener.cb(...args) : void 0; + if (typeof (returned != null ? returned.then : void 0) === "function") { + return (await returned); + } else { + return returned; + } + } catch (error) { + e = error; + { + this.trigger("error", e); + } + return null; + } + }); + return ((await Promise.all(promises))).find(function(x) { + return x != null; + }); + } catch (error) { + e = error; + { + this.trigger("error", e); + } + return null; + } + } -var CHOMPING_CLIP = 1; -var CHOMPING_STRIP = 2; -var CHOMPING_KEEP = 3; + }; + var Events_1 = Events; + + var DLList$1, Events$1, Queues; + + DLList$1 = DLList_1; + + Events$1 = Events_1; + + Queues = class Queues { + constructor(num_priorities) { + var i; + this.Events = new Events$1(this); + this._length = 0; + this._lists = (function() { + var j, ref, results; + results = []; + for (i = j = 1, ref = num_priorities; (1 <= ref ? j <= ref : j >= ref); i = 1 <= ref ? 
++j : --j) { + results.push(new DLList$1((() => { + return this.incr(); + }), (() => { + return this.decr(); + }))); + } + return results; + }).call(this); + } + + incr() { + if (this._length++ === 0) { + return this.Events.trigger("leftzero"); + } + } + + decr() { + if (--this._length === 0) { + return this.Events.trigger("zero"); + } + } + + push(job) { + return this._lists[job.options.priority].push(job); + } + + queued(priority) { + if (priority != null) { + return this._lists[priority].length; + } else { + return this._length; + } + } + + shiftAll(fn) { + return this._lists.forEach(function(list) { + return list.forEachShift(fn); + }); + } + + getFirst(arr = this._lists) { + var j, len, list; + for (j = 0, len = arr.length; j < len; j++) { + list = arr[j]; + if (list.length > 0) { + return list; + } + } + return []; + } + + shiftLastFrom(priority) { + return this.getFirst(this._lists.slice(priority).reverse()).shift(); + } -var PATTERN_NON_PRINTABLE = /[\x00-\x08\x0B\x0C\x0E-\x1F\x7F-\x84\x86-\x9F\uFFFE\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/; -var PATTERN_NON_ASCII_LINE_BREAKS = /[\x85\u2028\u2029]/; -var PATTERN_FLOW_INDICATORS = /[,\[\]\{\}]/; -var PATTERN_TAG_HANDLE = /^(?:!|!!|![a-z\-]+!)$/i; -var PATTERN_TAG_URI = /^(?:!|[^,\[\]\{\}])(?:%[0-9a-f]{2}|[0-9a-z\-#;\/\?:@&=\+\$,_\.!~\*'\(\)\[\]])*$/i; + }; + var Queues_1 = Queues; + + var BottleneckError; + + BottleneckError = class BottleneckError extends Error {}; + + var BottleneckError_1 = BottleneckError; + + var BottleneckError$1, DEFAULT_PRIORITY, Job, NUM_PRIORITIES, parser$1; + + NUM_PRIORITIES = 10; + + DEFAULT_PRIORITY = 5; + + parser$1 = parser; + + BottleneckError$1 = BottleneckError_1; + + Job = class Job { + constructor(task, args, options, jobDefaults, rejectOnDrop, Events, _states, Promise) { + this.task = task; + this.args = args; + this.rejectOnDrop = rejectOnDrop; + this.Events = Events; + this._states = _states; + this.Promise = Promise; + this.options = parser$1.load(options, jobDefaults); + this.options.priority = this._sanitizePriority(this.options.priority); + if (this.options.id === jobDefaults.id) { + this.options.id = `${this.options.id}-${this._randomIndex()}`; + } + this.promise = new this.Promise((_resolve, _reject) => { + this._resolve = _resolve; + this._reject = _reject; + }); + this.retryCount = 0; + } + + _sanitizePriority(priority) { + var sProperty; + sProperty = ~~priority !== priority ? DEFAULT_PRIORITY : priority; + if (sProperty < 0) { + return 0; + } else if (sProperty > NUM_PRIORITIES - 1) { + return NUM_PRIORITIES - 1; + } else { + return sProperty; + } + } + + _randomIndex() { + return Math.random().toString(36).slice(2); + } + + doDrop({error, message = "This job has been dropped by Bottleneck"} = {}) { + if (this._states.remove(this.options.id)) { + if (this.rejectOnDrop) { + this._reject(error != null ? error : new BottleneckError$1(message)); + } + this.Events.trigger("dropped", {args: this.args, options: this.options, task: this.task, promise: this.promise}); + return true; + } else { + return false; + } + } + + _assertStatus(expected) { + var status; + status = this._states.jobStatus(this.options.id); + if (!(status === expected || (expected === "DONE" && status === null))) { + throw new BottleneckError$1(`Invalid job status ${status}, expected ${expected}. 
Please open an issue at https://github.com/SGrondin/bottleneck/issues`); + } + } + + doReceive() { + this._states.start(this.options.id); + return this.Events.trigger("received", {args: this.args, options: this.options}); + } + + doQueue(reachedHWM, blocked) { + this._assertStatus("RECEIVED"); + this._states.next(this.options.id); + return this.Events.trigger("queued", {args: this.args, options: this.options, reachedHWM, blocked}); + } + + doRun() { + if (this.retryCount === 0) { + this._assertStatus("QUEUED"); + this._states.next(this.options.id); + } else { + this._assertStatus("EXECUTING"); + } + return this.Events.trigger("scheduled", {args: this.args, options: this.options}); + } + + async doExecute(chained, clearGlobalState, run, free) { + var error, eventInfo, passed; + if (this.retryCount === 0) { + this._assertStatus("RUNNING"); + this._states.next(this.options.id); + } else { + this._assertStatus("EXECUTING"); + } + eventInfo = {args: this.args, options: this.options, retryCount: this.retryCount}; + this.Events.trigger("executing", eventInfo); + try { + passed = (await (chained != null ? chained.schedule(this.options, this.task, ...this.args) : this.task(...this.args))); + if (clearGlobalState()) { + this.doDone(eventInfo); + await free(this.options, eventInfo); + this._assertStatus("DONE"); + return this._resolve(passed); + } + } catch (error1) { + error = error1; + return this._onFailure(error, eventInfo, clearGlobalState, run, free); + } + } + + doExpire(clearGlobalState, run, free) { + var error, eventInfo; + if (this._states.jobStatus(this.options.id === "RUNNING")) { + this._states.next(this.options.id); + } + this._assertStatus("EXECUTING"); + eventInfo = {args: this.args, options: this.options, retryCount: this.retryCount}; + error = new BottleneckError$1(`This job timed out after ${this.options.expiration} ms.`); + return this._onFailure(error, eventInfo, clearGlobalState, run, free); + } + + async _onFailure(error, eventInfo, clearGlobalState, run, free) { + var retry, retryAfter; + if (clearGlobalState()) { + retry = (await this.Events.trigger("failed", error, eventInfo)); + if (retry != null) { + retryAfter = ~~retry; + this.Events.trigger("retry", `Retrying ${this.options.id} after ${retryAfter} ms`, eventInfo); + this.retryCount++; + return run(retryAfter); + } else { + this.doDone(eventInfo); + await free(this.options, eventInfo); + this._assertStatus("DONE"); + return this._reject(error); + } + } + } + + doDone(eventInfo) { + this._assertStatus("EXECUTING"); + this._states.next(this.options.id); + return this.Events.trigger("done", eventInfo); + } -function _class(obj) { return Object.prototype.toString.call(obj); } + }; -function is_EOL(c) { - return (c === 0x0A/* LF */) || (c === 0x0D/* CR */); -} + var Job_1 = Job; + + var BottleneckError$2, LocalDatastore, parser$2; + + parser$2 = parser; + + BottleneckError$2 = BottleneckError_1; + + LocalDatastore = class LocalDatastore { + constructor(instance, storeOptions, storeInstanceOptions) { + this.instance = instance; + this.storeOptions = storeOptions; + this.clientId = this.instance._randomIndex(); + parser$2.load(storeInstanceOptions, storeInstanceOptions, this); + this._nextRequest = this._lastReservoirRefresh = this._lastReservoirIncrease = Date.now(); + this._running = 0; + this._done = 0; + this._unblockTime = 0; + this.ready = this.Promise.resolve(); + this.clients = {}; + this._startHeartbeat(); + } + + _startHeartbeat() { + var base; + if ((this.heartbeat == null) && 
(((this.storeOptions.reservoirRefreshInterval != null) && (this.storeOptions.reservoirRefreshAmount != null)) || ((this.storeOptions.reservoirIncreaseInterval != null) && (this.storeOptions.reservoirIncreaseAmount != null)))) { + return typeof (base = (this.heartbeat = setInterval(() => { + var amount, incr, maximum, now, reservoir; + now = Date.now(); + if ((this.storeOptions.reservoirRefreshInterval != null) && now >= this._lastReservoirRefresh + this.storeOptions.reservoirRefreshInterval) { + this._lastReservoirRefresh = now; + this.storeOptions.reservoir = this.storeOptions.reservoirRefreshAmount; + this.instance._drainAll(this.computeCapacity()); + } + if ((this.storeOptions.reservoirIncreaseInterval != null) && now >= this._lastReservoirIncrease + this.storeOptions.reservoirIncreaseInterval) { + ({ + reservoirIncreaseAmount: amount, + reservoirIncreaseMaximum: maximum, + reservoir + } = this.storeOptions); + this._lastReservoirIncrease = now; + incr = maximum != null ? Math.min(amount, maximum - reservoir) : amount; + if (incr > 0) { + this.storeOptions.reservoir += incr; + return this.instance._drainAll(this.computeCapacity()); + } + } + }, this.heartbeatInterval))).unref === "function" ? base.unref() : void 0; + } else { + return clearInterval(this.heartbeat); + } + } + + async __publish__(message) { + await this.yieldLoop(); + return this.instance.Events.trigger("message", message.toString()); + } + + async __disconnect__(flush) { + await this.yieldLoop(); + clearInterval(this.heartbeat); + return this.Promise.resolve(); + } + + yieldLoop(t = 0) { + return new this.Promise(function(resolve, reject) { + return setTimeout(resolve, t); + }); + } + + computePenalty() { + var ref; + return (ref = this.storeOptions.penalty) != null ? ref : (15 * this.storeOptions.minTime) || 5000; + } + + async __updateSettings__(options) { + await this.yieldLoop(); + parser$2.overwrite(options, options, this.storeOptions); + this._startHeartbeat(); + this.instance._drainAll(this.computeCapacity()); + return true; + } + + async __running__() { + await this.yieldLoop(); + return this._running; + } + + async __queued__() { + await this.yieldLoop(); + return this.instance.queued(); + } + + async __done__() { + await this.yieldLoop(); + return this._done; + } + + async __groupCheck__(time) { + await this.yieldLoop(); + return (this._nextRequest + this.timeout) < time; + } + + computeCapacity() { + var maxConcurrent, reservoir; + ({maxConcurrent, reservoir} = this.storeOptions); + if ((maxConcurrent != null) && (reservoir != null)) { + return Math.min(maxConcurrent - this._running, reservoir); + } else if (maxConcurrent != null) { + return maxConcurrent - this._running; + } else if (reservoir != null) { + return reservoir; + } else { + return null; + } + } + + conditionsCheck(weight) { + var capacity; + capacity = this.computeCapacity(); + return (capacity == null) || weight <= capacity; + } + + async __incrementReservoir__(incr) { + var reservoir; + await this.yieldLoop(); + reservoir = this.storeOptions.reservoir += incr; + this.instance._drainAll(this.computeCapacity()); + return reservoir; + } + + async __currentReservoir__() { + await this.yieldLoop(); + return this.storeOptions.reservoir; + } + + isBlocked(now) { + return this._unblockTime >= now; + } + + check(weight, now) { + return this.conditionsCheck(weight) && (this._nextRequest - now) <= 0; + } + + async __check__(weight) { + var now; + await this.yieldLoop(); + now = Date.now(); + return this.check(weight, now); + } + + async 
__register__(index, weight, expiration) { + var now, wait; + await this.yieldLoop(); + now = Date.now(); + if (this.conditionsCheck(weight)) { + this._running += weight; + if (this.storeOptions.reservoir != null) { + this.storeOptions.reservoir -= weight; + } + wait = Math.max(this._nextRequest - now, 0); + this._nextRequest = now + wait + this.storeOptions.minTime; + return { + success: true, + wait, + reservoir: this.storeOptions.reservoir + }; + } else { + return { + success: false + }; + } + } + + strategyIsBlock() { + return this.storeOptions.strategy === 3; + } + + async __submit__(queueLength, weight) { + var blocked, now, reachedHWM; + await this.yieldLoop(); + if ((this.storeOptions.maxConcurrent != null) && weight > this.storeOptions.maxConcurrent) { + throw new BottleneckError$2(`Impossible to add a job having a weight of ${weight} to a limiter having a maxConcurrent setting of ${this.storeOptions.maxConcurrent}`); + } + now = Date.now(); + reachedHWM = (this.storeOptions.highWater != null) && queueLength === this.storeOptions.highWater && !this.check(weight, now); + blocked = this.strategyIsBlock() && (reachedHWM || this.isBlocked(now)); + if (blocked) { + this._unblockTime = now + this.computePenalty(); + this._nextRequest = this._unblockTime + this.storeOptions.minTime; + this.instance._dropAllQueued(); + } + return { + reachedHWM, + blocked, + strategy: this.storeOptions.strategy + }; + } + + async __free__(index, weight) { + await this.yieldLoop(); + this._running -= weight; + this._done += weight; + this.instance._drainAll(this.computeCapacity()); + return { + running: this._running + }; + } -function is_WHITE_SPACE(c) { - return (c === 0x09/* Tab */) || (c === 0x20/* Space */); -} + }; -function is_WS_OR_EOL(c) { - return (c === 0x09/* Tab */) || - (c === 0x20/* Space */) || - (c === 0x0A/* LF */) || - (c === 0x0D/* CR */); -} + var LocalDatastore_1 = LocalDatastore; + + var BottleneckError$3, States; + + BottleneckError$3 = BottleneckError_1; + + States = class States { + constructor(status1) { + this.status = status1; + this._jobs = {}; + this.counts = this.status.map(function() { + return 0; + }); + } + + next(id) { + var current, next; + current = this._jobs[id]; + next = current + 1; + if ((current != null) && next < this.status.length) { + this.counts[current]--; + this.counts[next]++; + return this._jobs[id]++; + } else if (current != null) { + this.counts[current]--; + return delete this._jobs[id]; + } + } + + start(id) { + var initial; + initial = 0; + this._jobs[id] = initial; + return this.counts[initial]++; + } + + remove(id) { + var current; + current = this._jobs[id]; + if (current != null) { + this.counts[current]--; + delete this._jobs[id]; + } + return current != null; + } + + jobStatus(id) { + var ref; + return (ref = this.status[this._jobs[id]]) != null ? 
ref : null; + } + + statusJobs(status) { + var k, pos, ref, results, v; + if (status != null) { + pos = this.status.indexOf(status); + if (pos < 0) { + throw new BottleneckError$3(`status must be one of ${this.status.join(', ')}`); + } + ref = this._jobs; + results = []; + for (k in ref) { + v = ref[k]; + if (v === pos) { + results.push(k); + } + } + return results; + } else { + return Object.keys(this._jobs); + } + } + + statusCounts() { + return this.counts.reduce(((acc, v, i) => { + acc[this.status[i]] = v; + return acc; + }), {}); + } -function is_FLOW_INDICATOR(c) { - return c === 0x2C/* , */ || - c === 0x5B/* [ */ || - c === 0x5D/* ] */ || - c === 0x7B/* { */ || - c === 0x7D/* } */; -} + }; -function fromHexCode(c) { - var lc; + var States_1 = States; + + var DLList$2, Sync; + + DLList$2 = DLList_1; + + Sync = class Sync { + constructor(name, Promise) { + this.schedule = this.schedule.bind(this); + this.name = name; + this.Promise = Promise; + this._running = 0; + this._queue = new DLList$2(); + } + + isEmpty() { + return this._queue.length === 0; + } + + async _tryToRun() { + var args, cb, error, reject, resolve, returned, task; + if ((this._running < 1) && this._queue.length > 0) { + this._running++; + ({task, args, resolve, reject} = this._queue.shift()); + cb = (await (async function() { + try { + returned = (await task(...args)); + return function() { + return resolve(returned); + }; + } catch (error1) { + error = error1; + return function() { + return reject(error); + }; + } + })()); + this._running--; + this._tryToRun(); + return cb(); + } + } + + schedule(task, ...args) { + var promise, reject, resolve; + resolve = reject = null; + promise = new this.Promise(function(_resolve, _reject) { + resolve = _resolve; + return reject = _reject; + }); + this._queue.push({task, args, resolve, reject}); + this._tryToRun(); + return promise; + } - if ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) { - return c - 0x30; - } + }; - /*eslint-disable no-bitwise*/ - lc = c | 0x20; + var Sync_1 = Sync; - if ((0x61/* a */ <= lc) && (lc <= 0x66/* f */)) { - return lc - 0x61 + 10; - } + var version = "2.19.5"; + var version$1 = { + version: version + }; - return -1; -} + var version$2 = /*#__PURE__*/Object.freeze({ + version: version, + default: version$1 + }); -function escapedHexLen(c) { - if (c === 0x78/* x */) { return 2; } - if (c === 0x75/* u */) { return 4; } - if (c === 0x55/* U */) { return 8; } - return 0; -} + var require$$2 = () => console.log('You must import the full version of Bottleneck in order to use this feature.'); + + var require$$3 = () => console.log('You must import the full version of Bottleneck in order to use this feature.'); + + var require$$4 = () => console.log('You must import the full version of Bottleneck in order to use this feature.'); + + var Events$2, Group, IORedisConnection$1, RedisConnection$1, Scripts$1, parser$3; + + parser$3 = parser; + + Events$2 = Events_1; + + RedisConnection$1 = require$$2; + + IORedisConnection$1 = require$$3; + + Scripts$1 = require$$4; + + Group = (function() { + class Group { + constructor(limiterOptions = {}) { + this.deleteKey = this.deleteKey.bind(this); + this.limiterOptions = limiterOptions; + parser$3.load(this.limiterOptions, this.defaults, this); + this.Events = new Events$2(this); + this.instances = {}; + this.Bottleneck = Bottleneck_1; + this._startAutoCleanup(); + this.sharedConnection = this.connection != null; + if (this.connection == null) { + if (this.limiterOptions.datastore === "redis") { + this.connection = new 
RedisConnection$1(Object.assign({}, this.limiterOptions, {Events: this.Events})); + } else if (this.limiterOptions.datastore === "ioredis") { + this.connection = new IORedisConnection$1(Object.assign({}, this.limiterOptions, {Events: this.Events})); + } + } + } + + key(key = "") { + var ref; + return (ref = this.instances[key]) != null ? ref : (() => { + var limiter; + limiter = this.instances[key] = new this.Bottleneck(Object.assign(this.limiterOptions, { + id: `${this.id}-${key}`, + timeout: this.timeout, + connection: this.connection + })); + this.Events.trigger("created", limiter, key); + return limiter; + })(); + } + + async deleteKey(key = "") { + var deleted, instance; + instance = this.instances[key]; + if (this.connection) { + deleted = (await this.connection.__runCommand__(['del', ...Scripts$1.allKeys(`${this.id}-${key}`)])); + } + if (instance != null) { + delete this.instances[key]; + await instance.disconnect(); + } + return (instance != null) || deleted > 0; + } + + limiters() { + var k, ref, results, v; + ref = this.instances; + results = []; + for (k in ref) { + v = ref[k]; + results.push({ + key: k, + limiter: v + }); + } + return results; + } + + keys() { + return Object.keys(this.instances); + } + + async clusterKeys() { + var cursor, end, found, i, k, keys, len, next, start; + if (this.connection == null) { + return this.Promise.resolve(this.keys()); + } + keys = []; + cursor = null; + start = `b_${this.id}-`.length; + end = "_settings".length; + while (cursor !== 0) { + [next, found] = (await this.connection.__runCommand__(["scan", cursor != null ? cursor : 0, "match", `b_${this.id}-*_settings`, "count", 10000])); + cursor = ~~next; + for (i = 0, len = found.length; i < len; i++) { + k = found[i]; + keys.push(k.slice(start, -end)); + } + } + return keys; + } + + _startAutoCleanup() { + var base; + clearInterval(this.interval); + return typeof (base = (this.interval = setInterval(async() => { + var e, k, ref, results, time, v; + time = Date.now(); + ref = this.instances; + results = []; + for (k in ref) { + v = ref[k]; + try { + if ((await v._store.__groupCheck__(time))) { + results.push(this.deleteKey(k)); + } else { + results.push(void 0); + } + } catch (error) { + e = error; + results.push(v.Events.trigger("error", e)); + } + } + return results; + }, this.timeout / 2))).unref === "function" ? base.unref() : void 0; + } + + updateSettings(options = {}) { + parser$3.overwrite(options, this.defaults, this); + parser$3.overwrite(options, options, this.limiterOptions); + if (options.timeout != null) { + return this._startAutoCleanup(); + } + } + + disconnect(flush = true) { + var ref; + if (!this.sharedConnection) { + return (ref = this.connection) != null ? 
ref.disconnect(flush) : void 0; + } + } + + } + Group.prototype.defaults = { + timeout: 1000 * 60 * 5, + connection: null, + Promise: Promise, + id: "group-key" + }; + + return Group; + + }).call(commonjsGlobal); + + var Group_1 = Group; + + var Batcher, Events$3, parser$4; + + parser$4 = parser; + + Events$3 = Events_1; + + Batcher = (function() { + class Batcher { + constructor(options = {}) { + this.options = options; + parser$4.load(this.options, this.defaults, this); + this.Events = new Events$3(this); + this._arr = []; + this._resetPromise(); + this._lastFlush = Date.now(); + } + + _resetPromise() { + return this._promise = new this.Promise((res, rej) => { + return this._resolve = res; + }); + } + + _flush() { + clearTimeout(this._timeout); + this._lastFlush = Date.now(); + this._resolve(); + this.Events.trigger("batch", this._arr); + this._arr = []; + return this._resetPromise(); + } + + add(data) { + var ret; + this._arr.push(data); + ret = this._promise; + if (this._arr.length === this.maxSize) { + this._flush(); + } else if ((this.maxTime != null) && this._arr.length === 1) { + this._timeout = setTimeout(() => { + return this._flush(); + }, this.maxTime); + } + return ret; + } + + } + Batcher.prototype.defaults = { + maxTime: null, + maxSize: null, + Promise: Promise + }; + + return Batcher; + + }).call(commonjsGlobal); + + var Batcher_1 = Batcher; + + var require$$4$1 = () => console.log('You must import the full version of Bottleneck in order to use this feature.'); + + var require$$8 = getCjsExportFromNamespace(version$2); + + var Bottleneck, DEFAULT_PRIORITY$1, Events$4, Job$1, LocalDatastore$1, NUM_PRIORITIES$1, Queues$1, RedisDatastore$1, States$1, Sync$1, parser$5, + splice = [].splice; + + NUM_PRIORITIES$1 = 10; + + DEFAULT_PRIORITY$1 = 5; + + parser$5 = parser; + + Queues$1 = Queues_1; + + Job$1 = Job_1; + + LocalDatastore$1 = LocalDatastore_1; + + RedisDatastore$1 = require$$4$1; + + Events$4 = Events_1; + + States$1 = States_1; + + Sync$1 = Sync_1; + + Bottleneck = (function() { + class Bottleneck { + constructor(options = {}, ...invalid) { + var storeInstanceOptions, storeOptions; + this._addToQueue = this._addToQueue.bind(this); + this._validateOptions(options, invalid); + parser$5.load(options, this.instanceDefaults, this); + this._queues = new Queues$1(NUM_PRIORITIES$1); + this._scheduled = {}; + this._states = new States$1(["RECEIVED", "QUEUED", "RUNNING", "EXECUTING"].concat(this.trackDoneStatus ? ["DONE"] : [])); + this._limiter = null; + this.Events = new Events$4(this); + this._submitLock = new Sync$1("submit", this.Promise); + this._registerLock = new Sync$1("register", this.Promise); + storeOptions = parser$5.load(options, this.storeDefaults, {}); + this._store = (function() { + if (this.datastore === "redis" || this.datastore === "ioredis" || (this.connection != null)) { + storeInstanceOptions = parser$5.load(options, this.redisStoreDefaults, {}); + return new RedisDatastore$1(this, storeOptions, storeInstanceOptions); + } else if (this.datastore === "local") { + storeInstanceOptions = parser$5.load(options, this.localStoreDefaults, {}); + return new LocalDatastore$1(this, storeOptions, storeInstanceOptions); + } else { + throw new Bottleneck.prototype.BottleneckError(`Invalid datastore type: ${this.datastore}`); + } + }).call(this); + this._queues.on("leftzero", () => { + var ref; + return (ref = this._store.heartbeat) != null ? typeof ref.ref === "function" ? 
ref.ref() : void 0 : void 0; + }); + this._queues.on("zero", () => { + var ref; + return (ref = this._store.heartbeat) != null ? typeof ref.unref === "function" ? ref.unref() : void 0 : void 0; + }); + } + + _validateOptions(options, invalid) { + if (!((options != null) && typeof options === "object" && invalid.length === 0)) { + throw new Bottleneck.prototype.BottleneckError("Bottleneck v2 takes a single object argument. Refer to https://github.com/SGrondin/bottleneck#upgrading-to-v2 if you're upgrading from Bottleneck v1."); + } + } + + ready() { + return this._store.ready; + } + + clients() { + return this._store.clients; + } + + channel() { + return `b_${this.id}`; + } + + channel_client() { + return `b_${this.id}_${this._store.clientId}`; + } + + publish(message) { + return this._store.__publish__(message); + } + + disconnect(flush = true) { + return this._store.__disconnect__(flush); + } + + chain(_limiter) { + this._limiter = _limiter; + return this; + } + + queued(priority) { + return this._queues.queued(priority); + } + + clusterQueued() { + return this._store.__queued__(); + } + + empty() { + return this.queued() === 0 && this._submitLock.isEmpty(); + } + + running() { + return this._store.__running__(); + } + + done() { + return this._store.__done__(); + } + + jobStatus(id) { + return this._states.jobStatus(id); + } + + jobs(status) { + return this._states.statusJobs(status); + } + + counts() { + return this._states.statusCounts(); + } + + _randomIndex() { + return Math.random().toString(36).slice(2); + } + + check(weight = 1) { + return this._store.__check__(weight); + } + + _clearGlobalState(index) { + if (this._scheduled[index] != null) { + clearTimeout(this._scheduled[index].expiration); + delete this._scheduled[index]; + return true; + } else { + return false; + } + } + + async _free(index, job, options, eventInfo) { + var e, running; + try { + ({running} = (await this._store.__free__(index, options.weight))); + this.Events.trigger("debug", `Freed ${options.id}`, eventInfo); + if (running === 0 && this.empty()) { + return this.Events.trigger("idle"); + } + } catch (error1) { + e = error1; + return this.Events.trigger("error", e); + } + } + + _run(index, job, wait) { + var clearGlobalState, free, run; + job.doRun(); + clearGlobalState = this._clearGlobalState.bind(this, index); + run = this._run.bind(this, index, job); + free = this._free.bind(this, index, job); + return this._scheduled[index] = { + timeout: setTimeout(() => { + return job.doExecute(this._limiter, clearGlobalState, run, free); + }, wait), + expiration: job.options.expiration != null ? 
setTimeout(function() { + return job.doExpire(clearGlobalState, run, free); + }, wait + job.options.expiration) : void 0, + job: job + }; + } + + _drainOne(capacity) { + return this._registerLock.schedule(() => { + var args, index, next, options, queue; + if (this.queued() === 0) { + return this.Promise.resolve(null); + } + queue = this._queues.getFirst(); + ({options, args} = next = queue.first()); + if ((capacity != null) && options.weight > capacity) { + return this.Promise.resolve(null); + } + this.Events.trigger("debug", `Draining ${options.id}`, {args, options}); + index = this._randomIndex(); + return this._store.__register__(index, options.weight, options.expiration).then(({success, wait, reservoir}) => { + var empty; + this.Events.trigger("debug", `Drained ${options.id}`, {success, args, options}); + if (success) { + queue.shift(); + empty = this.empty(); + if (empty) { + this.Events.trigger("empty"); + } + if (reservoir === 0) { + this.Events.trigger("depleted", empty); + } + this._run(index, next, wait); + return this.Promise.resolve(options.weight); + } else { + return this.Promise.resolve(null); + } + }); + }); + } + + _drainAll(capacity, total = 0) { + return this._drainOne(capacity).then((drained) => { + var newCapacity; + if (drained != null) { + newCapacity = capacity != null ? capacity - drained : capacity; + return this._drainAll(newCapacity, total + drained); + } else { + return this.Promise.resolve(total); + } + }).catch((e) => { + return this.Events.trigger("error", e); + }); + } + + _dropAllQueued(message) { + return this._queues.shiftAll(function(job) { + return job.doDrop({message}); + }); + } + + stop(options = {}) { + var done, waitForExecuting; + options = parser$5.load(options, this.stopDefaults); + waitForExecuting = (at) => { + var finished; + finished = () => { + var counts; + counts = this._states.counts; + return (counts[0] + counts[1] + counts[2] + counts[3]) === at; + }; + return new this.Promise((resolve, reject) => { + if (finished()) { + return resolve(); + } else { + return this.on("done", () => { + if (finished()) { + this.removeAllListeners("done"); + return resolve(); + } + }); + } + }); + }; + done = options.dropWaitingJobs ? 
(this._run = function(index, next) { + return next.doDrop({ + message: options.dropErrorMessage + }); + }, this._drainOne = () => { + return this.Promise.resolve(null); + }, this._registerLock.schedule(() => { + return this._submitLock.schedule(() => { + var k, ref, v; + ref = this._scheduled; + for (k in ref) { + v = ref[k]; + if (this.jobStatus(v.job.options.id) === "RUNNING") { + clearTimeout(v.timeout); + clearTimeout(v.expiration); + v.job.doDrop({ + message: options.dropErrorMessage + }); + } + } + this._dropAllQueued(options.dropErrorMessage); + return waitForExecuting(0); + }); + })) : this.schedule({ + priority: NUM_PRIORITIES$1 - 1, + weight: 0 + }, () => { + return waitForExecuting(1); + }); + this._receive = function(job) { + return job._reject(new Bottleneck.prototype.BottleneckError(options.enqueueErrorMessage)); + }; + this.stop = () => { + return this.Promise.reject(new Bottleneck.prototype.BottleneckError("stop() has already been called")); + }; + return done; + } + + async _addToQueue(job) { + var args, blocked, error, options, reachedHWM, shifted, strategy; + ({args, options} = job); + try { + ({reachedHWM, blocked, strategy} = (await this._store.__submit__(this.queued(), options.weight))); + } catch (error1) { + error = error1; + this.Events.trigger("debug", `Could not queue ${options.id}`, {args, options, error}); + job.doDrop({error}); + return false; + } + if (blocked) { + job.doDrop(); + return true; + } else if (reachedHWM) { + shifted = strategy === Bottleneck.prototype.strategy.LEAK ? this._queues.shiftLastFrom(options.priority) : strategy === Bottleneck.prototype.strategy.OVERFLOW_PRIORITY ? this._queues.shiftLastFrom(options.priority + 1) : strategy === Bottleneck.prototype.strategy.OVERFLOW ? job : void 0; + if (shifted != null) { + shifted.doDrop(); + } + if ((shifted == null) || strategy === Bottleneck.prototype.strategy.OVERFLOW) { + if (shifted == null) { + job.doDrop(); + } + return reachedHWM; + } + } + job.doQueue(reachedHWM, blocked); + this._queues.push(job); + await this._drainAll(); + return reachedHWM; + } + + _receive(job) { + if (this._states.jobStatus(job.options.id) != null) { + job._reject(new Bottleneck.prototype.BottleneckError(`A job with the same id already exists (id=${job.options.id})`)); + return false; + } else { + job.doReceive(); + return this._submitLock.schedule(this._addToQueue, job); + } + } + + submit(...args) { + var cb, fn, job, options, ref, ref1, task; + if (typeof args[0] === "function") { + ref = args, [fn, ...args] = ref, [cb] = splice.call(args, -1); + options = parser$5.load({}, this.jobDefaults); + } else { + ref1 = args, [options, fn, ...args] = ref1, [cb] = splice.call(args, -1); + options = parser$5.load(options, this.jobDefaults); + } + task = (...args) => { + return new this.Promise(function(resolve, reject) { + return fn(...args, function(...args) { + return (args[0] != null ? reject : resolve)(args); + }); + }); + }; + job = new Job$1(task, args, options, this.jobDefaults, this.rejectOnDrop, this.Events, this._states, this.Promise); + job.promise.then(function(args) { + return typeof cb === "function" ? cb(...args) : void 0; + }).catch(function(args) { + if (Array.isArray(args)) { + return typeof cb === "function" ? cb(...args) : void 0; + } else { + return typeof cb === "function" ? 
cb(args) : void 0; + } + }); + return this._receive(job); + } + + schedule(...args) { + var job, options, task; + if (typeof args[0] === "function") { + [task, ...args] = args; + options = {}; + } else { + [options, task, ...args] = args; + } + job = new Job$1(task, args, options, this.jobDefaults, this.rejectOnDrop, this.Events, this._states, this.Promise); + this._receive(job); + return job.promise; + } + + wrap(fn) { + var schedule, wrapped; + schedule = this.schedule.bind(this); + wrapped = function(...args) { + return schedule(fn.bind(this), ...args); + }; + wrapped.withOptions = function(options, ...args) { + return schedule(options, fn, ...args); + }; + return wrapped; + } + + async updateSettings(options = {}) { + await this._store.__updateSettings__(parser$5.overwrite(options, this.storeDefaults)); + parser$5.overwrite(options, this.instanceDefaults, this); + return this; + } + + currentReservoir() { + return this._store.__currentReservoir__(); + } + + incrementReservoir(incr = 0) { + return this._store.__incrementReservoir__(incr); + } + + } + Bottleneck.default = Bottleneck; + + Bottleneck.Events = Events$4; + + Bottleneck.version = Bottleneck.prototype.version = require$$8.version; + + Bottleneck.strategy = Bottleneck.prototype.strategy = { + LEAK: 1, + OVERFLOW: 2, + OVERFLOW_PRIORITY: 4, + BLOCK: 3 + }; + + Bottleneck.BottleneckError = Bottleneck.prototype.BottleneckError = BottleneckError_1; + + Bottleneck.Group = Bottleneck.prototype.Group = Group_1; + + Bottleneck.RedisConnection = Bottleneck.prototype.RedisConnection = require$$2; + + Bottleneck.IORedisConnection = Bottleneck.prototype.IORedisConnection = require$$3; + + Bottleneck.Batcher = Bottleneck.prototype.Batcher = Batcher_1; + + Bottleneck.prototype.jobDefaults = { + priority: DEFAULT_PRIORITY$1, + weight: 1, + expiration: null, + id: "" + }; + + Bottleneck.prototype.storeDefaults = { + maxConcurrent: null, + minTime: 0, + highWater: null, + strategy: Bottleneck.prototype.strategy.LEAK, + penalty: null, + reservoir: null, + reservoirRefreshInterval: null, + reservoirRefreshAmount: null, + reservoirIncreaseInterval: null, + reservoirIncreaseAmount: null, + reservoirIncreaseMaximum: null + }; + + Bottleneck.prototype.localStoreDefaults = { + Promise: Promise, + timeout: null, + heartbeatInterval: 250 + }; + + Bottleneck.prototype.redisStoreDefaults = { + Promise: Promise, + timeout: null, + heartbeatInterval: 5000, + clientTimeout: 10000, + Redis: null, + clientOptions: {}, + clusterNodes: null, + clearDatastore: false, + connection: null + }; + + Bottleneck.prototype.instanceDefaults = { + datastore: "local", + connection: null, + id: "", + rejectOnDrop: true, + trackDoneStatus: false, + Promise: Promise + }; + + Bottleneck.prototype.stopDefaults = { + enqueueErrorMessage: "This limiter has been stopped and cannot accept new jobs.", + dropWaitingJobs: true, + dropErrorMessage: "This limiter has been stopped." + }; + + return Bottleneck; -function fromDecimalCode(c) { - if ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) { - return c - 0x30; - } + }).call(commonjsGlobal); - return -1; -} + var Bottleneck_1 = Bottleneck; -function simpleEscapeSequence(c) { - /* eslint-disable indent */ - return (c === 0x30/* 0 */) ? '\x00' : - (c === 0x61/* a */) ? '\x07' : - (c === 0x62/* b */) ? '\x08' : - (c === 0x74/* t */) ? '\x09' : - (c === 0x09/* Tab */) ? '\x09' : - (c === 0x6E/* n */) ? '\x0A' : - (c === 0x76/* v */) ? '\x0B' : - (c === 0x66/* f */) ? '\x0C' : - (c === 0x72/* r */) ? '\x0D' : - (c === 0x65/* e */) ? 
'\x1B' : - (c === 0x20/* Space */) ? ' ' : - (c === 0x22/* " */) ? '\x22' : - (c === 0x2F/* / */) ? '/' : - (c === 0x5C/* \ */) ? '\x5C' : - (c === 0x4E/* N */) ? '\x85' : - (c === 0x5F/* _ */) ? '\xA0' : - (c === 0x4C/* L */) ? '\u2028' : - (c === 0x50/* P */) ? '\u2029' : ''; -} + var lib = Bottleneck_1; -function charFromCodepoint(c) { - if (c <= 0xFFFF) { - return String.fromCharCode(c); - } - // Encode UTF-16 surrogate pair - // https://en.wikipedia.org/wiki/UTF-16#Code_points_U.2B010000_to_U.2B10FFFF - return String.fromCharCode( - ((c - 0x010000) >> 10) + 0xD800, - ((c - 0x010000) & 0x03FF) + 0xDC00 - ); -} + return lib; -var simpleEscapeCheck = new Array(256); // integer, for fast access -var simpleEscapeMap = new Array(256); -for (var i = 0; i < 256; i++) { - simpleEscapeCheck[i] = simpleEscapeSequence(i) ? 1 : 0; - simpleEscapeMap[i] = simpleEscapeSequence(i); -} +}))); -function State(input, options) { - this.input = input; +/***/ }), - this.filename = options['filename'] || null; - this.schema = options['schema'] || DEFAULT_SCHEMA; - this.onWarning = options['onWarning'] || null; - // (Hidden) Remove? makes the loader to expect YAML 1.1 documents - // if such documents have no explicit %YAML directive - this.legacy = options['legacy'] || false; +/***/ 8932: +/***/ ((__unused_webpack_module, exports) => { - this.json = options['json'] || false; - this.listener = options['listener'] || null; +"use strict"; - this.implicitTypes = this.schema.compiledImplicit; - this.typeMap = this.schema.compiledTypeMap; - this.length = input.length; - this.position = 0; - this.line = 0; - this.lineStart = 0; - this.lineIndent = 0; +Object.defineProperty(exports, "__esModule", ({ value: true })); + +class Deprecation extends Error { + constructor(message) { + super(message); // Maintains proper stack trace (only available on V8) - // position of first leading tab in the current line, - // used to make sure there are no tabs in the indentation - this.firstTabInLine = -1; + /* istanbul ignore next */ - this.documents = []; + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } - /* - this.version; - this.checkLineBreaks; - this.tagMap; - this.anchorMap; - this.tag; - this.anchor; - this.kind; - this.result;*/ + this.name = 'Deprecation'; + } } +exports.Deprecation = Deprecation; -function generateError(state, message) { - var mark = { - name: state.filename, - buffer: state.input.slice(0, -1), // omit trailing \0 - position: state.position, - line: state.line, - column: state.position - state.lineStart - }; - mark.snippet = makeSnippet(mark); +/***/ }), - return new YAMLException(message, mark); -} +/***/ 2437: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -function throwError(state, message) { - throw generateError(state, message); -} +/* @flow */ +/*:: -function throwWarning(state, message) { - if (state.onWarning) { - state.onWarning.call(null, generateError(state, message)); - } +type DotenvParseOptions = { + debug?: boolean } +// keys and values from src +type DotenvParseOutput = { [string]: string } -var directiveHandlers = { +type DotenvConfigOptions = { + path?: string, // path to .env file + encoding?: string, // encoding of .env file + debug?: string // turn on logging for debugging purposes +} - YAML: function handleYamlDirective(state, name, args) { +type DotenvConfigOutput = { + parsed?: DotenvParseOutput, + error?: Error +} - var match, major, minor; +*/ - if (state.version !== null) { - throwError(state, 'duplication of %YAML 
directive'); - } +const fs = __nccwpck_require__(5747) +const path = __nccwpck_require__(5622) - if (args.length !== 1) { - throwError(state, 'YAML directive accepts exactly one argument'); - } +function log (message /*: string */) { + console.log(`[dotenv][DEBUG] ${message}`) +} - match = /^([0-9]+)\.([0-9]+)$/.exec(args[0]); +const NEWLINE = '\n' +const RE_INI_KEY_VAL = /^\s*([\w.-]+)\s*=\s*(.*)?\s*$/ +const RE_NEWLINES = /\\n/g +const NEWLINES_MATCH = /\n|\r|\r\n/ - if (match === null) { - throwError(state, 'ill-formed argument of the YAML directive'); - } +// Parses src into an Object +function parse (src /*: string | Buffer */, options /*: ?DotenvParseOptions */) /*: DotenvParseOutput */ { + const debug = Boolean(options && options.debug) + const obj = {} - major = parseInt(match[1], 10); - minor = parseInt(match[2], 10); + // convert Buffers before splitting into lines and processing + src.toString().split(NEWLINES_MATCH).forEach(function (line, idx) { + // matching "KEY' and 'VAL' in 'KEY=VAL' + const keyValueArr = line.match(RE_INI_KEY_VAL) + // matched? + if (keyValueArr != null) { + const key = keyValueArr[1] + // default undefined or missing values to empty string + let val = (keyValueArr[2] || '') + const end = val.length - 1 + const isDoubleQuoted = val[0] === '"' && val[end] === '"' + const isSingleQuoted = val[0] === "'" && val[end] === "'" - if (major !== 1) { - throwError(state, 'unacceptable YAML version of the document'); - } + // if single or double quoted, remove quotes + if (isSingleQuoted || isDoubleQuoted) { + val = val.substring(1, end) - state.version = args[0]; - state.checkLineBreaks = (minor < 2); + // if double quoted, expand newlines + if (isDoubleQuoted) { + val = val.replace(RE_NEWLINES, NEWLINE) + } + } else { + // remove surrounding whitespace + val = val.trim() + } - if (minor !== 1 && minor !== 2) { - throwWarning(state, 'unsupported YAML version of the document'); + obj[key] = val + } else if (debug) { + log(`did not match key and value when parsing line ${idx + 1}: ${line}`) } - }, + }) - TAG: function handleTagDirective(state, name, args) { + return obj +} - var handle, prefix; +// Populates process.env from .env file +function config (options /*: ?DotenvConfigOptions */) /*: DotenvConfigOutput */ { + let dotenvPath = path.resolve(process.cwd(), '.env') + let encoding /*: string */ = 'utf8' + let debug = false - if (args.length !== 2) { - throwError(state, 'TAG directive accepts exactly two arguments'); + if (options) { + if (options.path != null) { + dotenvPath = options.path } - - handle = args[0]; - prefix = args[1]; - - if (!PATTERN_TAG_HANDLE.test(handle)) { - throwError(state, 'ill-formed tag handle (first argument) of the TAG directive'); + if (options.encoding != null) { + encoding = options.encoding } - - if (_hasOwnProperty.call(state.tagMap, handle)) { - throwError(state, 'there is a previously declared suffix for "' + handle + '" tag handle'); + if (options.debug != null) { + debug = true } + } - if (!PATTERN_TAG_URI.test(prefix)) { - throwError(state, 'ill-formed tag prefix (second argument) of the TAG directive'); - } + try { + // specifying an encoding returns a string instead of a buffer + const parsed = parse(fs.readFileSync(dotenvPath, { encoding }), { debug }) - try { - prefix = decodeURIComponent(prefix); - } catch (err) { - throwError(state, 'tag prefix is malformed: ' + prefix); - } + Object.keys(parsed).forEach(function (key) { + if (!Object.prototype.hasOwnProperty.call(process.env, key)) { + process.env[key] = parsed[key] + } 
else if (debug) { + log(`"${key}" is already defined in \`process.env\` and will not be overwritten`) + } + }) - state.tagMap[handle] = prefix; + return { parsed } + } catch (e) { + return { error: e } } -}; +} +module.exports.config = config +module.exports.parse = parse -function captureSegment(state, start, end, checkJson) { - var _position, _length, _character, _result; - if (start < end) { - _result = state.input.slice(start, end); +/***/ }), - if (checkJson) { - for (_position = 0, _length = _result.length; _position < _length; _position += 1) { - _character = _result.charCodeAt(_position); - if (!(_character === 0x09 || - (0x20 <= _character && _character <= 0x10FFFF))) { - throwError(state, 'expected valid JSON character'); - } - } - } else if (PATTERN_NON_PRINTABLE.test(_result)) { - throwError(state, 'the stream contains non-printable characters'); - } +/***/ 3338: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - state.result += _result; - } -} +"use strict"; -function mergeMappings(state, destination, source, overridableKeys) { - var sourceKeys, key, index, quantity; - if (!common.isObject(source)) { - throwError(state, 'cannot merge mappings; the provided source object is unacceptable'); - } +const fs = __nccwpck_require__(7758) +const path = __nccwpck_require__(5622) +const mkdirsSync = __nccwpck_require__(2915).mkdirsSync +const utimesMillisSync = __nccwpck_require__(2548).utimesMillisSync +const stat = __nccwpck_require__(3901) - sourceKeys = Object.keys(source); +function copySync (src, dest, opts) { + if (typeof opts === 'function') { + opts = { filter: opts } + } - for (index = 0, quantity = sourceKeys.length; index < quantity; index += 1) { - key = sourceKeys[index]; + opts = opts || {} + opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now + opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber - if (!_hasOwnProperty.call(destination, key)) { - destination[key] = source[key]; - overridableKeys[key] = true; - } + // Warn about using preserveTimestamps on 32-bit node + if (opts.preserveTimestamps && process.arch === 'ia32') { + console.warn(`fs-extra: Using the preserveTimestamps option in 32-bit node is not recommended;\n + see https://github.com/jprichardson/node-fs-extra/issues/269`) } + + const { srcStat, destStat } = stat.checkPathsSync(src, dest, 'copy', opts) + stat.checkParentPathsSync(src, srcStat, dest, 'copy') + return handleFilterAndCopy(destStat, src, dest, opts) } -function storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, - startLine, startLineStart, startPos) { +function handleFilterAndCopy (destStat, src, dest, opts) { + if (opts.filter && !opts.filter(src, dest)) return + const destParent = path.dirname(dest) + if (!fs.existsSync(destParent)) mkdirsSync(destParent) + return getStats(destStat, src, dest, opts) +} - var index, quantity; +function startCopy (destStat, src, dest, opts) { + if (opts.filter && !opts.filter(src, dest)) return + return getStats(destStat, src, dest, opts) +} - // The output is a plain object here, so keys can only be strings. - // We need to convert keyNode to a string, but doing so can hang the process - // (deeply nested arrays that explode exponentially using aliases). - if (Array.isArray(keyNode)) { - keyNode = Array.prototype.slice.call(keyNode); +function getStats (destStat, src, dest, opts) { + const statSync = opts.dereference ? 
fs.statSync : fs.lstatSync + const srcStat = statSync(src) - for (index = 0, quantity = keyNode.length; index < quantity; index += 1) { - if (Array.isArray(keyNode[index])) { - throwError(state, 'nested arrays are not supported inside keys'); - } + if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts) + else if (srcStat.isFile() || + srcStat.isCharacterDevice() || + srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts) + else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts) + else if (srcStat.isSocket()) throw new Error(`Cannot copy a socket file: ${src}`) + else if (srcStat.isFIFO()) throw new Error(`Cannot copy a FIFO pipe: ${src}`) + throw new Error(`Unknown file: ${src}`) +} - if (typeof keyNode === 'object' && _class(keyNode[index]) === '[object Object]') { - keyNode[index] = '[object Object]'; - } - } - } +function onFile (srcStat, destStat, src, dest, opts) { + if (!destStat) return copyFile(srcStat, src, dest, opts) + return mayCopyFile(srcStat, src, dest, opts) +} - // Avoid code execution in load() via toString property - // (still use its own toString for arrays, timestamps, - // and whatever user schema extensions happen to have @@toStringTag) - if (typeof keyNode === 'object' && _class(keyNode) === '[object Object]') { - keyNode = '[object Object]'; +function mayCopyFile (srcStat, src, dest, opts) { + if (opts.overwrite) { + fs.unlinkSync(dest) + return copyFile(srcStat, src, dest, opts) + } else if (opts.errorOnExist) { + throw new Error(`'${dest}' already exists`) } +} +function copyFile (srcStat, src, dest, opts) { + fs.copyFileSync(src, dest) + if (opts.preserveTimestamps) handleTimestamps(srcStat.mode, src, dest) + return setDestMode(dest, srcStat.mode) +} - keyNode = String(keyNode); +function handleTimestamps (srcMode, src, dest) { + // Make sure the file is writable before setting the timestamp + // otherwise open fails with EPERM when invoked with 'r+' + // (through utimes call) + if (fileIsNotWritable(srcMode)) makeFileWritable(dest, srcMode) + return setDestTimestamps(src, dest) +} - if (_result === null) { - _result = {}; - } +function fileIsNotWritable (srcMode) { + return (srcMode & 0o200) === 0 +} - if (keyTag === 'tag:yaml.org,2002:merge') { - if (Array.isArray(valueNode)) { - for (index = 0, quantity = valueNode.length; index < quantity; index += 1) { - mergeMappings(state, _result, valueNode[index], overridableKeys); - } - } else { - mergeMappings(state, _result, valueNode, overridableKeys); - } - } else { - if (!state.json && - !_hasOwnProperty.call(overridableKeys, keyNode) && - _hasOwnProperty.call(_result, keyNode)) { - state.line = startLine || state.line; - state.lineStart = startLineStart || state.lineStart; - state.position = startPos || state.position; - throwError(state, 'duplicated mapping key'); - } +function makeFileWritable (dest, srcMode) { + return setDestMode(dest, srcMode | 0o200) +} - // used for this specific key only because Object.defineProperty is slow - if (keyNode === '__proto__') { - Object.defineProperty(_result, keyNode, { - configurable: true, - enumerable: true, - writable: true, - value: valueNode - }); - } else { - _result[keyNode] = valueNode; - } - delete overridableKeys[keyNode]; - } +function setDestMode (dest, srcMode) { + return fs.chmodSync(dest, srcMode) +} - return _result; +function setDestTimestamps (src, dest) { + // The initial srcStat.atime cannot be trusted + // because it is modified by the read(2) system call + // (See 
https://nodejs.org/api/fs.html#fs_stat_time_values) + const updatedSrcStat = fs.statSync(src) + return utimesMillisSync(dest, updatedSrcStat.atime, updatedSrcStat.mtime) } -function readLineBreak(state) { - var ch; +function onDir (srcStat, destStat, src, dest, opts) { + if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts) + return copyDir(src, dest, opts) +} - ch = state.input.charCodeAt(state.position); +function mkDirAndCopy (srcMode, src, dest, opts) { + fs.mkdirSync(dest) + copyDir(src, dest, opts) + return setDestMode(dest, srcMode) +} - if (ch === 0x0A/* LF */) { - state.position++; - } else if (ch === 0x0D/* CR */) { - state.position++; - if (state.input.charCodeAt(state.position) === 0x0A/* LF */) { - state.position++; - } - } else { - throwError(state, 'a line break is expected'); - } +function copyDir (src, dest, opts) { + fs.readdirSync(src).forEach(item => copyDirItem(item, src, dest, opts)) +} - state.line += 1; - state.lineStart = state.position; - state.firstTabInLine = -1; +function copyDirItem (item, src, dest, opts) { + const srcItem = path.join(src, item) + const destItem = path.join(dest, item) + const { destStat } = stat.checkPathsSync(srcItem, destItem, 'copy', opts) + return startCopy(destStat, srcItem, destItem, opts) } -function skipSeparationSpace(state, allowComments, checkIndent) { - var lineBreaks = 0, - ch = state.input.charCodeAt(state.position); +function onLink (destStat, src, dest, opts) { + let resolvedSrc = fs.readlinkSync(src) + if (opts.dereference) { + resolvedSrc = path.resolve(process.cwd(), resolvedSrc) + } - while (ch !== 0) { - while (is_WHITE_SPACE(ch)) { - if (ch === 0x09/* Tab */ && state.firstTabInLine === -1) { - state.firstTabInLine = state.position; - } - ch = state.input.charCodeAt(++state.position); + if (!destStat) { + return fs.symlinkSync(resolvedSrc, dest) + } else { + let resolvedDest + try { + resolvedDest = fs.readlinkSync(dest) + } catch (err) { + // dest exists and is a regular file or directory, + // Windows may throw UNKNOWN error. If dest already exists, + // fs throws error anyway, so no need to guard against it here. + if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlinkSync(resolvedSrc, dest) + throw err } - - if (allowComments && ch === 0x23/* # */) { - do { - ch = state.input.charCodeAt(++state.position); - } while (ch !== 0x0A/* LF */ && ch !== 0x0D/* CR */ && ch !== 0); + if (opts.dereference) { + resolvedDest = path.resolve(process.cwd(), resolvedDest) } - - if (is_EOL(ch)) { - readLineBreak(state); - - ch = state.input.charCodeAt(state.position); - lineBreaks++; - state.lineIndent = 0; - - while (ch === 0x20/* Space */) { - state.lineIndent++; - ch = state.input.charCodeAt(++state.position); - } - } else { - break; + if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) { + throw new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`) } - } - if (checkIndent !== -1 && lineBreaks !== 0 && state.lineIndent < checkIndent) { - throwWarning(state, 'deficient indentation'); + // prevent copy if src is a subdir of dest since unlinking + // dest in this case would result in removing src contents + // and therefore a broken symlink would be created. 
+ if (fs.statSync(dest).isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) { + throw new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`) + } + return copyLink(resolvedSrc, dest) } - - return lineBreaks; } -function testDocumentSeparator(state) { - var _position = state.position, - ch; +function copyLink (resolvedSrc, dest) { + fs.unlinkSync(dest) + return fs.symlinkSync(resolvedSrc, dest) +} - ch = state.input.charCodeAt(_position); +module.exports = copySync - // Condition state.position === state.lineStart is tested - // in parent on each call, for efficiency. No needs to test here again. - if ((ch === 0x2D/* - */ || ch === 0x2E/* . */) && - ch === state.input.charCodeAt(_position + 1) && - ch === state.input.charCodeAt(_position + 2)) { - _position += 3; +/***/ }), - ch = state.input.charCodeAt(_position); +/***/ 1135: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (ch === 0 || is_WS_OR_EOL(ch)) { - return true; - } - } +"use strict"; - return false; -} -function writeFoldedLines(state, count) { - if (count === 1) { - state.result += ' '; - } else if (count > 1) { - state.result += common.repeat('\n', count - 1); - } +module.exports = { + copySync: __nccwpck_require__(3338) } -function readPlainScalar(state, nodeIndent, withinFlowCollection) { - var preceding, - following, - captureStart, - captureEnd, - hasPendingContent, - _line, - _lineStart, - _lineIndent, - _kind = state.kind, - _result = state.result, - ch; - - ch = state.input.charCodeAt(state.position); +/***/ }), - if (is_WS_OR_EOL(ch) || - is_FLOW_INDICATOR(ch) || - ch === 0x23/* # */ || - ch === 0x26/* & */ || - ch === 0x2A/* * */ || - ch === 0x21/* ! */ || - ch === 0x7C/* | */ || - ch === 0x3E/* > */ || - ch === 0x27/* ' */ || - ch === 0x22/* " */ || - ch === 0x25/* % */ || - ch === 0x40/* @ */ || - ch === 0x60/* ` */) { - return false; - } +/***/ 8834: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (ch === 0x3F/* ? */ || ch === 0x2D/* - */) { - following = state.input.charCodeAt(state.position + 1); +"use strict"; - if (is_WS_OR_EOL(following) || - withinFlowCollection && is_FLOW_INDICATOR(following)) { - return false; - } + +const fs = __nccwpck_require__(7758) +const path = __nccwpck_require__(5622) +const mkdirs = __nccwpck_require__(2915).mkdirs +const pathExists = __nccwpck_require__(3835).pathExists +const utimesMillis = __nccwpck_require__(2548).utimesMillis +const stat = __nccwpck_require__(3901) + +function copy (src, dest, opts, cb) { + if (typeof opts === 'function' && !cb) { + cb = opts + opts = {} + } else if (typeof opts === 'function') { + opts = { filter: opts } } - state.kind = 'scalar'; - state.result = ''; - captureStart = captureEnd = state.position; - hasPendingContent = false; + cb = cb || function () {} + opts = opts || {} - while (ch !== 0) { - if (ch === 0x3A/* : */) { - following = state.input.charCodeAt(state.position + 1); + opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now + opts.overwrite = 'overwrite' in opts ? 
!!opts.overwrite : opts.clobber // overwrite falls back to clobber - if (is_WS_OR_EOL(following) || - withinFlowCollection && is_FLOW_INDICATOR(following)) { - break; - } + // Warn about using preserveTimestamps on 32-bit node + if (opts.preserveTimestamps && process.arch === 'ia32') { + console.warn(`fs-extra: Using the preserveTimestamps option in 32-bit node is not recommended;\n + see https://github.com/jprichardson/node-fs-extra/issues/269`) + } - } else if (ch === 0x23/* # */) { - preceding = state.input.charCodeAt(state.position - 1); + stat.checkPaths(src, dest, 'copy', opts, (err, stats) => { + if (err) return cb(err) + const { srcStat, destStat } = stats + stat.checkParentPaths(src, srcStat, dest, 'copy', err => { + if (err) return cb(err) + if (opts.filter) return handleFilter(checkParentDir, destStat, src, dest, opts, cb) + return checkParentDir(destStat, src, dest, opts, cb) + }) + }) +} - if (is_WS_OR_EOL(preceding)) { - break; - } +function checkParentDir (destStat, src, dest, opts, cb) { + const destParent = path.dirname(dest) + pathExists(destParent, (err, dirExists) => { + if (err) return cb(err) + if (dirExists) return getStats(destStat, src, dest, opts, cb) + mkdirs(destParent, err => { + if (err) return cb(err) + return getStats(destStat, src, dest, opts, cb) + }) + }) +} - } else if ((state.position === state.lineStart && testDocumentSeparator(state)) || - withinFlowCollection && is_FLOW_INDICATOR(ch)) { - break; +function handleFilter (onInclude, destStat, src, dest, opts, cb) { + Promise.resolve(opts.filter(src, dest)).then(include => { + if (include) return onInclude(destStat, src, dest, opts, cb) + return cb() + }, error => cb(error)) +} - } else if (is_EOL(ch)) { - _line = state.line; - _lineStart = state.lineStart; - _lineIndent = state.lineIndent; - skipSeparationSpace(state, false, -1); +function startCopy (destStat, src, dest, opts, cb) { + if (opts.filter) return handleFilter(getStats, destStat, src, dest, opts, cb) + return getStats(destStat, src, dest, opts, cb) +} - if (state.lineIndent >= nodeIndent) { - hasPendingContent = true; - ch = state.input.charCodeAt(state.position); - continue; - } else { - state.position = captureEnd; - state.line = _line; - state.lineStart = _lineStart; - state.lineIndent = _lineIndent; - break; - } - } +function getStats (destStat, src, dest, opts, cb) { + const stat = opts.dereference ? 
fs.stat : fs.lstat + stat(src, (err, srcStat) => { + if (err) return cb(err) - if (hasPendingContent) { - captureSegment(state, captureStart, captureEnd, false); - writeFoldedLines(state, state.line - _line); - captureStart = captureEnd = state.position; - hasPendingContent = false; - } + if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts, cb) + else if (srcStat.isFile() || + srcStat.isCharacterDevice() || + srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts, cb) + else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts, cb) + else if (srcStat.isSocket()) return cb(new Error(`Cannot copy a socket file: ${src}`)) + else if (srcStat.isFIFO()) return cb(new Error(`Cannot copy a FIFO pipe: ${src}`)) + return cb(new Error(`Unknown file: ${src}`)) + }) +} - if (!is_WHITE_SPACE(ch)) { - captureEnd = state.position + 1; - } +function onFile (srcStat, destStat, src, dest, opts, cb) { + if (!destStat) return copyFile(srcStat, src, dest, opts, cb) + return mayCopyFile(srcStat, src, dest, opts, cb) +} - ch = state.input.charCodeAt(++state.position); - } +function mayCopyFile (srcStat, src, dest, opts, cb) { + if (opts.overwrite) { + fs.unlink(dest, err => { + if (err) return cb(err) + return copyFile(srcStat, src, dest, opts, cb) + }) + } else if (opts.errorOnExist) { + return cb(new Error(`'${dest}' already exists`)) + } else return cb() +} - captureSegment(state, captureStart, captureEnd, false); +function copyFile (srcStat, src, dest, opts, cb) { + fs.copyFile(src, dest, err => { + if (err) return cb(err) + if (opts.preserveTimestamps) return handleTimestampsAndMode(srcStat.mode, src, dest, cb) + return setDestMode(dest, srcStat.mode, cb) + }) +} - if (state.result) { - return true; +function handleTimestampsAndMode (srcMode, src, dest, cb) { + // Make sure the file is writable before setting the timestamp + // otherwise open fails with EPERM when invoked with 'r+' + // (through utimes call) + if (fileIsNotWritable(srcMode)) { + return makeFileWritable(dest, srcMode, err => { + if (err) return cb(err) + return setDestTimestampsAndMode(srcMode, src, dest, cb) + }) } + return setDestTimestampsAndMode(srcMode, src, dest, cb) +} - state.kind = _kind; - state.result = _result; - return false; +function fileIsNotWritable (srcMode) { + return (srcMode & 0o200) === 0 } -function readSingleQuotedScalar(state, nodeIndent) { - var ch, - captureStart, captureEnd; +function makeFileWritable (dest, srcMode, cb) { + return setDestMode(dest, srcMode | 0o200, cb) +} - ch = state.input.charCodeAt(state.position); +function setDestTimestampsAndMode (srcMode, src, dest, cb) { + setDestTimestamps(src, dest, err => { + if (err) return cb(err) + return setDestMode(dest, srcMode, cb) + }) +} - if (ch !== 0x27/* ' */) { - return false; - } +function setDestMode (dest, srcMode, cb) { + return fs.chmod(dest, srcMode, cb) +} - state.kind = 'scalar'; - state.result = ''; - state.position++; - captureStart = captureEnd = state.position; +function setDestTimestamps (src, dest, cb) { + // The initial srcStat.atime cannot be trusted + // because it is modified by the read(2) system call + // (See https://nodejs.org/api/fs.html#fs_stat_time_values) + fs.stat(src, (err, updatedSrcStat) => { + if (err) return cb(err) + return utimesMillis(dest, updatedSrcStat.atime, updatedSrcStat.mtime, cb) + }) +} - while ((ch = state.input.charCodeAt(state.position)) !== 0) { - if (ch === 0x27/* ' */) { - captureSegment(state, captureStart, state.position, true); - ch = 
state.input.charCodeAt(++state.position); +function onDir (srcStat, destStat, src, dest, opts, cb) { + if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts, cb) + return copyDir(src, dest, opts, cb) +} - if (ch === 0x27/* ' */) { - captureStart = state.position; - state.position++; - captureEnd = state.position; - } else { - return true; - } +function mkDirAndCopy (srcMode, src, dest, opts, cb) { + fs.mkdir(dest, err => { + if (err) return cb(err) + copyDir(src, dest, opts, err => { + if (err) return cb(err) + return setDestMode(dest, srcMode, cb) + }) + }) +} - } else if (is_EOL(ch)) { - captureSegment(state, captureStart, captureEnd, true); - writeFoldedLines(state, skipSeparationSpace(state, false, nodeIndent)); - captureStart = captureEnd = state.position; +function copyDir (src, dest, opts, cb) { + fs.readdir(src, (err, items) => { + if (err) return cb(err) + return copyDirItems(items, src, dest, opts, cb) + }) +} - } else if (state.position === state.lineStart && testDocumentSeparator(state)) { - throwError(state, 'unexpected end of the document within a single quoted scalar'); +function copyDirItems (items, src, dest, opts, cb) { + const item = items.pop() + if (!item) return cb() + return copyDirItem(items, item, src, dest, opts, cb) +} + +function copyDirItem (items, item, src, dest, opts, cb) { + const srcItem = path.join(src, item) + const destItem = path.join(dest, item) + stat.checkPaths(srcItem, destItem, 'copy', opts, (err, stats) => { + if (err) return cb(err) + const { destStat } = stats + startCopy(destStat, srcItem, destItem, opts, err => { + if (err) return cb(err) + return copyDirItems(items, src, dest, opts, cb) + }) + }) +} + +function onLink (destStat, src, dest, opts, cb) { + fs.readlink(src, (err, resolvedSrc) => { + if (err) return cb(err) + if (opts.dereference) { + resolvedSrc = path.resolve(process.cwd(), resolvedSrc) + } + if (!destStat) { + return fs.symlink(resolvedSrc, dest, cb) } else { - state.position++; - captureEnd = state.position; - } - } + fs.readlink(dest, (err, resolvedDest) => { + if (err) { + // dest exists and is a regular file or directory, + // Windows may throw UNKNOWN error. If dest already exists, + // fs throws error anyway, so no need to guard against it here. + if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlink(resolvedSrc, dest, cb) + return cb(err) + } + if (opts.dereference) { + resolvedDest = path.resolve(process.cwd(), resolvedDest) + } + if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) { + return cb(new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`)) + } - throwError(state, 'unexpected end of the stream within a single quoted scalar'); + // do not copy if src is a subdir of dest since unlinking + // dest in this case would result in removing src contents + // and therefore a broken symlink would be created. 
+ if (destStat.isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) { + return cb(new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`)) + } + return copyLink(resolvedSrc, dest, cb) + }) + } + }) } -function readDoubleQuotedScalar(state, nodeIndent) { - var captureStart, - captureEnd, - hexLength, - hexResult, - tmp, - ch; - - ch = state.input.charCodeAt(state.position); - - if (ch !== 0x22/* " */) { - return false; - } +function copyLink (resolvedSrc, dest, cb) { + fs.unlink(dest, err => { + if (err) return cb(err) + return fs.symlink(resolvedSrc, dest, cb) + }) +} - state.kind = 'scalar'; - state.result = ''; - state.position++; - captureStart = captureEnd = state.position; +module.exports = copy - while ((ch = state.input.charCodeAt(state.position)) !== 0) { - if (ch === 0x22/* " */) { - captureSegment(state, captureStart, state.position, true); - state.position++; - return true; - } else if (ch === 0x5C/* \ */) { - captureSegment(state, captureStart, state.position, true); - ch = state.input.charCodeAt(++state.position); +/***/ }), - if (is_EOL(ch)) { - skipSeparationSpace(state, false, nodeIndent); +/***/ 1335: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // TODO: rework to inline fn with no type cast? - } else if (ch < 256 && simpleEscapeCheck[ch]) { - state.result += simpleEscapeMap[ch]; - state.position++; +"use strict"; - } else if ((tmp = escapedHexLen(ch)) > 0) { - hexLength = tmp; - hexResult = 0; - for (; hexLength > 0; hexLength--) { - ch = state.input.charCodeAt(++state.position); +const u = __nccwpck_require__(1463).fromCallback +module.exports = { + copy: u(__nccwpck_require__(8834)) +} - if ((tmp = fromHexCode(ch)) >= 0) { - hexResult = (hexResult << 4) + tmp; - } else { - throwError(state, 'expected hexadecimal character'); - } - } +/***/ }), - state.result += charFromCodepoint(hexResult); +/***/ 6970: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - state.position++; +"use strict"; - } else { - throwError(state, 'unknown escape sequence'); - } - captureStart = captureEnd = state.position; +const u = __nccwpck_require__(1463).fromPromise +const fs = __nccwpck_require__(1176) +const path = __nccwpck_require__(5622) +const mkdir = __nccwpck_require__(2915) +const remove = __nccwpck_require__(7357) - } else if (is_EOL(ch)) { - captureSegment(state, captureStart, captureEnd, true); - writeFoldedLines(state, skipSeparationSpace(state, false, nodeIndent)); - captureStart = captureEnd = state.position; +const emptyDir = u(async function emptyDir (dir) { + let items + try { + items = await fs.readdir(dir) + } catch { + return mkdir.mkdirs(dir) + } - } else if (state.position === state.lineStart && testDocumentSeparator(state)) { - throwError(state, 'unexpected end of the document within a double quoted scalar'); + return Promise.all(items.map(item => remove.remove(path.join(dir, item)))) +}) - } else { - state.position++; - captureEnd = state.position; - } +function emptyDirSync (dir) { + let items + try { + items = fs.readdirSync(dir) + } catch { + return mkdir.mkdirsSync(dir) } - throwError(state, 'unexpected end of the stream within a double quoted scalar'); + items.forEach(item => { + item = path.join(dir, item) + remove.removeSync(item) + }) } -function readFlowCollection(state, nodeIndent) { - var readNext = true, - _line, - _lineStart, - _pos, - _tag = state.tag, - _result, - _anchor = state.anchor, - following, - terminator, - isPair, - isExplicitPair, - isMapping, - overridableKeys = 
Object.create(null), - keyNode, - keyTag, - valueNode, - ch; - - ch = state.input.charCodeAt(state.position); +module.exports = { + emptyDirSync, + emptydirSync: emptyDirSync, + emptyDir, + emptydir: emptyDir +} - if (ch === 0x5B/* [ */) { - terminator = 0x5D;/* ] */ - isMapping = false; - _result = []; - } else if (ch === 0x7B/* { */) { - terminator = 0x7D;/* } */ - isMapping = true; - _result = {}; - } else { - return false; - } - if (state.anchor !== null) { - state.anchorMap[state.anchor] = _result; - } +/***/ }), - ch = state.input.charCodeAt(++state.position); +/***/ 2164: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - while (ch !== 0) { - skipSeparationSpace(state, true, nodeIndent); +"use strict"; - ch = state.input.charCodeAt(state.position); - if (ch === terminator) { - state.position++; - state.tag = _tag; - state.anchor = _anchor; - state.kind = isMapping ? 'mapping' : 'sequence'; - state.result = _result; - return true; - } else if (!readNext) { - throwError(state, 'missed comma between flow collection entries'); - } else if (ch === 0x2C/* , */) { - // "flow collection entries can never be completely empty", as per YAML 1.2, section 7.4 - throwError(state, "expected the node content, but found ','"); - } +const u = __nccwpck_require__(1463).fromCallback +const path = __nccwpck_require__(5622) +const fs = __nccwpck_require__(7758) +const mkdir = __nccwpck_require__(2915) - keyTag = keyNode = valueNode = null; - isPair = isExplicitPair = false; +function createFile (file, callback) { + function makeFile () { + fs.writeFile(file, '', err => { + if (err) return callback(err) + callback() + }) + } - if (ch === 0x3F/* ? */) { - following = state.input.charCodeAt(state.position + 1); + fs.stat(file, (err, stats) => { // eslint-disable-line handle-callback-err + if (!err && stats.isFile()) return callback() + const dir = path.dirname(file) + fs.stat(dir, (err, stats) => { + if (err) { + // if the directory doesn't exist, make it + if (err.code === 'ENOENT') { + return mkdir.mkdirs(dir, err => { + if (err) return callback(err) + makeFile() + }) + } + return callback(err) + } - if (is_WS_OR_EOL(following)) { - isPair = isExplicitPair = true; - state.position++; - skipSeparationSpace(state, true, nodeIndent); + if (stats.isDirectory()) makeFile() + else { + // parent is not a directory + // This is just to cause an internal ENOTDIR error to be thrown + fs.readdir(dir, err => { + if (err) return callback(err) + }) } + }) + }) +} + +function createFileSync (file) { + let stats + try { + stats = fs.statSync(file) + } catch {} + if (stats && stats.isFile()) return + + const dir = path.dirname(file) + try { + if (!fs.statSync(dir).isDirectory()) { + // parent is not a directory + // This is just to cause an internal ENOTDIR error to be thrown + fs.readdirSync(dir) } + } catch (err) { + // If the stat call above failed because the directory doesn't exist, create it + if (err && err.code === 'ENOENT') mkdir.mkdirsSync(dir) + else throw err + } - _line = state.line; // Save the current line. 
- _lineStart = state.lineStart; - _pos = state.position; - composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true); - keyTag = state.tag; - keyNode = state.result; - skipSeparationSpace(state, true, nodeIndent); + fs.writeFileSync(file, '') +} - ch = state.input.charCodeAt(state.position); +module.exports = { + createFile: u(createFile), + createFileSync +} - if ((isExplicitPair || state.line === _line) && ch === 0x3A/* : */) { - isPair = true; - ch = state.input.charCodeAt(++state.position); - skipSeparationSpace(state, true, nodeIndent); - composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true); - valueNode = state.result; - } - if (isMapping) { - storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, _line, _lineStart, _pos); - } else if (isPair) { - _result.push(storeMappingPair(state, null, overridableKeys, keyTag, keyNode, valueNode, _line, _lineStart, _pos)); - } else { - _result.push(keyNode); - } +/***/ }), - skipSeparationSpace(state, true, nodeIndent); +/***/ 55: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - ch = state.input.charCodeAt(state.position); +"use strict"; - if (ch === 0x2C/* , */) { - readNext = true; - ch = state.input.charCodeAt(++state.position); - } else { - readNext = false; - } - } - throwError(state, 'unexpected end of the stream within a flow collection'); +const file = __nccwpck_require__(2164) +const link = __nccwpck_require__(3797) +const symlink = __nccwpck_require__(2549) + +module.exports = { + // file + createFile: file.createFile, + createFileSync: file.createFileSync, + ensureFile: file.createFile, + ensureFileSync: file.createFileSync, + // link + createLink: link.createLink, + createLinkSync: link.createLinkSync, + ensureLink: link.createLink, + ensureLinkSync: link.createLinkSync, + // symlink + createSymlink: symlink.createSymlink, + createSymlinkSync: symlink.createSymlinkSync, + ensureSymlink: symlink.createSymlink, + ensureSymlinkSync: symlink.createSymlinkSync } -function readBlockScalar(state, nodeIndent) { - var captureStart, - folding, - chomping = CHOMPING_CLIP, - didReadContent = false, - detectedIndent = false, - textIndent = nodeIndent, - emptyLines = 0, - atMoreIndented = false, - tmp, - ch; - ch = state.input.charCodeAt(state.position); +/***/ }), - if (ch === 0x7C/* | */) { - folding = false; - } else if (ch === 0x3E/* > */) { - folding = true; - } else { - return false; - } +/***/ 3797: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - state.kind = 'scalar'; - state.result = ''; +"use strict"; - while (ch !== 0) { - ch = state.input.charCodeAt(++state.position); - if (ch === 0x2B/* + */ || ch === 0x2D/* - */) { - if (CHOMPING_CLIP === chomping) { - chomping = (ch === 0x2B/* + */) ? 
CHOMPING_KEEP : CHOMPING_STRIP; - } else { - throwError(state, 'repeat of a chomping mode identifier'); - } +const u = __nccwpck_require__(1463).fromCallback +const path = __nccwpck_require__(5622) +const fs = __nccwpck_require__(7758) +const mkdir = __nccwpck_require__(2915) +const pathExists = __nccwpck_require__(3835).pathExists +const { areIdentical } = __nccwpck_require__(3901) - } else if ((tmp = fromDecimalCode(ch)) >= 0) { - if (tmp === 0) { - throwError(state, 'bad explicit indentation width of a block scalar; it cannot be less than one'); - } else if (!detectedIndent) { - textIndent = nodeIndent + tmp - 1; - detectedIndent = true; - } else { - throwError(state, 'repeat of an indentation width identifier'); +function createLink (srcpath, dstpath, callback) { + function makeLink (srcpath, dstpath) { + fs.link(srcpath, dstpath, err => { + if (err) return callback(err) + callback(null) + }) + } + + fs.lstat(dstpath, (_, dstStat) => { + fs.lstat(srcpath, (err, srcStat) => { + if (err) { + err.message = err.message.replace('lstat', 'ensureLink') + return callback(err) } + if (dstStat && areIdentical(srcStat, dstStat)) return callback(null) - } else { - break; - } - } + const dir = path.dirname(dstpath) + pathExists(dir, (err, dirExists) => { + if (err) return callback(err) + if (dirExists) return makeLink(srcpath, dstpath) + mkdir.mkdirs(dir, err => { + if (err) return callback(err) + makeLink(srcpath, dstpath) + }) + }) + }) + }) +} - if (is_WHITE_SPACE(ch)) { - do { ch = state.input.charCodeAt(++state.position); } - while (is_WHITE_SPACE(ch)); +function createLinkSync (srcpath, dstpath) { + let dstStat + try { + dstStat = fs.lstatSync(dstpath) + } catch {} - if (ch === 0x23/* # */) { - do { ch = state.input.charCodeAt(++state.position); } - while (!is_EOL(ch) && (ch !== 0)); - } + try { + const srcStat = fs.lstatSync(srcpath) + if (dstStat && areIdentical(srcStat, dstStat)) return + } catch (err) { + err.message = err.message.replace('lstat', 'ensureLink') + throw err } - while (ch !== 0) { - readLineBreak(state); - state.lineIndent = 0; + const dir = path.dirname(dstpath) + const dirExists = fs.existsSync(dir) + if (dirExists) return fs.linkSync(srcpath, dstpath) + mkdir.mkdirsSync(dir) - ch = state.input.charCodeAt(state.position); + return fs.linkSync(srcpath, dstpath) +} - while ((!detectedIndent || state.lineIndent < textIndent) && - (ch === 0x20/* Space */)) { - state.lineIndent++; - ch = state.input.charCodeAt(++state.position); - } +module.exports = { + createLink: u(createLink), + createLinkSync +} - if (!detectedIndent && state.lineIndent > textIndent) { - textIndent = state.lineIndent; - } - if (is_EOL(ch)) { - emptyLines++; - continue; - } +/***/ }), - // End of the scalar. - if (state.lineIndent < textIndent) { +/***/ 3727: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // Perform the chomping. - if (chomping === CHOMPING_KEEP) { - state.result += common.repeat('\n', didReadContent ? 1 + emptyLines : emptyLines); - } else if (chomping === CHOMPING_CLIP) { - if (didReadContent) { // i.e. only if the scalar is not empty. - state.result += '\n'; - } +"use strict"; + + +const path = __nccwpck_require__(5622) +const fs = __nccwpck_require__(7758) +const pathExists = __nccwpck_require__(3835).pathExists + +/** + * Function that returns two types of paths, one relative to symlink, and one + * relative to the current working directory. Checks if path is absolute or + * relative. 
If the path is relative, this function checks if the path is + * relative to symlink or relative to current working directory. This is an + * initiative to find a smarter `srcpath` to supply when building symlinks. + * This allows you to determine which path to use out of one of three possible + * types of source paths. The first is an absolute path. This is detected by + * `path.isAbsolute()`. When an absolute path is provided, it is checked to + * see if it exists. If it does it's used, if not an error is returned + * (callback)/ thrown (sync). The other two options for `srcpath` are a + * relative url. By default Node's `fs.symlink` works by creating a symlink + * using `dstpath` and expects the `srcpath` to be relative to the newly + * created symlink. If you provide a `srcpath` that does not exist on the file + * system it results in a broken symlink. To minimize this, the function + * checks to see if the 'relative to symlink' source file exists, and if it + * does it will use it. If it does not, it checks if there's a file that + * exists that is relative to the current working directory, if does its used. + * This preserves the expectations of the original fs.symlink spec and adds + * the ability to pass in `relative to current working direcotry` paths. + */ + +function symlinkPaths (srcpath, dstpath, callback) { + if (path.isAbsolute(srcpath)) { + return fs.lstat(srcpath, (err) => { + if (err) { + err.message = err.message.replace('lstat', 'ensureSymlink') + return callback(err) + } + return callback(null, { + toCwd: srcpath, + toDst: srcpath + }) + }) + } else { + const dstdir = path.dirname(dstpath) + const relativeToDst = path.join(dstdir, srcpath) + return pathExists(relativeToDst, (err, exists) => { + if (err) return callback(err) + if (exists) { + return callback(null, { + toCwd: relativeToDst, + toDst: srcpath + }) + } else { + return fs.lstat(srcpath, (err) => { + if (err) { + err.message = err.message.replace('lstat', 'ensureSymlink') + return callback(err) + } + return callback(null, { + toCwd: srcpath, + toDst: path.relative(dstdir, srcpath) + }) + }) } + }) + } +} - // Break this `while` cycle and go to the funciton's epilogue. - break; +function symlinkPathsSync (srcpath, dstpath) { + let exists + if (path.isAbsolute(srcpath)) { + exists = fs.existsSync(srcpath) + if (!exists) throw new Error('absolute srcpath does not exist') + return { + toCwd: srcpath, + toDst: srcpath + } + } else { + const dstdir = path.dirname(dstpath) + const relativeToDst = path.join(dstdir, srcpath) + exists = fs.existsSync(relativeToDst) + if (exists) { + return { + toCwd: relativeToDst, + toDst: srcpath + } + } else { + exists = fs.existsSync(srcpath) + if (!exists) throw new Error('relative srcpath does not exist') + return { + toCwd: srcpath, + toDst: path.relative(dstdir, srcpath) + } } + } +} - // Folded style: use fancy rules to handle line breaks. - if (folding) { +module.exports = { + symlinkPaths, + symlinkPathsSync +} - // Lines starting with white space characters (more-indented lines) are not folded. - if (is_WHITE_SPACE(ch)) { - atMoreIndented = true; - // except for the first content line (cf. Example 8.1) - state.result += common.repeat('\n', didReadContent ? 1 + emptyLines : emptyLines); - // End of more-indented block. - } else if (atMoreIndented) { - atMoreIndented = false; - state.result += common.repeat('\n', emptyLines + 1); +/***/ }), - // Just one line break - perceive as the same line. - } else if (emptyLines === 0) { - if (didReadContent) { // i.e. 
only if we have already read some scalar content. - state.result += ' '; - } +/***/ 8254: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // Several line breaks - perceive as different lines. - } else { - state.result += common.repeat('\n', emptyLines); - } +"use strict"; - // Literal style: just add exact number of line breaks between content lines. - } else { - // Keep all line breaks except the header line break. - state.result += common.repeat('\n', didReadContent ? 1 + emptyLines : emptyLines); - } - didReadContent = true; - detectedIndent = true; - emptyLines = 0; - captureStart = state.position; +const fs = __nccwpck_require__(7758) - while (!is_EOL(ch) && (ch !== 0)) { - ch = state.input.charCodeAt(++state.position); - } +function symlinkType (srcpath, type, callback) { + callback = (typeof type === 'function') ? type : callback + type = (typeof type === 'function') ? false : type + if (type) return callback(null, type) + fs.lstat(srcpath, (err, stats) => { + if (err) return callback(null, 'file') + type = (stats && stats.isDirectory()) ? 'dir' : 'file' + callback(null, type) + }) +} - captureSegment(state, captureStart, state.position, false); - } +function symlinkTypeSync (srcpath, type) { + let stats - return true; + if (type) return type + try { + stats = fs.lstatSync(srcpath) + } catch { + return 'file' + } + return (stats && stats.isDirectory()) ? 'dir' : 'file' } -function readBlockSequence(state, nodeIndent) { - var _line, - _tag = state.tag, - _anchor = state.anchor, - _result = [], - following, - detected = false, - ch; - - // there is a leading tab before this token, so it can't be a block sequence/mapping; - // it can still be flow sequence/mapping or a scalar - if (state.firstTabInLine !== -1) return false; +module.exports = { + symlinkType, + symlinkTypeSync +} - if (state.anchor !== null) { - state.anchorMap[state.anchor] = _result; - } - ch = state.input.charCodeAt(state.position); +/***/ }), - while (ch !== 0) { - if (state.firstTabInLine !== -1) { - state.position = state.firstTabInLine; - throwError(state, 'tab characters must not be used in indentation'); - } +/***/ 2549: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (ch !== 0x2D/* - */) { - break; - } +"use strict"; - following = state.input.charCodeAt(state.position + 1); - if (!is_WS_OR_EOL(following)) { - break; - } +const u = __nccwpck_require__(1463).fromCallback +const path = __nccwpck_require__(5622) +const fs = __nccwpck_require__(1176) +const _mkdirs = __nccwpck_require__(2915) +const mkdirs = _mkdirs.mkdirs +const mkdirsSync = _mkdirs.mkdirsSync - detected = true; - state.position++; +const _symlinkPaths = __nccwpck_require__(3727) +const symlinkPaths = _symlinkPaths.symlinkPaths +const symlinkPathsSync = _symlinkPaths.symlinkPathsSync - if (skipSeparationSpace(state, true, -1)) { - if (state.lineIndent <= nodeIndent) { - _result.push(null); - ch = state.input.charCodeAt(state.position); - continue; - } - } +const _symlinkType = __nccwpck_require__(8254) +const symlinkType = _symlinkType.symlinkType +const symlinkTypeSync = _symlinkType.symlinkTypeSync - _line = state.line; - composeNode(state, nodeIndent, CONTEXT_BLOCK_IN, false, true); - _result.push(state.result); - skipSeparationSpace(state, true, -1); +const pathExists = __nccwpck_require__(3835).pathExists - ch = state.input.charCodeAt(state.position); +const { areIdentical } = __nccwpck_require__(3901) - if ((state.line === _line || state.lineIndent > nodeIndent) && (ch !== 0)) { - 
throwError(state, 'bad indentation of a sequence entry'); - } else if (state.lineIndent < nodeIndent) { - break; - } - } +function createSymlink (srcpath, dstpath, type, callback) { + callback = (typeof type === 'function') ? type : callback + type = (typeof type === 'function') ? false : type - if (detected) { - state.tag = _tag; - state.anchor = _anchor; - state.kind = 'sequence'; - state.result = _result; - return true; - } - return false; + fs.lstat(dstpath, (err, stats) => { + if (!err && stats.isSymbolicLink()) { + Promise.all([ + fs.stat(srcpath), + fs.stat(dstpath) + ]).then(([srcStat, dstStat]) => { + if (areIdentical(srcStat, dstStat)) return callback(null) + _createSymlink(srcpath, dstpath, type, callback) + }) + } else _createSymlink(srcpath, dstpath, type, callback) + }) } -function readBlockMapping(state, nodeIndent, flowIndent) { - var following, - allowCompact, - _line, - _keyLine, - _keyLineStart, - _keyPos, - _tag = state.tag, - _anchor = state.anchor, - _result = {}, - overridableKeys = Object.create(null), - keyTag = null, - keyNode = null, - valueNode = null, - atExplicitKey = false, - detected = false, - ch; - - // there is a leading tab before this token, so it can't be a block sequence/mapping; - // it can still be flow sequence/mapping or a scalar - if (state.firstTabInLine !== -1) return false; +function _createSymlink (srcpath, dstpath, type, callback) { + symlinkPaths(srcpath, dstpath, (err, relative) => { + if (err) return callback(err) + srcpath = relative.toDst + symlinkType(relative.toCwd, type, (err, type) => { + if (err) return callback(err) + const dir = path.dirname(dstpath) + pathExists(dir, (err, dirExists) => { + if (err) return callback(err) + if (dirExists) return fs.symlink(srcpath, dstpath, type, callback) + mkdirs(dir, err => { + if (err) return callback(err) + fs.symlink(srcpath, dstpath, type, callback) + }) + }) + }) + }) +} - if (state.anchor !== null) { - state.anchorMap[state.anchor] = _result; +function createSymlinkSync (srcpath, dstpath, type) { + let stats + try { + stats = fs.lstatSync(dstpath) + } catch {} + if (stats && stats.isSymbolicLink()) { + const srcStat = fs.statSync(srcpath) + const dstStat = fs.statSync(dstpath) + if (areIdentical(srcStat, dstStat)) return } - ch = state.input.charCodeAt(state.position); + const relative = symlinkPathsSync(srcpath, dstpath) + srcpath = relative.toDst + type = symlinkTypeSync(relative.toCwd, type) + const dir = path.dirname(dstpath) + const exists = fs.existsSync(dir) + if (exists) return fs.symlinkSync(srcpath, dstpath, type) + mkdirsSync(dir) + return fs.symlinkSync(srcpath, dstpath, type) +} - while (ch !== 0) { - if (!atExplicitKey && state.firstTabInLine !== -1) { - state.position = state.firstTabInLine; - throwError(state, 'tab characters must not be used in indentation'); - } +module.exports = { + createSymlink: u(createSymlink), + createSymlinkSync +} - following = state.input.charCodeAt(state.position + 1); - _line = state.line; // Save the current line. - // - // Explicit notation case. There are two separate blocks: - // first for the key (denoted by "?") and second for the value (denoted by ":") - // - if ((ch === 0x3F/* ? */ || ch === 0x3A/* : */) && is_WS_OR_EOL(following)) { +/***/ }), - if (ch === 0x3F/* ? 
*/) { - if (atExplicitKey) { - storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos); - keyTag = keyNode = valueNode = null; - } +/***/ 1176: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - detected = true; - atExplicitKey = true; - allowCompact = true; +"use strict"; - } else if (atExplicitKey) { - // i.e. 0x3A/* : */ === character after the explicit key. - atExplicitKey = false; - allowCompact = true; +// This is adapted from https://github.com/normalize/mz +// Copyright (c) 2014-2016 Jonathan Ong me@jongleberry.com and Contributors +const u = __nccwpck_require__(1463).fromCallback +const fs = __nccwpck_require__(7758) - } else { - throwError(state, 'incomplete explicit mapping pair; a key node is missed; or followed by a non-tabulated empty line'); - } +const api = [ + 'access', + 'appendFile', + 'chmod', + 'chown', + 'close', + 'copyFile', + 'fchmod', + 'fchown', + 'fdatasync', + 'fstat', + 'fsync', + 'ftruncate', + 'futimes', + 'lchmod', + 'lchown', + 'link', + 'lstat', + 'mkdir', + 'mkdtemp', + 'open', + 'opendir', + 'readdir', + 'readFile', + 'readlink', + 'realpath', + 'rename', + 'rm', + 'rmdir', + 'stat', + 'symlink', + 'truncate', + 'unlink', + 'utimes', + 'writeFile' +].filter(key => { + // Some commands are not available on some systems. Ex: + // fs.opendir was added in Node.js v12.12.0 + // fs.rm was added in Node.js v14.14.0 + // fs.lchown is not available on at least some Linux + return typeof fs[key] === 'function' +}) - state.position += 1; - ch = following; +// Export cloned fs: +Object.assign(exports, fs) - // - // Implicit notation case. Flow-style node as the key first, then ":", and the value. - // - } else { - _keyLine = state.line; - _keyLineStart = state.lineStart; - _keyPos = state.position; +// Universalify async methods: +api.forEach(method => { + exports[method] = u(fs[method]) +}) +exports.realpath.native = u(fs.realpath.native) - if (!composeNode(state, flowIndent, CONTEXT_FLOW_OUT, false, true)) { - // Neither implicit nor explicit notation. - // Reading is done. Go to the epilogue. 
- break; - } +// We differ from mz/fs in that we still ship the old, broken, fs.exists() +// since we are a drop-in replacement for the native module +exports.exists = function (filename, callback) { + if (typeof callback === 'function') { + return fs.exists(filename, callback) + } + return new Promise(resolve => { + return fs.exists(filename, resolve) + }) +} - if (state.line === _line) { - ch = state.input.charCodeAt(state.position); +// fs.read(), fs.write(), & fs.writev() need special treatment due to multiple callback args - while (is_WHITE_SPACE(ch)) { - ch = state.input.charCodeAt(++state.position); - } +exports.read = function (fd, buffer, offset, length, position, callback) { + if (typeof callback === 'function') { + return fs.read(fd, buffer, offset, length, position, callback) + } + return new Promise((resolve, reject) => { + fs.read(fd, buffer, offset, length, position, (err, bytesRead, buffer) => { + if (err) return reject(err) + resolve({ bytesRead, buffer }) + }) + }) +} - if (ch === 0x3A/* : */) { - ch = state.input.charCodeAt(++state.position); +// Function signature can be +// fs.write(fd, buffer[, offset[, length[, position]]], callback) +// OR +// fs.write(fd, string[, position[, encoding]], callback) +// We need to handle both cases, so we use ...args +exports.write = function (fd, buffer, ...args) { + if (typeof args[args.length - 1] === 'function') { + return fs.write(fd, buffer, ...args) + } - if (!is_WS_OR_EOL(ch)) { - throwError(state, 'a whitespace character is expected after the key-value separator within a block mapping'); - } + return new Promise((resolve, reject) => { + fs.write(fd, buffer, ...args, (err, bytesWritten, buffer) => { + if (err) return reject(err) + resolve({ bytesWritten, buffer }) + }) + }) +} - if (atExplicitKey) { - storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos); - keyTag = keyNode = valueNode = null; - } +// fs.writev only available in Node v12.9.0+ +if (typeof fs.writev === 'function') { + // Function signature is + // s.writev(fd, buffers[, position], callback) + // We need to handle the optional arg, so we use ...args + exports.writev = function (fd, buffers, ...args) { + if (typeof args[args.length - 1] === 'function') { + return fs.writev(fd, buffers, ...args) + } - detected = true; - atExplicitKey = false; - allowCompact = false; - keyTag = state.tag; - keyNode = state.result; + return new Promise((resolve, reject) => { + fs.writev(fd, buffers, ...args, (err, bytesWritten, buffers) => { + if (err) return reject(err) + resolve({ bytesWritten, buffers }) + }) + }) + } +} - } else if (detected) { - throwError(state, 'can not read an implicit mapping pair; a colon is missed'); - } else { - state.tag = _tag; - state.anchor = _anchor; - return true; // Keep the result of `composeNode`. - } +/***/ }), - } else if (detected) { - throwError(state, 'can not read a block mapping entry; a multiline key may not be an implicit key'); +/***/ 5630: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - } else { - state.tag = _tag; - state.anchor = _anchor; - return true; // Keep the result of `composeNode`. - } - } +"use strict"; - // - // Common reading code for both explicit and implicit notations. 
- // - if (state.line === _line || state.lineIndent > nodeIndent) { - if (atExplicitKey) { - _keyLine = state.line; - _keyLineStart = state.lineStart; - _keyPos = state.position; - } - if (composeNode(state, nodeIndent, CONTEXT_BLOCK_OUT, true, allowCompact)) { - if (atExplicitKey) { - keyNode = state.result; - } else { - valueNode = state.result; - } - } +module.exports = { + // Export promiseified graceful-fs: + ...__nccwpck_require__(1176), + // Export extra methods: + ...__nccwpck_require__(1135), + ...__nccwpck_require__(1335), + ...__nccwpck_require__(6970), + ...__nccwpck_require__(55), + ...__nccwpck_require__(213), + ...__nccwpck_require__(2915), + ...__nccwpck_require__(9665), + ...__nccwpck_require__(1497), + ...__nccwpck_require__(6570), + ...__nccwpck_require__(3835), + ...__nccwpck_require__(7357) +} - if (!atExplicitKey) { - storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, _keyLine, _keyLineStart, _keyPos); - keyTag = keyNode = valueNode = null; - } - skipSeparationSpace(state, true, -1); - ch = state.input.charCodeAt(state.position); - } +/***/ }), - if ((state.line === _line || state.lineIndent > nodeIndent) && (ch !== 0)) { - throwError(state, 'bad indentation of a mapping entry'); - } else if (state.lineIndent < nodeIndent) { - break; - } - } +/***/ 213: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // - // Epilogue. - // +"use strict"; - // Special case: last mapping's node contains only the key in explicit notation. - if (atExplicitKey) { - storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos); - } - // Expose the resulting mapping. - if (detected) { - state.tag = _tag; - state.anchor = _anchor; - state.kind = 'mapping'; - state.result = _result; - } +const u = __nccwpck_require__(1463).fromPromise +const jsonFile = __nccwpck_require__(8970) - return detected; -} +jsonFile.outputJson = u(__nccwpck_require__(531)) +jsonFile.outputJsonSync = __nccwpck_require__(9421) +// aliases +jsonFile.outputJSON = jsonFile.outputJson +jsonFile.outputJSONSync = jsonFile.outputJsonSync +jsonFile.writeJSON = jsonFile.writeJson +jsonFile.writeJSONSync = jsonFile.writeJsonSync +jsonFile.readJSON = jsonFile.readJson +jsonFile.readJSONSync = jsonFile.readJsonSync -function readTagProperty(state) { - var _position, - isVerbatim = false, - isNamed = false, - tagHandle, - tagName, - ch; +module.exports = jsonFile - ch = state.input.charCodeAt(state.position); - if (ch !== 0x21/* ! */) return false; +/***/ }), - if (state.tag !== null) { - throwError(state, 'duplication of a tag property'); - } +/***/ 8970: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - ch = state.input.charCodeAt(++state.position); +"use strict"; - if (ch === 0x3C/* < */) { - isVerbatim = true; - ch = state.input.charCodeAt(++state.position); - } else if (ch === 0x21/* ! 
*/) { - isNamed = true; - tagHandle = '!!'; - ch = state.input.charCodeAt(++state.position); +const jsonFile = __nccwpck_require__(6160) - } else { - tagHandle = '!'; - } +module.exports = { + // jsonfile exports + readJson: jsonFile.readFile, + readJsonSync: jsonFile.readFileSync, + writeJson: jsonFile.writeFile, + writeJsonSync: jsonFile.writeFileSync +} - _position = state.position; - if (isVerbatim) { - do { ch = state.input.charCodeAt(++state.position); } - while (ch !== 0 && ch !== 0x3E/* > */); +/***/ }), - if (state.position < state.length) { - tagName = state.input.slice(_position, state.position); - ch = state.input.charCodeAt(++state.position); - } else { - throwError(state, 'unexpected end of the stream within a verbatim tag'); - } - } else { - while (ch !== 0 && !is_WS_OR_EOL(ch)) { +/***/ 9421: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (ch === 0x21/* ! */) { - if (!isNamed) { - tagHandle = state.input.slice(_position - 1, state.position + 1); +"use strict"; - if (!PATTERN_TAG_HANDLE.test(tagHandle)) { - throwError(state, 'named tag handle cannot contain such characters'); - } - isNamed = true; - _position = state.position + 1; - } else { - throwError(state, 'tag suffix cannot contain exclamation marks'); - } - } +const { stringify } = __nccwpck_require__(5902) +const { outputFileSync } = __nccwpck_require__(6570) - ch = state.input.charCodeAt(++state.position); - } +function outputJsonSync (file, data, options) { + const str = stringify(data, options) - tagName = state.input.slice(_position, state.position); + outputFileSync(file, str, options) +} - if (PATTERN_FLOW_INDICATORS.test(tagName)) { - throwError(state, 'tag suffix cannot contain flow indicator characters'); - } - } +module.exports = outputJsonSync - if (tagName && !PATTERN_TAG_URI.test(tagName)) { - throwError(state, 'tag name cannot contain such characters: ' + tagName); - } - try { - tagName = decodeURIComponent(tagName); - } catch (err) { - throwError(state, 'tag name is malformed: ' + tagName); - } +/***/ }), - if (isVerbatim) { - state.tag = tagName; +/***/ 531: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - } else if (_hasOwnProperty.call(state.tagMap, tagHandle)) { - state.tag = state.tagMap[tagHandle] + tagName; +"use strict"; - } else if (tagHandle === '!') { - state.tag = '!' 
+ tagName; - } else if (tagHandle === '!!') { - state.tag = 'tag:yaml.org,2002:' + tagName; +const { stringify } = __nccwpck_require__(5902) +const { outputFile } = __nccwpck_require__(6570) - } else { - throwError(state, 'undeclared tag handle "' + tagHandle + '"'); - } +async function outputJson (file, data, options = {}) { + const str = stringify(data, options) - return true; + await outputFile(file, str, options) } -function readAnchorProperty(state) { - var _position, - ch; - - ch = state.input.charCodeAt(state.position); +module.exports = outputJson - if (ch !== 0x26/* & */) return false; - if (state.anchor !== null) { - throwError(state, 'duplication of an anchor property'); - } +/***/ }), - ch = state.input.charCodeAt(++state.position); - _position = state.position; +/***/ 2915: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - while (ch !== 0 && !is_WS_OR_EOL(ch) && !is_FLOW_INDICATOR(ch)) { - ch = state.input.charCodeAt(++state.position); - } +"use strict"; - if (state.position === _position) { - throwError(state, 'name of an anchor node must contain at least one character'); - } +const u = __nccwpck_require__(1463).fromPromise +const { makeDir: _makeDir, makeDirSync } = __nccwpck_require__(2751) +const makeDir = u(_makeDir) - state.anchor = state.input.slice(_position, state.position); - return true; +module.exports = { + mkdirs: makeDir, + mkdirsSync: makeDirSync, + // alias + mkdirp: makeDir, + mkdirpSync: makeDirSync, + ensureDir: makeDir, + ensureDirSync: makeDirSync } -function readAlias(state) { - var _position, alias, - ch; - - ch = state.input.charCodeAt(state.position); - if (ch !== 0x2A/* * */) return false; +/***/ }), - ch = state.input.charCodeAt(++state.position); - _position = state.position; +/***/ 2751: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - while (ch !== 0 && !is_WS_OR_EOL(ch) && !is_FLOW_INDICATOR(ch)) { - ch = state.input.charCodeAt(++state.position); - } +"use strict"; - if (state.position === _position) { - throwError(state, 'name of an alias node must contain at least one character'); - } +const fs = __nccwpck_require__(1176) +const { checkPath } = __nccwpck_require__(9907) - alias = state.input.slice(_position, state.position); +const getMode = options => { + const defaults = { mode: 0o777 } + if (typeof options === 'number') return options + return ({ ...defaults, ...options }).mode +} - if (!_hasOwnProperty.call(state.anchorMap, alias)) { - throwError(state, 'unidentified alias "' + alias + '"'); - } +module.exports.makeDir = async (dir, options) => { + checkPath(dir) - state.result = state.anchorMap[alias]; - skipSeparationSpace(state, true, -1); - return true; + return fs.mkdir(dir, { + mode: getMode(options), + recursive: true + }) } -function composeNode(state, parentIndent, nodeContext, allowToSeek, allowCompact) { - var allowBlockStyles, - allowBlockScalars, - allowBlockCollections, - indentStatus = 1, // 1: this>parent, 0: this=parent, -1: this { + checkPath(dir) - if (state.listener !== null) { - state.listener('open', state); - } + return fs.mkdirSync(dir, { + mode: getMode(options), + recursive: true + }) +} - state.tag = null; - state.anchor = null; - state.kind = null; - state.result = null; - allowBlockStyles = allowBlockScalars = allowBlockCollections = - CONTEXT_BLOCK_OUT === nodeContext || - CONTEXT_BLOCK_IN === nodeContext; +/***/ }), - if (allowToSeek) { - if (skipSeparationSpace(state, true, -1)) { - atNewLine = true; +/***/ 9907: +/***/ ((module, __unused_webpack_exports, 
__nccwpck_require__) => { - if (state.lineIndent > parentIndent) { - indentStatus = 1; - } else if (state.lineIndent === parentIndent) { - indentStatus = 0; - } else if (state.lineIndent < parentIndent) { - indentStatus = -1; - } - } - } +"use strict"; +// Adapted from https://github.com/sindresorhus/make-dir +// Copyright (c) Sindre Sorhus (sindresorhus.com) +// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: +// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - if (indentStatus === 1) { - while (readTagProperty(state) || readAnchorProperty(state)) { - if (skipSeparationSpace(state, true, -1)) { - atNewLine = true; - allowBlockCollections = allowBlockStyles; +const path = __nccwpck_require__(5622) - if (state.lineIndent > parentIndent) { - indentStatus = 1; - } else if (state.lineIndent === parentIndent) { - indentStatus = 0; - } else if (state.lineIndent < parentIndent) { - indentStatus = -1; - } - } else { - allowBlockCollections = false; - } - } - } +// https://github.com/nodejs/node/issues/8987 +// https://github.com/libuv/libuv/pull/1088 +module.exports.checkPath = function checkPath (pth) { + if (process.platform === 'win32') { + const pathHasInvalidWinCharacters = /[<>:"|?*]/.test(pth.replace(path.parse(pth).root, '')) - if (allowBlockCollections) { - allowBlockCollections = atNewLine || allowCompact; + if (pathHasInvalidWinCharacters) { + const error = new Error(`Path contains invalid characters: ${pth}`) + error.code = 'EINVAL' + throw error + } } +} - if (indentStatus === 1 || CONTEXT_BLOCK_OUT === nodeContext) { - if (CONTEXT_FLOW_IN === nodeContext || CONTEXT_FLOW_OUT === nodeContext) { - flowIndent = parentIndent; - } else { - flowIndent = parentIndent + 1; - } - blockIndent = state.position - state.lineStart; +/***/ }), - if (indentStatus === 1) { - if (allowBlockCollections && - (readBlockSequence(state, blockIndent) || - readBlockMapping(state, blockIndent, flowIndent)) || - readFlowCollection(state, flowIndent)) { - hasContent = true; - } else { - if ((allowBlockScalars && readBlockScalar(state, flowIndent)) || - readSingleQuotedScalar(state, flowIndent) || - readDoubleQuotedScalar(state, flowIndent)) { - hasContent = true; +/***/ 9665: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - } else if (readAlias(state)) { - hasContent = true; +"use strict"; - if (state.tag !== null || state.anchor !== null) { - throwError(state, 'alias node should not have any properties'); - } - } else if (readPlainScalar(state, flowIndent, CONTEXT_FLOW_IN === nodeContext)) { - hasContent = true; +module.exports = { + moveSync: __nccwpck_require__(6445) +} - if 
(state.tag === null) { - state.tag = '?'; - } - } - if (state.anchor !== null) { - state.anchorMap[state.anchor] = state.result; - } - } - } else if (indentStatus === 0) { - // Special case: block sequences are allowed to have same indentation level as the parent. - // http://www.yaml.org/spec/1.2/spec.html#id2799784 - hasContent = allowBlockCollections && readBlockSequence(state, blockIndent); - } - } +/***/ }), - if (state.tag === null) { - if (state.anchor !== null) { - state.anchorMap[state.anchor] = state.result; - } +/***/ 6445: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - } else if (state.tag === '?') { - // Implicit resolving is not allowed for non-scalar types, and '?' - // non-specific tag is only automatically assigned to plain scalars. - // - // We only need to check kind conformity in case user explicitly assigns '?' - // tag, for example like this: "! [0]" - // - if (state.result !== null && state.kind !== 'scalar') { - throwError(state, 'unacceptable node kind for ! tag; it should be "scalar", not "' + state.kind + '"'); - } +"use strict"; - for (typeIndex = 0, typeQuantity = state.implicitTypes.length; typeIndex < typeQuantity; typeIndex += 1) { - type = state.implicitTypes[typeIndex]; - if (type.resolve(state.result)) { // `state.result` updated in resolver if matched - state.result = type.construct(state.result); - state.tag = type.tag; - if (state.anchor !== null) { - state.anchorMap[state.anchor] = state.result; - } - break; - } - } - } else if (state.tag !== '!') { - if (_hasOwnProperty.call(state.typeMap[state.kind || 'fallback'], state.tag)) { - type = state.typeMap[state.kind || 'fallback'][state.tag]; - } else { - // looking for multi type - type = null; - typeList = state.typeMap.multi[state.kind || 'fallback']; +const fs = __nccwpck_require__(7758) +const path = __nccwpck_require__(5622) +const copySync = __nccwpck_require__(1135).copySync +const removeSync = __nccwpck_require__(7357).removeSync +const mkdirpSync = __nccwpck_require__(2915).mkdirpSync +const stat = __nccwpck_require__(3901) - for (typeIndex = 0, typeQuantity = typeList.length; typeIndex < typeQuantity; typeIndex += 1) { - if (state.tag.slice(0, typeList[typeIndex].tag.length) === typeList[typeIndex].tag) { - type = typeList[typeIndex]; - break; - } - } - } +function moveSync (src, dest, opts) { + opts = opts || {} + const overwrite = opts.overwrite || opts.clobber || false - if (!type) { - throwError(state, 'unknown tag !<' + state.tag + '>'); - } + const { srcStat, isChangingCase = false } = stat.checkPathsSync(src, dest, 'move', opts) + stat.checkParentPathsSync(src, srcStat, dest, 'move') + if (!isParentRoot(dest)) mkdirpSync(path.dirname(dest)) + return doRename(src, dest, overwrite, isChangingCase) +} - if (state.result !== null && type.kind !== state.kind) { - throwError(state, 'unacceptable node kind for !<' + state.tag + '> tag; it should be "' + type.kind + '", not "' + state.kind + '"'); - } +function isParentRoot (dest) { + const parent = path.dirname(dest) + const parsedPath = path.parse(parent) + return parsedPath.root === parent +} - if (!type.resolve(state.result, state.tag)) { // `state.result` updated in resolver if matched - throwError(state, 'cannot resolve a node with !<' + state.tag + '> explicit tag'); - } else { - state.result = type.construct(state.result, state.tag); - if (state.anchor !== null) { - state.anchorMap[state.anchor] = state.result; - } - } +function doRename (src, dest, overwrite, isChangingCase) { + if (isChangingCase) return 
rename(src, dest, overwrite) + if (overwrite) { + removeSync(dest) + return rename(src, dest, overwrite) } + if (fs.existsSync(dest)) throw new Error('dest already exists.') + return rename(src, dest, overwrite) +} - if (state.listener !== null) { - state.listener('close', state); +function rename (src, dest, overwrite) { + try { + fs.renameSync(src, dest) + } catch (err) { + if (err.code !== 'EXDEV') throw err + return moveAcrossDevice(src, dest, overwrite) } - return state.tag !== null || state.anchor !== null || hasContent; } -function readDocument(state) { - var documentStart = state.position, - _position, - directiveName, - directiveArgs, - hasDirectives = false, - ch; - - state.version = null; - state.checkLineBreaks = state.legacy; - state.tagMap = Object.create(null); - state.anchorMap = Object.create(null); - - while ((ch = state.input.charCodeAt(state.position)) !== 0) { - skipSeparationSpace(state, true, -1); +function moveAcrossDevice (src, dest, overwrite) { + const opts = { + overwrite, + errorOnExist: true + } + copySync(src, dest, opts) + return removeSync(src) +} - ch = state.input.charCodeAt(state.position); +module.exports = moveSync - if (state.lineIndent > 0 || ch !== 0x25/* % */) { - break; - } - hasDirectives = true; - ch = state.input.charCodeAt(++state.position); - _position = state.position; +/***/ }), - while (ch !== 0 && !is_WS_OR_EOL(ch)) { - ch = state.input.charCodeAt(++state.position); - } +/***/ 1497: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - directiveName = state.input.slice(_position, state.position); - directiveArgs = []; +"use strict"; - if (directiveName.length < 1) { - throwError(state, 'directive name must not be less than one character in length'); - } - while (ch !== 0) { - while (is_WHITE_SPACE(ch)) { - ch = state.input.charCodeAt(++state.position); - } +const u = __nccwpck_require__(1463).fromCallback +module.exports = { + move: u(__nccwpck_require__(2231)) +} - if (ch === 0x23/* # */) { - do { ch = state.input.charCodeAt(++state.position); } - while (ch !== 0 && !is_EOL(ch)); - break; - } - if (is_EOL(ch)) break; +/***/ }), - _position = state.position; +/***/ 2231: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - while (ch !== 0 && !is_WS_OR_EOL(ch)) { - ch = state.input.charCodeAt(++state.position); - } +"use strict"; - directiveArgs.push(state.input.slice(_position, state.position)); - } - if (ch !== 0) readLineBreak(state); +const fs = __nccwpck_require__(7758) +const path = __nccwpck_require__(5622) +const copy = __nccwpck_require__(1335).copy +const remove = __nccwpck_require__(7357).remove +const mkdirp = __nccwpck_require__(2915).mkdirp +const pathExists = __nccwpck_require__(3835).pathExists +const stat = __nccwpck_require__(3901) - if (_hasOwnProperty.call(directiveHandlers, directiveName)) { - directiveHandlers[directiveName](state, directiveName, directiveArgs); - } else { - throwWarning(state, 'unknown document directive "' + directiveName + '"'); - } +function move (src, dest, opts, cb) { + if (typeof opts === 'function') { + cb = opts + opts = {} } - skipSeparationSpace(state, true, -1); - - if (state.lineIndent === 0 && - state.input.charCodeAt(state.position) === 0x2D/* - */ && - state.input.charCodeAt(state.position + 1) === 0x2D/* - */ && - state.input.charCodeAt(state.position + 2) === 0x2D/* - */) { - state.position += 3; - skipSeparationSpace(state, true, -1); + const overwrite = opts.overwrite || opts.clobber || false - } else if (hasDirectives) { - throwError(state, 
'directives end mark is expected'); - } + stat.checkPaths(src, dest, 'move', opts, (err, stats) => { + if (err) return cb(err) + const { srcStat, isChangingCase = false } = stats + stat.checkParentPaths(src, srcStat, dest, 'move', err => { + if (err) return cb(err) + if (isParentRoot(dest)) return doRename(src, dest, overwrite, isChangingCase, cb) + mkdirp(path.dirname(dest), err => { + if (err) return cb(err) + return doRename(src, dest, overwrite, isChangingCase, cb) + }) + }) + }) +} - composeNode(state, state.lineIndent - 1, CONTEXT_BLOCK_OUT, false, true); - skipSeparationSpace(state, true, -1); +function isParentRoot (dest) { + const parent = path.dirname(dest) + const parsedPath = path.parse(parent) + return parsedPath.root === parent +} - if (state.checkLineBreaks && - PATTERN_NON_ASCII_LINE_BREAKS.test(state.input.slice(documentStart, state.position))) { - throwWarning(state, 'non-ASCII line breaks are interpreted as content'); +function doRename (src, dest, overwrite, isChangingCase, cb) { + if (isChangingCase) return rename(src, dest, overwrite, cb) + if (overwrite) { + return remove(dest, err => { + if (err) return cb(err) + return rename(src, dest, overwrite, cb) + }) } + pathExists(dest, (err, destExists) => { + if (err) return cb(err) + if (destExists) return cb(new Error('dest already exists.')) + return rename(src, dest, overwrite, cb) + }) +} - state.documents.push(state.result); - - if (state.position === state.lineStart && testDocumentSeparator(state)) { - - if (state.input.charCodeAt(state.position) === 0x2E/* . */) { - state.position += 3; - skipSeparationSpace(state, true, -1); - } - return; - } +function rename (src, dest, overwrite, cb) { + fs.rename(src, dest, err => { + if (!err) return cb() + if (err.code !== 'EXDEV') return cb(err) + return moveAcrossDevice(src, dest, overwrite, cb) + }) +} - if (state.position < (state.length - 1)) { - throwError(state, 'end of the stream or a document separator is expected'); - } else { - return; +function moveAcrossDevice (src, dest, overwrite, cb) { + const opts = { + overwrite, + errorOnExist: true } + copy(src, dest, opts, err => { + if (err) return cb(err) + return remove(src, cb) + }) } +module.exports = move -function loadDocuments(input, options) { - input = String(input); - options = options || {}; - if (input.length !== 0) { +/***/ }), - // Add tailing `\n` if not exists - if (input.charCodeAt(input.length - 1) !== 0x0A/* LF */ && - input.charCodeAt(input.length - 1) !== 0x0D/* CR */) { - input += '\n'; - } +/***/ 6570: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // Strip BOM - if (input.charCodeAt(0) === 0xFEFF) { - input = input.slice(1); - } - } +"use strict"; - var state = new State(input, options); - var nullpos = input.indexOf('\0'); +const u = __nccwpck_require__(1463).fromCallback +const fs = __nccwpck_require__(7758) +const path = __nccwpck_require__(5622) +const mkdir = __nccwpck_require__(2915) +const pathExists = __nccwpck_require__(3835).pathExists - if (nullpos !== -1) { - state.position = nullpos; - throwError(state, 'null byte is not allowed in input'); +function outputFile (file, data, encoding, callback) { + if (typeof encoding === 'function') { + callback = encoding + encoding = 'utf8' } - // Use 0 as string terminator. That significantly simplifies bounds check. 
- state.input += '\0'; - - while (state.input.charCodeAt(state.position) === 0x20/* Space */) { - state.lineIndent += 1; - state.position += 1; - } + const dir = path.dirname(file) + pathExists(dir, (err, itDoes) => { + if (err) return callback(err) + if (itDoes) return fs.writeFile(file, data, encoding, callback) - while (state.position < (state.length - 1)) { - readDocument(state); - } + mkdir.mkdirs(dir, err => { + if (err) return callback(err) - return state.documents; + fs.writeFile(file, data, encoding, callback) + }) + }) } - -function loadAll(input, iterator, options) { - if (iterator !== null && typeof iterator === 'object' && typeof options === 'undefined') { - options = iterator; - iterator = null; +function outputFileSync (file, ...args) { + const dir = path.dirname(file) + if (fs.existsSync(dir)) { + return fs.writeFileSync(file, ...args) } + mkdir.mkdirsSync(dir) + fs.writeFileSync(file, ...args) +} - var documents = loadDocuments(input, options); +module.exports = { + outputFile: u(outputFile), + outputFileSync +} - if (typeof iterator !== 'function') { - return documents; - } - for (var index = 0, length = documents.length; index < length; index += 1) { - iterator(documents[index]); - } -} +/***/ }), +/***/ 3835: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -function load(input, options) { - var documents = loadDocuments(input, options); +"use strict"; - if (documents.length === 0) { - /*eslint-disable no-undefined*/ - return undefined; - } else if (documents.length === 1) { - return documents[0]; - } - throw new YAMLException('expected a single document in the stream, but found more'); -} +const u = __nccwpck_require__(1463).fromPromise +const fs = __nccwpck_require__(1176) +function pathExists (path) { + return fs.access(path).then(() => true).catch(() => false) +} -module.exports.loadAll = loadAll; -module.exports.load = load; +module.exports = { + pathExists: u(pathExists), + pathExistsSync: fs.existsSync +} /***/ }), -/***/ 1082: +/***/ 7357: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -/*eslint-disable max-len*/ +const fs = __nccwpck_require__(7758) +const u = __nccwpck_require__(1463).fromCallback +const rimraf = __nccwpck_require__(7247) -var YAMLException = __nccwpck_require__(8179); -var Type = __nccwpck_require__(6073); +function remove (path, callback) { + // Node 14.14.0+ + if (fs.rm) return fs.rm(path, { recursive: true, force: true }, callback) + rimraf(path, callback) +} +function removeSync (path) { + // Node 14.14.0+ + if (fs.rmSync) return fs.rmSync(path, { recursive: true, force: true }) + rimraf.sync(path) +} -function compileList(schema, name) { - var result = []; +module.exports = { + remove: u(remove), + removeSync +} - schema[name].forEach(function (currentType) { - var newIndex = result.length; - result.forEach(function (previousType, previousIndex) { - if (previousType.tag === currentType.tag && - previousType.kind === currentType.kind && - previousType.multi === currentType.multi) { +/***/ }), - newIndex = previousIndex; - } - }); +/***/ 7247: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - result[newIndex] = currentType; - }); +"use strict"; - return result; -} +const fs = __nccwpck_require__(7758) +const path = __nccwpck_require__(5622) +const assert = __nccwpck_require__(2357) -function compileMap(/* lists... 
*/) { - var result = { - scalar: {}, - sequence: {}, - mapping: {}, - fallback: {}, - multi: { - scalar: [], - sequence: [], - mapping: [], - fallback: [] - } - }, index, length; +const isWindows = (process.platform === 'win32') - function collectType(type) { - if (type.multi) { - result.multi[type.kind].push(type); - result.multi['fallback'].push(type); - } else { - result[type.kind][type.tag] = result['fallback'][type.tag] = type; - } - } +function defaults (options) { + const methods = [ + 'unlink', + 'chmod', + 'stat', + 'lstat', + 'rmdir', + 'readdir' + ] + methods.forEach(m => { + options[m] = options[m] || fs[m] + m = m + 'Sync' + options[m] = options[m] || fs[m] + }) - for (index = 0, length = arguments.length; index < length; index += 1) { - arguments[index].forEach(collectType); - } - return result; + options.maxBusyTries = options.maxBusyTries || 3 } +function rimraf (p, options, cb) { + let busyTries = 0 -function Schema(definition) { - return this.extend(definition); -} + if (typeof options === 'function') { + cb = options + options = {} + } + assert(p, 'rimraf: missing path') + assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string') + assert.strictEqual(typeof cb, 'function', 'rimraf: callback function required') + assert(options, 'rimraf: invalid options argument provided') + assert.strictEqual(typeof options, 'object', 'rimraf: options should be object') -Schema.prototype.extend = function extend(definition) { - var implicit = []; - var explicit = []; + defaults(options) - if (definition instanceof Type) { - // Schema.extend(type) - explicit.push(definition); + rimraf_(p, options, function CB (er) { + if (er) { + if ((er.code === 'EBUSY' || er.code === 'ENOTEMPTY' || er.code === 'EPERM') && + busyTries < options.maxBusyTries) { + busyTries++ + const time = busyTries * 100 + // try again, with the same exact callback as this one. + return setTimeout(() => rimraf_(p, options, CB), time) + } - } else if (Array.isArray(definition)) { - // Schema.extend([ type1, type2, ... ]) - explicit = explicit.concat(definition); + // already gone + if (er.code === 'ENOENT') er = null + } - } else if (definition && (Array.isArray(definition.implicit) || Array.isArray(definition.explicit))) { - // Schema.extend({ explicit: [ type1, type2, ... ], implicit: [ type1, type2, ... ] }) - if (definition.implicit) implicit = implicit.concat(definition.implicit); - if (definition.explicit) explicit = explicit.concat(definition.explicit); + cb(er) + }) +} - } else { - throw new YAMLException('Schema.extend argument should be a Type, [ Type ], ' + - 'or a schema definition ({ implicit: [...], explicit: [...] })'); - } +// Two possible strategies. +// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR +// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR +// +// Both result in an extra syscall when you guess wrong. However, there +// are likely far more normal files in the world than directories. This +// is based on the assumption that a the average number of files per +// directory is >= 1. +// +// If anyone ever complains about this, then I guess the strategy could +// be made configurable somehow. But until then, YAGNI. 
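// A minimal, non-recursive sketch of the "assume it's a file first" strategy the
// comment above describes: try unlink, and only fall back to rmdir when the error
// says the target was a directory. removeEntry is a hypothetical name used only
// for this illustration; it is not part of the bundled rimraf implementation.
const nodeFs = require('fs')

function removeEntry (p, cb) {
  nodeFs.unlink(p, err => {
    if (!err || err.code === 'ENOENT') return cb(null)   // removed, or already gone
    if (err.code === 'EISDIR' || err.code === 'EPERM') {
      return nodeFs.rmdir(p, cb)                          // guessed wrong: treat it as a directory
    }
    cb(err)                                               // genuine failure
  })
}
// e.g. removeEntry('/tmp/some-file-or-empty-dir', err => { if (err) throw err })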
+function rimraf_ (p, options, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') - implicit.forEach(function (type) { - if (!(type instanceof Type)) { - throw new YAMLException('Specified list of YAML types (or a single Type object) contains a non-Type object.'); + // sunos lets the root user unlink directories, which is... weird. + // so we have to lstat here and make sure it's not a dir. + options.lstat(p, (er, st) => { + if (er && er.code === 'ENOENT') { + return cb(null) } - if (type.loadKind && type.loadKind !== 'scalar') { - throw new YAMLException('There is a non-scalar type in the implicit list of a schema. Implicit resolving of such types is not supported.'); + // Windows can EPERM on stat. Life is suffering. + if (er && er.code === 'EPERM' && isWindows) { + return fixWinEPERM(p, options, er, cb) } - if (type.multi) { - throw new YAMLException('There is a multi type in the implicit list of a schema. Multi tags can only be listed as explicit.'); + if (st && st.isDirectory()) { + return rmdir(p, options, er, cb) } - }); - explicit.forEach(function (type) { - if (!(type instanceof Type)) { - throw new YAMLException('Specified list of YAML types (or a single Type object) contains a non-Type object.'); - } - }); + options.unlink(p, er => { + if (er) { + if (er.code === 'ENOENT') { + return cb(null) + } + if (er.code === 'EPERM') { + return (isWindows) + ? fixWinEPERM(p, options, er, cb) + : rmdir(p, options, er, cb) + } + if (er.code === 'EISDIR') { + return rmdir(p, options, er, cb) + } + } + return cb(er) + }) + }) +} - var result = Object.create(Schema.prototype); +function fixWinEPERM (p, options, er, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') - result.implicit = (this.implicit || []).concat(implicit); - result.explicit = (this.explicit || []).concat(explicit); + options.chmod(p, 0o666, er2 => { + if (er2) { + cb(er2.code === 'ENOENT' ? null : er) + } else { + options.stat(p, (er3, stats) => { + if (er3) { + cb(er3.code === 'ENOENT' ? null : er) + } else if (stats.isDirectory()) { + rmdir(p, options, er, cb) + } else { + options.unlink(p, cb) + } + }) + } + }) +} - result.compiledImplicit = compileList(result, 'implicit'); - result.compiledExplicit = compileList(result, 'explicit'); - result.compiledTypeMap = compileMap(result.compiledImplicit, result.compiledExplicit); +function fixWinEPERMSync (p, options, er) { + let stats - return result; -}; + assert(p) + assert(options) + + try { + options.chmodSync(p, 0o666) + } catch (er2) { + if (er2.code === 'ENOENT') { + return + } else { + throw er + } + } + try { + stats = options.statSync(p) + } catch (er3) { + if (er3.code === 'ENOENT') { + return + } else { + throw er + } + } -module.exports = Schema; + if (stats.isDirectory()) { + rmdirSync(p, options, er) + } else { + options.unlinkSync(p) + } +} +function rmdir (p, options, originalEr, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') -/***/ }), + // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) + // if we guessed wrong, and it's not a directory, then + // raise the original error. 
+ options.rmdir(p, er => { + if (er && (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM')) { + rmkids(p, options, cb) + } else if (er && er.code === 'ENOTDIR') { + cb(originalEr) + } else { + cb(er) + } + }) +} -/***/ 2011: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +function rmkids (p, options, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') -"use strict"; -// Standard YAML's Core schema. -// http://www.yaml.org/spec/1.2/spec.html#id2804923 -// -// NOTE: JS-YAML does not support schema-specific tag resolution restrictions. -// So, Core schema has no distinctions from JSON schema is JS-YAML. + options.readdir(p, (er, files) => { + if (er) return cb(er) + let n = files.length + let errState + if (n === 0) return options.rmdir(p, cb) + files.forEach(f => { + rimraf(path.join(p, f), options, er => { + if (errState) { + return + } + if (er) return cb(errState = er) + if (--n === 0) { + options.rmdir(p, cb) + } + }) + }) + }) +} +// this looks simpler, and is strictly *faster*, but will +// tie up the JavaScript thread and fail on excessively +// deep directory trees. +function rimrafSync (p, options) { + let st -module.exports = __nccwpck_require__(1035); + options = options || {} + defaults(options) + assert(p, 'rimraf: missing path') + assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string') + assert(options, 'rimraf: missing options') + assert.strictEqual(typeof options, 'object', 'rimraf: options should be object') -/***/ }), + try { + st = options.lstatSync(p) + } catch (er) { + if (er.code === 'ENOENT') { + return + } -/***/ 8759: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // Windows can EPERM on stat. Life is suffering. + if (er.code === 'EPERM' && isWindows) { + fixWinEPERMSync(p, options, er) + } + } -"use strict"; -// JS-YAML's default schema for `safeLoad` function. -// It is not described in the YAML specification. -// -// This schema is based on standard YAML's Core schema and includes most of -// extra types described at YAML tag repository. (http://yaml.org/type/) + try { + // sunos lets the root user unlink directories, which is... weird. + if (st && st.isDirectory()) { + rmdirSync(p, options, null) + } else { + options.unlinkSync(p) + } + } catch (er) { + if (er.code === 'ENOENT') { + return + } else if (er.code === 'EPERM') { + return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) + } else if (er.code !== 'EISDIR') { + throw er + } + rmdirSync(p, options, er) + } +} +function rmdirSync (p, options, originalEr) { + assert(p) + assert(options) + try { + options.rmdirSync(p) + } catch (er) { + if (er.code === 'ENOTDIR') { + throw originalEr + } else if (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM') { + rmkidsSync(p, options) + } else if (er.code !== 'ENOENT') { + throw er + } + } +} +function rmkidsSync (p, options) { + assert(p) + assert(options) + options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options)) + if (isWindows) { + // We only end up here once we got ENOTEMPTY at least once, and + // at this point, we are guaranteed to have removed all the kids. + // So, we know that it won't be ENOENT or ENOTDIR or anything else. + // try really hard to delete stuff on windows, because it has a + // PROFOUNDLY annoying habit of not closing handles promptly when + // files are deleted, resulting in spurious ENOTEMPTY errors. 
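// A generic sketch of the time-bounded retry applied below: keep re-trying a
// synchronous operation until a time budget is exhausted. retryForMs is a
// hypothetical helper for illustration only, not part of the bundle.
function retryForMs (fn, budgetMs) {
  const deadline = Date.now() + budgetMs
  do {
    try {
      return fn()               // success: hand back the result
    } catch {}                  // transient failure: retry while within budget
  } while (Date.now() < deadline)
  return fn()                   // last attempt; let its error propagate
}
// e.g. retryForMs(() => require('fs').rmdirSync('/tmp/busy-dir'), 500)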
+ const startTime = Date.now() + do { + try { + const ret = options.rmdirSync(p, options) + return ret + } catch {} + } while (Date.now() - startTime < 500) // give up after 500ms + } else { + const ret = options.rmdirSync(p, options) + return ret + } +} -module.exports = __nccwpck_require__(2011).extend({ - implicit: [ - __nccwpck_require__(9212), - __nccwpck_require__(6104) - ], - explicit: [ - __nccwpck_require__(7900), - __nccwpck_require__(9046), - __nccwpck_require__(6860), - __nccwpck_require__(9548) - ] -}); +module.exports = rimraf +rimraf.sync = rimrafSync /***/ }), -/***/ 8562: +/***/ 3901: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// Standard YAML's Failsafe schema. -// http://www.yaml.org/spec/1.2/spec.html#id2802346 +const fs = __nccwpck_require__(1176) +const path = __nccwpck_require__(5622) +const util = __nccwpck_require__(1669) +function getStats (src, dest, opts) { + const statFunc = opts.dereference + ? (file) => fs.stat(file, { bigint: true }) + : (file) => fs.lstat(file, { bigint: true }) + return Promise.all([ + statFunc(src), + statFunc(dest).catch(err => { + if (err.code === 'ENOENT') return null + throw err + }) + ]).then(([srcStat, destStat]) => ({ srcStat, destStat })) +} +function getStatsSync (src, dest, opts) { + let destStat + const statFunc = opts.dereference + ? (file) => fs.statSync(file, { bigint: true }) + : (file) => fs.lstatSync(file, { bigint: true }) + const srcStat = statFunc(src) + try { + destStat = statFunc(dest) + } catch (err) { + if (err.code === 'ENOENT') return { srcStat, destStat: null } + throw err + } + return { srcStat, destStat } +} -var Schema = __nccwpck_require__(1082); +function checkPaths (src, dest, funcName, opts, cb) { + util.callbackify(getStats)(src, dest, opts, (err, stats) => { + if (err) return cb(err) + const { srcStat, destStat } = stats + if (destStat) { + if (areIdentical(srcStat, destStat)) { + const srcBaseName = path.basename(src) + const destBaseName = path.basename(dest) + if (funcName === 'move' && + srcBaseName !== destBaseName && + srcBaseName.toLowerCase() === destBaseName.toLowerCase()) { + return cb(null, { srcStat, destStat, isChangingCase: true }) + } + return cb(new Error('Source and destination must not be the same.')) + } + if (srcStat.isDirectory() && !destStat.isDirectory()) { + return cb(new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`)) + } + if (!srcStat.isDirectory() && destStat.isDirectory()) { + return cb(new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`)) + } + } -module.exports = new Schema({ - explicit: [ - __nccwpck_require__(3619), - __nccwpck_require__(7283), - __nccwpck_require__(6150) - ] -}); + if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { + return cb(new Error(errMsg(src, dest, funcName))) + } + return cb(null, { srcStat, destStat }) + }) +} +function checkPathsSync (src, dest, funcName, opts) { + const { srcStat, destStat } = getStatsSync(src, dest, opts) -/***/ }), + if (destStat) { + if (areIdentical(srcStat, destStat)) { + const srcBaseName = path.basename(src) + const destBaseName = path.basename(dest) + if (funcName === 'move' && + srcBaseName !== destBaseName && + srcBaseName.toLowerCase() === destBaseName.toLowerCase()) { + return { srcStat, destStat, isChangingCase: true } + } + throw new Error('Source and destination must not be the same.') + } + if (srcStat.isDirectory() && !destStat.isDirectory()) { + throw new Error(`Cannot overwrite non-directory '${dest}' with 
directory '${src}'.`) + } + if (!srcStat.isDirectory() && destStat.isDirectory()) { + throw new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`) + } + } -/***/ 1035: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { + throw new Error(errMsg(src, dest, funcName)) + } + return { srcStat, destStat } +} -"use strict"; -// Standard YAML's JSON schema. -// http://www.yaml.org/spec/1.2/spec.html#id2803231 -// -// NOTE: JS-YAML does not support schema-specific tag resolution restrictions. -// So, this schema is not such strict as defined in the YAML specification. -// It allows numbers in binary notaion, use `Null` and `NULL` as `null`, etc. +// recursively check if dest parent is a subdirectory of src. +// It works for all file types including symlinks since it +// checks the src and dest inodes. It starts from the deepest +// parent and stops once it reaches the src parent or the root path. +function checkParentPaths (src, srcStat, dest, funcName, cb) { + const srcParent = path.resolve(path.dirname(src)) + const destParent = path.resolve(path.dirname(dest)) + if (destParent === srcParent || destParent === path.parse(destParent).root) return cb() + fs.stat(destParent, { bigint: true }, (err, destStat) => { + if (err) { + if (err.code === 'ENOENT') return cb() + return cb(err) + } + if (areIdentical(srcStat, destStat)) { + return cb(new Error(errMsg(src, dest, funcName))) + } + return checkParentPaths(src, srcStat, destParent, funcName, cb) + }) +} +function checkParentPathsSync (src, srcStat, dest, funcName) { + const srcParent = path.resolve(path.dirname(src)) + const destParent = path.resolve(path.dirname(dest)) + if (destParent === srcParent || destParent === path.parse(destParent).root) return + let destStat + try { + destStat = fs.statSync(destParent, { bigint: true }) + } catch (err) { + if (err.code === 'ENOENT') return + throw err + } + if (areIdentical(srcStat, destStat)) { + throw new Error(errMsg(src, dest, funcName)) + } + return checkParentPathsSync(src, srcStat, destParent, funcName) +} +function areIdentical (srcStat, destStat) { + return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev +} +// return true if dest is a subdir of src, otherwise false. +// It only checks the path strings. +function isSrcSubdir (src, dest) { + const srcArr = path.resolve(src).split(path.sep).filter(i => i) + const destArr = path.resolve(dest).split(path.sep).filter(i => i) + return srcArr.reduce((acc, cur, i) => acc && destArr[i] === cur, true) +} +function errMsg (src, dest, funcName) { + return `Cannot ${funcName} '${src}' to a subdirectory of itself, '${dest}'.` +} -module.exports = __nccwpck_require__(8562).extend({ - implicit: [ - __nccwpck_require__(721), - __nccwpck_require__(4993), - __nccwpck_require__(1615), - __nccwpck_require__(2705) - ] -}); +module.exports = { + checkPaths, + checkPathsSync, + checkParentPaths, + checkParentPathsSync, + isSrcSubdir, + areIdentical +} /***/ }), -/***/ 6975: +/***/ 2548: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; +const fs = __nccwpck_require__(7758) -var common = __nccwpck_require__(6829); - - -// get snippet for a single line, respecting maxLength -function getLine(buffer, lineStart, lineEnd, position, maxLineLength) { - var head = ''; - var tail = ''; - var maxHalfLength = Math.floor(maxLineLength / 2) - 1; - - if (position - lineStart > maxHalfLength) { - head = ' ... 
'; - lineStart = position - maxHalfLength + head.length; - } - - if (lineEnd - position > maxHalfLength) { - tail = ' ...'; - lineEnd = position + maxHalfLength - tail.length; - } - - return { - str: head + buffer.slice(lineStart, lineEnd).replace(/\t/g, '→') + tail, - pos: position - lineStart + head.length // relative position - }; +function utimesMillis (path, atime, mtime, callback) { + // if (!HAS_MILLIS_RES) return fs.utimes(path, atime, mtime, callback) + fs.open(path, 'r+', (err, fd) => { + if (err) return callback(err) + fs.futimes(fd, atime, mtime, futimesErr => { + fs.close(fd, closeErr => { + if (callback) callback(futimesErr || closeErr) + }) + }) + }) } +function utimesMillisSync (path, atime, mtime) { + const fd = fs.openSync(path, 'r+') + fs.futimesSync(fd, atime, mtime) + return fs.closeSync(fd) +} -function padStart(string, max) { - return common.repeat(' ', max - string.length) + string; +module.exports = { + utimesMillis, + utimesMillisSync } -function makeSnippet(mark, options) { - options = Object.create(options || null); - - if (!mark.buffer) return null; - - if (!options.maxLength) options.maxLength = 79; - if (typeof options.indent !== 'number') options.indent = 1; - if (typeof options.linesBefore !== 'number') options.linesBefore = 3; - if (typeof options.linesAfter !== 'number') options.linesAfter = 2; +/***/ }), - var re = /\r?\n|\r|\0/g; - var lineStarts = [ 0 ]; - var lineEnds = []; - var match; - var foundLineNo = -1; +/***/ 7356: +/***/ ((module) => { - while ((match = re.exec(mark.buffer))) { - lineEnds.push(match.index); - lineStarts.push(match.index + match[0].length); +"use strict"; - if (mark.position <= match.index && foundLineNo < 0) { - foundLineNo = lineStarts.length - 2; - } - } - if (foundLineNo < 0) foundLineNo = lineStarts.length - 1; +module.exports = clone - var result = '', i, line; - var lineNoLength = Math.min(mark.line + options.linesAfter, lineEnds.length).toString().length; - var maxLineLength = options.maxLength - (options.indent + lineNoLength + 3); +var getPrototypeOf = Object.getPrototypeOf || function (obj) { + return obj.__proto__ +} - for (i = 1; i <= options.linesBefore; i++) { - if (foundLineNo - i < 0) break; - line = getLine( - mark.buffer, - lineStarts[foundLineNo - i], - lineEnds[foundLineNo - i], - mark.position - (lineStarts[foundLineNo] - lineStarts[foundLineNo - i]), - maxLineLength - ); - result = common.repeat(' ', options.indent) + padStart((mark.line - i + 1).toString(), lineNoLength) + - ' | ' + line.str + '\n' + result; - } +function clone (obj) { + if (obj === null || typeof obj !== 'object') + return obj - line = getLine(mark.buffer, lineStarts[foundLineNo], lineEnds[foundLineNo], mark.position, maxLineLength); - result += common.repeat(' ', options.indent) + padStart((mark.line + 1).toString(), lineNoLength) + - ' | ' + line.str + '\n'; - result += common.repeat('-', options.indent + lineNoLength + 3 + line.pos) + '^' + '\n'; + if (obj instanceof Object) + var copy = { __proto__: getPrototypeOf(obj) } + else + var copy = Object.create(null) - for (i = 1; i <= options.linesAfter; i++) { - if (foundLineNo + i >= lineEnds.length) break; - line = getLine( - mark.buffer, - lineStarts[foundLineNo + i], - lineEnds[foundLineNo + i], - mark.position - (lineStarts[foundLineNo] - lineStarts[foundLineNo + i]), - maxLineLength - ); - result += common.repeat(' ', options.indent) + padStart((mark.line + i + 1).toString(), lineNoLength) + - ' | ' + line.str + '\n'; - } + Object.getOwnPropertyNames(obj).forEach(function (key) { 
+ Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key)) + }) - return result.replace(/\n$/, ''); + return copy } -module.exports = makeSnippet; - - /***/ }), -/***/ 6073: +/***/ 7758: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +var fs = __nccwpck_require__(5747) +var polyfills = __nccwpck_require__(263) +var legacy = __nccwpck_require__(3086) +var clone = __nccwpck_require__(7356) +var util = __nccwpck_require__(1669) -var YAMLException = __nccwpck_require__(8179); +/* istanbul ignore next - node 0.x polyfill */ +var gracefulQueue +var previousSymbol -var TYPE_CONSTRUCTOR_OPTIONS = [ - 'kind', - 'multi', - 'resolve', - 'construct', - 'instanceOf', - 'predicate', - 'represent', - 'representName', - 'defaultStyle', - 'styleAliases' -]; +/* istanbul ignore else - node 0.x polyfill */ +if (typeof Symbol === 'function' && typeof Symbol.for === 'function') { + gracefulQueue = Symbol.for('graceful-fs.queue') + // This is used in testing by future versions + previousSymbol = Symbol.for('graceful-fs.previous') +} else { + gracefulQueue = '___graceful-fs.queue' + previousSymbol = '___graceful-fs.previous' +} -var YAML_NODE_KINDS = [ - 'scalar', - 'sequence', - 'mapping' -]; +function noop () {} -function compileStyleAliases(map) { - var result = {}; +function publishQueue(context, queue) { + Object.defineProperty(context, gracefulQueue, { + get: function() { + return queue + } + }) +} - if (map !== null) { - Object.keys(map).forEach(function (style) { - map[style].forEach(function (alias) { - result[String(alias)] = style; - }); - }); +var debug = noop +if (util.debuglog) + debug = util.debuglog('gfs4') +else if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) + debug = function() { + var m = util.format.apply(util, arguments) + m = 'GFS4: ' + m.split(/\n/).join('\nGFS4: ') + console.error(m) } - return result; -} +// Once time initialization +if (!fs[gracefulQueue]) { + // This queue can be shared by multiple loaded instances + var queue = global[gracefulQueue] || [] + publishQueue(fs, queue) -function Type(tag, options) { - options = options || {}; + // Patch fs.close/closeSync to shared queue version, because we need + // to retry() whenever a close happens *anywhere* in the program. + // This is essential when multiple graceful-fs instances are + // in play at the same time. + fs.close = (function (fs$close) { + function close (fd, cb) { + return fs$close.call(fs, fd, function (err) { + // This function uses the graceful-fs shared queue + if (!err) { + retry() + } - Object.keys(options).forEach(function (name) { - if (TYPE_CONSTRUCTOR_OPTIONS.indexOf(name) === -1) { - throw new YAMLException('Unknown option "' + name + '" is met in definition of "' + tag + '" YAML type.'); + if (typeof cb === 'function') + cb.apply(this, arguments) + }) } - }); - // TODO: Add tag format check. 
- this.options = options; // keep original options in case user wants to extend this type later - this.tag = tag; - this.kind = options['kind'] || null; - this.resolve = options['resolve'] || function () { return true; }; - this.construct = options['construct'] || function (data) { return data; }; - this.instanceOf = options['instanceOf'] || null; - this.predicate = options['predicate'] || null; - this.represent = options['represent'] || null; - this.representName = options['representName'] || null; - this.defaultStyle = options['defaultStyle'] || null; - this.multi = options['multi'] || false; - this.styleAliases = compileStyleAliases(options['styleAliases'] || null); + Object.defineProperty(close, previousSymbol, { + value: fs$close + }) + return close + })(fs.close) - if (YAML_NODE_KINDS.indexOf(this.kind) === -1) { - throw new YAMLException('Unknown kind "' + this.kind + '" is specified for "' + tag + '" YAML type.'); + fs.closeSync = (function (fs$closeSync) { + function closeSync (fd) { + // This function uses the graceful-fs shared queue + fs$closeSync.apply(fs, arguments) + retry() + } + + Object.defineProperty(closeSync, previousSymbol, { + value: fs$closeSync + }) + return closeSync + })(fs.closeSync) + + if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) { + process.on('exit', function() { + debug(fs[gracefulQueue]) + __nccwpck_require__(2357).equal(fs[gracefulQueue].length, 0) + }) } } -module.exports = Type; +if (!global[gracefulQueue]) { + publishQueue(global, fs[gracefulQueue]); +} +module.exports = patch(clone(fs)) +if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) { + module.exports = patch(fs) + fs.__patched = true; +} -/***/ }), +function patch (fs) { + // Everything that references the open() function needs to be in here + polyfills(fs) + fs.gracefulify = patch -/***/ 7900: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + fs.createReadStream = createReadStream + fs.createWriteStream = createWriteStream + var fs$readFile = fs.readFile + fs.readFile = readFile + function readFile (path, options, cb) { + if (typeof options === 'function') + cb = options, options = null -"use strict"; + return go$readFile(path, options, cb) + + function go$readFile (path, options, cb) { + return fs$readFile(path, options, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$readFile, [path, options, cb]]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + retry() + } + }) + } + } + var fs$writeFile = fs.writeFile + fs.writeFile = writeFile + function writeFile (path, data, options, cb) { + if (typeof options === 'function') + cb = options, options = null -/*eslint-disable no-bitwise*/ + return go$writeFile(path, data, options, cb) + + function go$writeFile (path, data, options, cb) { + return fs$writeFile(path, data, options, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$writeFile, [path, data, options, cb]]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + retry() + } + }) + } + } + var fs$appendFile = fs.appendFile + if (fs$appendFile) + fs.appendFile = appendFile + function appendFile (path, data, options, cb) { + if (typeof options === 'function') + cb = options, options = null -var Type = __nccwpck_require__(6073); + return go$appendFile(path, data, options, cb) + function go$appendFile (path, data, options, cb) { + return fs$appendFile(path, data, options, function (err) { + if (err && (err.code === 'EMFILE' || err.code 
=== 'ENFILE')) + enqueue([go$appendFile, [path, data, options, cb]]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + retry() + } + }) + } + } -// [ 64, 65, 66 ] -> [ padding, CR, LF ] -var BASE64_MAP = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\n\r'; + var fs$copyFile = fs.copyFile + if (fs$copyFile) + fs.copyFile = copyFile + function copyFile (src, dest, flags, cb) { + if (typeof flags === 'function') { + cb = flags + flags = 0 + } + return fs$copyFile(src, dest, flags, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([fs$copyFile, [src, dest, flags, cb]]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + retry() + } + }) + } + var fs$readdir = fs.readdir + fs.readdir = readdir + function readdir (path, options, cb) { + var args = [path] + if (typeof options !== 'function') { + args.push(options) + } else { + cb = options + } + args.push(go$readdir$cb) -function resolveYamlBinary(data) { - if (data === null) return false; + return go$readdir(args) - var code, idx, bitlen = 0, max = data.length, map = BASE64_MAP; + function go$readdir$cb (err, files) { + if (files && files.sort) + files.sort() - // Convert one by one. - for (idx = 0; idx < max; idx++) { - code = map.indexOf(data.charAt(idx)); + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$readdir, [args]]) - // Skip CR/LF - if (code > 64) continue; + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + retry() + } + } + } - // Fail on illegal characters - if (code < 0) return false; + function go$readdir (args) { + return fs$readdir.apply(fs, args) + } - bitlen += 6; + if (process.version.substr(0, 4) === 'v0.8') { + var legStreams = legacy(fs) + ReadStream = legStreams.ReadStream + WriteStream = legStreams.WriteStream } - // If there are any bits left, source was corrupted - return (bitlen % 8) === 0; -} + var fs$ReadStream = fs.ReadStream + if (fs$ReadStream) { + ReadStream.prototype = Object.create(fs$ReadStream.prototype) + ReadStream.prototype.open = ReadStream$open + } -function constructYamlBinary(data) { - var idx, tailbits, - input = data.replace(/[\r\n=]/g, ''), // remove CR/LF & padding to simplify scan - max = input.length, - map = BASE64_MAP, - bits = 0, - result = []; + var fs$WriteStream = fs.WriteStream + if (fs$WriteStream) { + WriteStream.prototype = Object.create(fs$WriteStream.prototype) + WriteStream.prototype.open = WriteStream$open + } - // Collect by 6*4 bits (3 bytes) + Object.defineProperty(fs, 'ReadStream', { + get: function () { + return ReadStream + }, + set: function (val) { + ReadStream = val + }, + enumerable: true, + configurable: true + }) + Object.defineProperty(fs, 'WriteStream', { + get: function () { + return WriteStream + }, + set: function (val) { + WriteStream = val + }, + enumerable: true, + configurable: true + }) - for (idx = 0; idx < max; idx++) { - if ((idx % 4 === 0) && idx) { - result.push((bits >> 16) & 0xFF); - result.push((bits >> 8) & 0xFF); - result.push(bits & 0xFF); - } + // legacy names + var FileReadStream = ReadStream + Object.defineProperty(fs, 'FileReadStream', { + get: function () { + return FileReadStream + }, + set: function (val) { + FileReadStream = val + }, + enumerable: true, + configurable: true + }) + var FileWriteStream = WriteStream + Object.defineProperty(fs, 'FileWriteStream', { + get: function () { + return FileWriteStream + }, + set: function (val) { + FileWriteStream = val + }, + enumerable: true, + 
configurable: true + }) - bits = (bits << 6) | map.indexOf(input.charAt(idx)); + function ReadStream (path, options) { + if (this instanceof ReadStream) + return fs$ReadStream.apply(this, arguments), this + else + return ReadStream.apply(Object.create(ReadStream.prototype), arguments) } - // Dump tail - - tailbits = (max % 4) * 6; + function ReadStream$open () { + var that = this + open(that.path, that.flags, that.mode, function (err, fd) { + if (err) { + if (that.autoClose) + that.destroy() - if (tailbits === 0) { - result.push((bits >> 16) & 0xFF); - result.push((bits >> 8) & 0xFF); - result.push(bits & 0xFF); - } else if (tailbits === 18) { - result.push((bits >> 10) & 0xFF); - result.push((bits >> 2) & 0xFF); - } else if (tailbits === 12) { - result.push((bits >> 4) & 0xFF); + that.emit('error', err) + } else { + that.fd = fd + that.emit('open', fd) + that.read() + } + }) } - return new Uint8Array(result); -} - -function representYamlBinary(object /*, style*/) { - var result = '', bits = 0, idx, tail, - max = object.length, - map = BASE64_MAP; + function WriteStream (path, options) { + if (this instanceof WriteStream) + return fs$WriteStream.apply(this, arguments), this + else + return WriteStream.apply(Object.create(WriteStream.prototype), arguments) + } - // Convert every three bytes to 4 ASCII characters. + function WriteStream$open () { + var that = this + open(that.path, that.flags, that.mode, function (err, fd) { + if (err) { + that.destroy() + that.emit('error', err) + } else { + that.fd = fd + that.emit('open', fd) + } + }) + } - for (idx = 0; idx < max; idx++) { - if ((idx % 3 === 0) && idx) { - result += map[(bits >> 18) & 0x3F]; - result += map[(bits >> 12) & 0x3F]; - result += map[(bits >> 6) & 0x3F]; - result += map[bits & 0x3F]; - } + function createReadStream (path, options) { + return new fs.ReadStream(path, options) + } - bits = (bits << 8) + object[idx]; + function createWriteStream (path, options) { + return new fs.WriteStream(path, options) } - // Dump tail + var fs$open = fs.open + fs.open = open + function open (path, flags, mode, cb) { + if (typeof mode === 'function') + cb = mode, mode = null - tail = max % 3; + return go$open(path, flags, mode, cb) - if (tail === 0) { - result += map[(bits >> 18) & 0x3F]; - result += map[(bits >> 12) & 0x3F]; - result += map[(bits >> 6) & 0x3F]; - result += map[bits & 0x3F]; - } else if (tail === 2) { - result += map[(bits >> 10) & 0x3F]; - result += map[(bits >> 4) & 0x3F]; - result += map[(bits << 2) & 0x3F]; - result += map[64]; - } else if (tail === 1) { - result += map[(bits >> 2) & 0x3F]; - result += map[(bits << 4) & 0x3F]; - result += map[64]; - result += map[64]; + function go$open (path, flags, mode, cb) { + return fs$open(path, flags, mode, function (err, fd) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$open, [path, flags, mode, cb]]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + retry() + } + }) + } } - return result; + return fs } -function isBinary(obj) { - return Object.prototype.toString.call(obj) === '[object Uint8Array]'; +function enqueue (elem) { + debug('ENQUEUE', elem[0].name, elem[1]) + fs[gracefulQueue].push(elem) } -module.exports = new Type('tag:yaml.org,2002:binary', { - kind: 'scalar', - resolve: resolveYamlBinary, - construct: constructYamlBinary, - predicate: isBinary, - represent: representYamlBinary -}); +function retry () { + var elem = fs[gracefulQueue].shift() + if (elem) { + debug('RETRY', elem[0].name, elem[1]) + elem[0].apply(null, 
elem[1]) + } +} /***/ }), -/***/ 4993: +/***/ 3086: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; - +var Stream = __nccwpck_require__(2413).Stream -var Type = __nccwpck_require__(6073); +module.exports = legacy -function resolveYamlBoolean(data) { - if (data === null) return false; +function legacy (fs) { + return { + ReadStream: ReadStream, + WriteStream: WriteStream + } - var max = data.length; + function ReadStream (path, options) { + if (!(this instanceof ReadStream)) return new ReadStream(path, options); - return (max === 4 && (data === 'true' || data === 'True' || data === 'TRUE')) || - (max === 5 && (data === 'false' || data === 'False' || data === 'FALSE')); -} + Stream.call(this); -function constructYamlBoolean(data) { - return data === 'true' || - data === 'True' || - data === 'TRUE'; -} + var self = this; -function isBoolean(object) { - return Object.prototype.toString.call(object) === '[object Boolean]'; -} + this.path = path; + this.fd = null; + this.readable = true; + this.paused = false; -module.exports = new Type('tag:yaml.org,2002:bool', { - kind: 'scalar', - resolve: resolveYamlBoolean, - construct: constructYamlBoolean, - predicate: isBoolean, - represent: { - lowercase: function (object) { return object ? 'true' : 'false'; }, - uppercase: function (object) { return object ? 'TRUE' : 'FALSE'; }, - camelcase: function (object) { return object ? 'True' : 'False'; } - }, - defaultStyle: 'lowercase' -}); + this.flags = 'r'; + this.mode = 438; /*=0666*/ + this.bufferSize = 64 * 1024; + options = options || {}; -/***/ }), + // Mixin options into this + var keys = Object.keys(options); + for (var index = 0, length = keys.length; index < length; index++) { + var key = keys[index]; + this[key] = options[key]; + } -/***/ 2705: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (this.encoding) this.setEncoding(this.encoding); -"use strict"; + if (this.start !== undefined) { + if ('number' !== typeof this.start) { + throw TypeError('start must be a Number'); + } + if (this.end === undefined) { + this.end = Infinity; + } else if ('number' !== typeof this.end) { + throw TypeError('end must be a Number'); + } + if (this.start > this.end) { + throw new Error('start must be <= end'); + } -var common = __nccwpck_require__(6829); -var Type = __nccwpck_require__(6073); + this.pos = this.start; + } -var YAML_FLOAT_PATTERN = new RegExp( - // 2.5e4, 2.5 and integers - '^(?:[-+]?(?:[0-9][0-9_]*)(?:\\.[0-9_]*)?(?:[eE][-+]?[0-9]+)?' + - // .2e4, .2 - // special case, seems not from spec - '|\\.[0-9_]+(?:[eE][-+]?[0-9]+)?' + - // .inf - '|[-+]?\\.(?:inf|Inf|INF)' + - // .nan - '|\\.(?:nan|NaN|NAN))$'); + if (this.fd !== null) { + process.nextTick(function() { + self._read(); + }); + return; + } -function resolveYamlFloat(data) { - if (data === null) return false; + fs.open(this.path, this.flags, this.mode, function (err, fd) { + if (err) { + self.emit('error', err); + self.readable = false; + return; + } - if (!YAML_FLOAT_PATTERN.test(data) || - // Quick hack to not allow integers end with `_` - // Probably should update regexp & check speed - data[data.length - 1] === '_') { - return false; + self.fd = fd; + self.emit('open', fd); + self._read(); + }) } - return true; -} + function WriteStream (path, options) { + if (!(this instanceof WriteStream)) return new WriteStream(path, options); -function constructYamlFloat(data) { - var value, sign; + Stream.call(this); - value = data.replace(/_/g, '').toLowerCase(); - sign = value[0] === '-' ? 
-1 : 1; + this.path = path; + this.fd = null; + this.writable = true; - if ('+-'.indexOf(value[0]) >= 0) { - value = value.slice(1); - } + this.flags = 'w'; + this.encoding = 'binary'; + this.mode = 438; /*=0666*/ + this.bytesWritten = 0; - if (value === '.inf') { - return (sign === 1) ? Number.POSITIVE_INFINITY : Number.NEGATIVE_INFINITY; + options = options || {}; - } else if (value === '.nan') { - return NaN; - } - return sign * parseFloat(value, 10); -} + // Mixin options into this + var keys = Object.keys(options); + for (var index = 0, length = keys.length; index < length; index++) { + var key = keys[index]; + this[key] = options[key]; + } + if (this.start !== undefined) { + if ('number' !== typeof this.start) { + throw TypeError('start must be a Number'); + } + if (this.start < 0) { + throw new Error('start must be >= zero'); + } -var SCIENTIFIC_WITHOUT_DOT = /^[-+]?[0-9]+e/; + this.pos = this.start; + } -function representYamlFloat(object, style) { - var res; + this.busy = false; + this._queue = []; - if (isNaN(object)) { - switch (style) { - case 'lowercase': return '.nan'; - case 'uppercase': return '.NAN'; - case 'camelcase': return '.NaN'; - } - } else if (Number.POSITIVE_INFINITY === object) { - switch (style) { - case 'lowercase': return '.inf'; - case 'uppercase': return '.INF'; - case 'camelcase': return '.Inf'; - } - } else if (Number.NEGATIVE_INFINITY === object) { - switch (style) { - case 'lowercase': return '-.inf'; - case 'uppercase': return '-.INF'; - case 'camelcase': return '-.Inf'; + if (this.fd === null) { + this._open = fs.open; + this._queue.push([this._open, this.path, this.flags, this.mode, undefined]); + this.flush(); } - } else if (common.isNegativeZero(object)) { - return '-0.0'; } +} - res = object.toString(10); - // JS stringifier can build scientific format without dots: 5e-100, - // while YAML requres dot: 5.e-100. Fix it with simple hack +/***/ }), - return SCIENTIFIC_WITHOUT_DOT.test(res) ? res.replace('e', '.e') : res; -} +/***/ 263: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -function isFloat(object) { - return (Object.prototype.toString.call(object) === '[object Number]') && - (object % 1 !== 0 || common.isNegativeZero(object)); +var constants = __nccwpck_require__(7619) + +var origCwd = process.cwd +var cwd = null + +var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform + +process.cwd = function() { + if (!cwd) + cwd = origCwd.call(process) + return cwd } +try { + process.cwd() +} catch (er) {} -module.exports = new Type('tag:yaml.org,2002:float', { - kind: 'scalar', - resolve: resolveYamlFloat, - construct: constructYamlFloat, - predicate: isFloat, - represent: representYamlFloat, - defaultStyle: 'lowercase' -}); +// This check is needed until node.js 12 is required +if (typeof process.chdir === 'function') { + var chdir = process.chdir + process.chdir = function (d) { + cwd = null + chdir.call(process, d) + } + if (Object.setPrototypeOf) Object.setPrototypeOf(process.chdir, chdir) +} +module.exports = patch -/***/ }), +function patch (fs) { + // (re-)implement some things that are known busted or missing. -/***/ 1615: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // lchmod, broken prior to 0.6.2 + // back-port the fix here. 
+ if (constants.hasOwnProperty('O_SYMLINK') && + process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) { + patchLchmod(fs) + } -"use strict"; + // lutimes implementation, or no-op + if (!fs.lutimes) { + patchLutimes(fs) + } + // https://github.com/isaacs/node-graceful-fs/issues/4 + // Chown should not fail on einval or eperm if non-root. + // It should not fail on enosys ever, as this just indicates + // that a fs doesn't support the intended operation. -var common = __nccwpck_require__(6829); -var Type = __nccwpck_require__(6073); + fs.chown = chownFix(fs.chown) + fs.fchown = chownFix(fs.fchown) + fs.lchown = chownFix(fs.lchown) -function isHexCode(c) { - return ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) || - ((0x41/* A */ <= c) && (c <= 0x46/* F */)) || - ((0x61/* a */ <= c) && (c <= 0x66/* f */)); -} + fs.chmod = chmodFix(fs.chmod) + fs.fchmod = chmodFix(fs.fchmod) + fs.lchmod = chmodFix(fs.lchmod) -function isOctCode(c) { - return ((0x30/* 0 */ <= c) && (c <= 0x37/* 7 */)); -} + fs.chownSync = chownFixSync(fs.chownSync) + fs.fchownSync = chownFixSync(fs.fchownSync) + fs.lchownSync = chownFixSync(fs.lchownSync) -function isDecCode(c) { - return ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)); -} + fs.chmodSync = chmodFixSync(fs.chmodSync) + fs.fchmodSync = chmodFixSync(fs.fchmodSync) + fs.lchmodSync = chmodFixSync(fs.lchmodSync) -function resolveYamlInteger(data) { - if (data === null) return false; + fs.stat = statFix(fs.stat) + fs.fstat = statFix(fs.fstat) + fs.lstat = statFix(fs.lstat) - var max = data.length, - index = 0, - hasDigits = false, - ch; + fs.statSync = statFixSync(fs.statSync) + fs.fstatSync = statFixSync(fs.fstatSync) + fs.lstatSync = statFixSync(fs.lstatSync) - if (!max) return false; + // if lchmod/lchown do not exist, then make them no-ops + if (!fs.lchmod) { + fs.lchmod = function (path, mode, cb) { + if (cb) process.nextTick(cb) + } + fs.lchmodSync = function () {} + } + if (!fs.lchown) { + fs.lchown = function (path, uid, gid, cb) { + if (cb) process.nextTick(cb) + } + fs.lchownSync = function () {} + } - ch = data[index]; + // on Windows, A/V software can lock the directory, causing this + // to fail with an EACCES or EPERM if the directory contains newly + // created files. Try again on failure, for up to 60 seconds. - // sign - if (ch === '-' || ch === '+') { - ch = data[++index]; + // Set the timeout this long because some Windows Anti-Virus, such as Parity + // bit9, may lock files for up to a minute, causing npm package install + // failures. Also, take care to yield the scheduler. Windows scheduling gives + // CPU to a busy looping process, which can cause the program causing the lock + // contention to be starved of CPU by node, so the contention doesn't resolve. + if (platform === "win32") { + fs.rename = (function (fs$rename) { return function (from, to, cb) { + var start = Date.now() + var backoff = 0; + fs$rename(from, to, function CB (er) { + if (er + && (er.code === "EACCES" || er.code === "EPERM") + && Date.now() - start < 60000) { + setTimeout(function() { + fs.stat(to, function (stater, st) { + if (stater && stater.code === "ENOENT") + fs$rename(from, to, CB); + else + cb(er) + }) + }, backoff) + if (backoff < 100) + backoff += 10; + return; + } + if (cb) cb(er) + }) + }})(fs.rename) } - if (ch === '0') { - // 0 - if (index + 1 === max) return true; - ch = data[++index]; - - // base 2, base 8, base 16 + // if read() returns EAGAIN, then just try it again. 
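// A minimal standalone sketch of the EAGAIN retry described above, assuming a
// callback-style fs.read; `readWithRetry` and `MAX_EAGAIN_RETRIES` are
// illustrative names, not part of this module.
var MAX_EAGAIN_RETRIES = 10
function readWithRetry (fsLike, fd, buffer, offset, length, position, cb, attempt) {
  attempt = attempt || 0
  fsLike.read(fd, buffer, offset, length, position, function (er, bytesRead, buf) {
    if (er && er.code === 'EAGAIN' && attempt < MAX_EAGAIN_RETRIES) {
      // transient failure: repeat the identical read, bounded by the counter
      return readWithRetry(fsLike, fd, buffer, offset, length, position, cb, attempt + 1)
    }
    cb(er, bytesRead, buf)
  })
}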
+ fs.read = (function (fs$read) { + function read (fd, buffer, offset, length, position, callback_) { + var callback + if (callback_ && typeof callback_ === 'function') { + var eagCounter = 0 + callback = function (er, _, __) { + if (er && er.code === 'EAGAIN' && eagCounter < 10) { + eagCounter ++ + return fs$read.call(fs, fd, buffer, offset, length, position, callback) + } + callback_.apply(this, arguments) + } + } + return fs$read.call(fs, fd, buffer, offset, length, position, callback) + } - if (ch === 'b') { - // base 2 - index++; + // This ensures `util.promisify` works as it does for native `fs.read`. + if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read) + return read + })(fs.read) - for (; index < max; index++) { - ch = data[index]; - if (ch === '_') continue; - if (ch !== '0' && ch !== '1') return false; - hasDigits = true; + fs.readSync = (function (fs$readSync) { return function (fd, buffer, offset, length, position) { + var eagCounter = 0 + while (true) { + try { + return fs$readSync.call(fs, fd, buffer, offset, length, position) + } catch (er) { + if (er.code === 'EAGAIN' && eagCounter < 10) { + eagCounter ++ + continue + } + throw er } - return hasDigits && ch !== '_'; } + }})(fs.readSync) + function patchLchmod (fs) { + fs.lchmod = function (path, mode, callback) { + fs.open( path + , constants.O_WRONLY | constants.O_SYMLINK + , mode + , function (err, fd) { + if (err) { + if (callback) callback(err) + return + } + // prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. + fs.fchmod(fd, mode, function (err) { + fs.close(fd, function(err2) { + if (callback) callback(err || err2) + }) + }) + }) + } - if (ch === 'x') { - // base 16 - index++; + fs.lchmodSync = function (path, mode) { + var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode) - for (; index < max; index++) { - ch = data[index]; - if (ch === '_') continue; - if (!isHexCode(data.charCodeAt(index))) return false; - hasDigits = true; + // prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. 
+ var threw = true + var ret + try { + ret = fs.fchmodSync(fd, mode) + threw = false + } finally { + if (threw) { + try { + fs.closeSync(fd) + } catch (er) {} + } else { + fs.closeSync(fd) + } } - return hasDigits && ch !== '_'; + return ret } + } + function patchLutimes (fs) { + if (constants.hasOwnProperty("O_SYMLINK")) { + fs.lutimes = function (path, at, mt, cb) { + fs.open(path, constants.O_SYMLINK, function (er, fd) { + if (er) { + if (cb) cb(er) + return + } + fs.futimes(fd, at, mt, function (er) { + fs.close(fd, function (er2) { + if (cb) cb(er || er2) + }) + }) + }) + } - if (ch === 'o') { - // base 8 - index++; + fs.lutimesSync = function (path, at, mt) { + var fd = fs.openSync(path, constants.O_SYMLINK) + var ret + var threw = true + try { + ret = fs.futimesSync(fd, at, mt) + threw = false + } finally { + if (threw) { + try { + fs.closeSync(fd) + } catch (er) {} + } else { + fs.closeSync(fd) + } + } + return ret + } - for (; index < max; index++) { - ch = data[index]; - if (ch === '_') continue; - if (!isOctCode(data.charCodeAt(index))) return false; - hasDigits = true; + } else { + fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) } + fs.lutimesSync = function () {} + } + } + + function chmodFix (orig) { + if (!orig) return orig + return function (target, mode, cb) { + return orig.call(fs, target, mode, function (er) { + if (chownErOk(er)) er = null + if (cb) cb.apply(this, arguments) + }) + } + } + + function chmodFixSync (orig) { + if (!orig) return orig + return function (target, mode) { + try { + return orig.call(fs, target, mode) + } catch (er) { + if (!chownErOk(er)) throw er } - return hasDigits && ch !== '_'; } } - // base 10 (except 0) - - // value should not start with `_`; - if (ch === '_') return false; - for (; index < max; index++) { - ch = data[index]; - if (ch === '_') continue; - if (!isDecCode(data.charCodeAt(index))) { - return false; + function chownFix (orig) { + if (!orig) return orig + return function (target, uid, gid, cb) { + return orig.call(fs, target, uid, gid, function (er) { + if (chownErOk(er)) er = null + if (cb) cb.apply(this, arguments) + }) } - hasDigits = true; } - // Should have digits and should not end with `_` - if (!hasDigits || ch === '_') return false; - - return true; -} - -function constructYamlInteger(data) { - var value = data, sign = 1, ch; - - if (value.indexOf('_') !== -1) { - value = value.replace(/_/g, ''); + function chownFixSync (orig) { + if (!orig) return orig + return function (target, uid, gid) { + try { + return orig.call(fs, target, uid, gid) + } catch (er) { + if (!chownErOk(er)) throw er + } + } } - ch = value[0]; - - if (ch === '-' || ch === '+') { - if (ch === '-') sign = -1; - value = value.slice(1); - ch = value[0]; + function statFix (orig) { + if (!orig) return orig + // Older versions of Node erroneously returned signed integers for + // uid + gid. + return function (target, options, cb) { + if (typeof options === 'function') { + cb = options + options = null + } + function callback (er, stats) { + if (stats) { + if (stats.uid < 0) stats.uid += 0x100000000 + if (stats.gid < 0) stats.gid += 0x100000000 + } + if (cb) cb.apply(this, arguments) + } + return options ? 
orig.call(fs, target, options, callback) + : orig.call(fs, target, callback) + } } - if (value === '0') return 0; - - if (ch === '0') { - if (value[1] === 'b') return sign * parseInt(value.slice(2), 2); - if (value[1] === 'x') return sign * parseInt(value.slice(2), 16); - if (value[1] === 'o') return sign * parseInt(value.slice(2), 8); + function statFixSync (orig) { + if (!orig) return orig + // Older versions of Node erroneously returned signed integers for + // uid + gid. + return function (target, options) { + var stats = options ? orig.call(fs, target, options) + : orig.call(fs, target) + if (stats.uid < 0) stats.uid += 0x100000000 + if (stats.gid < 0) stats.gid += 0x100000000 + return stats; + } } - return sign * parseInt(value, 10); -} + // ENOSYS means that the fs doesn't support the op. Just ignore + // that, because it doesn't matter. + // + // if there's no getuid, or if getuid() is something other + // than 0, and the error is EINVAL or EPERM, then just ignore + // it. + // + // This specific case is a silent failure in cp, install, tar, + // and most other unix tools that manage permissions. + // + // When running as root, or if other types of errors are + // encountered, then it's strict. + function chownErOk (er) { + if (!er) + return true -function isInteger(object) { - return (Object.prototype.toString.call(object)) === '[object Number]' && - (object % 1 === 0 && !common.isNegativeZero(object)); -} + if (er.code === "ENOSYS") + return true -module.exports = new Type('tag:yaml.org,2002:int', { - kind: 'scalar', - resolve: resolveYamlInteger, - construct: constructYamlInteger, - predicate: isInteger, - represent: { - binary: function (obj) { return obj >= 0 ? '0b' + obj.toString(2) : '-0b' + obj.toString(2).slice(1); }, - octal: function (obj) { return obj >= 0 ? '0o' + obj.toString(8) : '-0o' + obj.toString(8).slice(1); }, - decimal: function (obj) { return obj.toString(10); }, - /* eslint-disable max-len */ - hexadecimal: function (obj) { return obj >= 0 ? '0x' + obj.toString(16).toUpperCase() : '-0x' + obj.toString(16).toUpperCase().slice(1); } - }, - defaultStyle: 'decimal', - styleAliases: { - binary: [ 2, 'bin' ], - octal: [ 8, 'oct' ], - decimal: [ 10, 'dec' ], - hexadecimal: [ 16, 'hex' ] + var nonroot = !process.getuid || process.getuid() !== 0 + if (nonroot) { + if (er.code === "EINVAL" || er.code === "EPERM") + return true + } + + return false } -}); +} /***/ }), -/***/ 6150: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 3287: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -var Type = __nccwpck_require__(6073); +Object.defineProperty(exports, "__esModule", ({ value: true })); -module.exports = new Type('tag:yaml.org,2002:map', { - kind: 'mapping', - construct: function (data) { return data !== null ? data : {}; } -}); +/*! + * is-plain-object + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. 
+ */ +function isObject(o) { + return Object.prototype.toString.call(o) === '[object Object]'; +} -/***/ }), +function isPlainObject(o) { + var ctor,prot; -/***/ 6104: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (isObject(o) === false) return false; -"use strict"; + // If has modified constructor + ctor = o.constructor; + if (ctor === undefined) return true; + // If has modified prototype + prot = ctor.prototype; + if (isObject(prot) === false) return false; -var Type = __nccwpck_require__(6073); + // If constructor does not have an Object-specific method + if (prot.hasOwnProperty('isPrototypeOf') === false) { + return false; + } -function resolveYamlMerge(data) { - return data === '<<' || data === null; + // Most likely a plain Object + return true; } -module.exports = new Type('tag:yaml.org,2002:merge', { - kind: 'scalar', - resolve: resolveYamlMerge -}); +exports.isPlainObject = isPlainObject; /***/ }), -/***/ 721: +/***/ 1917: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var Type = __nccwpck_require__(6073); -function resolveYamlNull(data) { - if (data === null) return true; +var loader = __nccwpck_require__(1161); +var dumper = __nccwpck_require__(8866); - var max = data.length; - return (max === 1 && data === '~') || - (max === 4 && (data === 'null' || data === 'Null' || data === 'NULL')); +function renamed(from, to) { + return function () { + throw new Error('Function yaml.' + from + ' is removed in js-yaml 4. ' + + 'Use yaml.' + to + ' instead, which is now safe by default.'); + }; } -function constructYamlNull() { - return null; -} -function isNull(object) { - return object === null; -} +module.exports.Type = __nccwpck_require__(6073); +module.exports.Schema = __nccwpck_require__(1082); +module.exports.FAILSAFE_SCHEMA = __nccwpck_require__(8562); +module.exports.JSON_SCHEMA = __nccwpck_require__(1035); +module.exports.CORE_SCHEMA = __nccwpck_require__(2011); +module.exports.DEFAULT_SCHEMA = __nccwpck_require__(8759); +module.exports.load = loader.load; +module.exports.loadAll = loader.loadAll; +module.exports.dump = dumper.dump; +module.exports.YAMLException = __nccwpck_require__(8179); + +// Re-export all types in case user wants to create custom schema +module.exports.types = { + binary: __nccwpck_require__(7900), + float: __nccwpck_require__(2705), + map: __nccwpck_require__(6150), + null: __nccwpck_require__(721), + pairs: __nccwpck_require__(6860), + set: __nccwpck_require__(9548), + timestamp: __nccwpck_require__(9212), + bool: __nccwpck_require__(4993), + int: __nccwpck_require__(1615), + merge: __nccwpck_require__(6104), + omap: __nccwpck_require__(9046), + seq: __nccwpck_require__(7283), + str: __nccwpck_require__(3619) +}; + +// Removed functions from JS-YAML 3.0.x +module.exports.safeLoad = renamed('safeLoad', 'load'); +module.exports.safeLoadAll = renamed('safeLoadAll', 'loadAll'); +module.exports.safeDump = renamed('safeDump', 'dump'); + + +/***/ }), + +/***/ 6829: +/***/ ((module) => { + +"use strict"; -module.exports = new Type('tag:yaml.org,2002:null', { - kind: 'scalar', - resolve: resolveYamlNull, - construct: constructYamlNull, - predicate: isNull, - represent: { - canonical: function () { return '~'; }, - lowercase: function () { return 'null'; }, - uppercase: function () { return 'NULL'; }, - camelcase: function () { return 'Null'; }, - empty: function () { return ''; } - }, - defaultStyle: 'lowercase' -}); -/***/ }), +function isNothing(subject) { + return (typeof subject === 'undefined') || 
(subject === null); +} + -/***/ 9046: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +function isObject(subject) { + return (typeof subject === 'object') && (subject !== null); +} -"use strict"; +function toArray(sequence) { + if (Array.isArray(sequence)) return sequence; + else if (isNothing(sequence)) return []; -var Type = __nccwpck_require__(6073); + return [ sequence ]; +} -var _hasOwnProperty = Object.prototype.hasOwnProperty; -var _toString = Object.prototype.toString; -function resolveYamlOmap(data) { - if (data === null) return true; +function extend(target, source) { + var index, length, key, sourceKeys; - var objectKeys = [], index, length, pair, pairKey, pairHasKey, - object = data; + if (source) { + sourceKeys = Object.keys(source); - for (index = 0, length = object.length; index < length; index += 1) { - pair = object[index]; - pairHasKey = false; + for (index = 0, length = sourceKeys.length; index < length; index += 1) { + key = sourceKeys[index]; + target[key] = source[key]; + } + } - if (_toString.call(pair) !== '[object Object]') return false; + return target; +} - for (pairKey in pair) { - if (_hasOwnProperty.call(pair, pairKey)) { - if (!pairHasKey) pairHasKey = true; - else return false; - } - } - if (!pairHasKey) return false; +function repeat(string, count) { + var result = '', cycle; - if (objectKeys.indexOf(pairKey) === -1) objectKeys.push(pairKey); - else return false; + for (cycle = 0; cycle < count; cycle += 1) { + result += string; } - return true; + return result; } -function constructYamlOmap(data) { - return data !== null ? data : []; + +function isNegativeZero(number) { + return (number === 0) && (Number.NEGATIVE_INFINITY === 1 / number); } -module.exports = new Type('tag:yaml.org,2002:omap', { - kind: 'sequence', - resolve: resolveYamlOmap, - construct: constructYamlOmap -}); + +module.exports.isNothing = isNothing; +module.exports.isObject = isObject; +module.exports.toArray = toArray; +module.exports.repeat = repeat; +module.exports.isNegativeZero = isNegativeZero; +module.exports.extend = extend; /***/ }), -/***/ 6860: +/***/ 8866: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var Type = __nccwpck_require__(6073); - -var _toString = Object.prototype.toString; - -function resolveYamlPairs(data) { - if (data === null) return true; +/*eslint-disable no-use-before-define*/ - var index, length, pair, keys, result, - object = data; +var common = __nccwpck_require__(6829); +var YAMLException = __nccwpck_require__(8179); +var DEFAULT_SCHEMA = __nccwpck_require__(8759); - result = new Array(object.length); +var _toString = Object.prototype.toString; +var _hasOwnProperty = Object.prototype.hasOwnProperty; - for (index = 0, length = object.length; index < length; index += 1) { - pair = object[index]; +var CHAR_BOM = 0xFEFF; +var CHAR_TAB = 0x09; /* Tab */ +var CHAR_LINE_FEED = 0x0A; /* LF */ +var CHAR_CARRIAGE_RETURN = 0x0D; /* CR */ +var CHAR_SPACE = 0x20; /* Space */ +var CHAR_EXCLAMATION = 0x21; /* ! */ +var CHAR_DOUBLE_QUOTE = 0x22; /* " */ +var CHAR_SHARP = 0x23; /* # */ +var CHAR_PERCENT = 0x25; /* % */ +var CHAR_AMPERSAND = 0x26; /* & */ +var CHAR_SINGLE_QUOTE = 0x27; /* ' */ +var CHAR_ASTERISK = 0x2A; /* * */ +var CHAR_COMMA = 0x2C; /* , */ +var CHAR_MINUS = 0x2D; /* - */ +var CHAR_COLON = 0x3A; /* : */ +var CHAR_EQUALS = 0x3D; /* = */ +var CHAR_GREATER_THAN = 0x3E; /* > */ +var CHAR_QUESTION = 0x3F; /* ? 
*/ +var CHAR_COMMERCIAL_AT = 0x40; /* @ */ +var CHAR_LEFT_SQUARE_BRACKET = 0x5B; /* [ */ +var CHAR_RIGHT_SQUARE_BRACKET = 0x5D; /* ] */ +var CHAR_GRAVE_ACCENT = 0x60; /* ` */ +var CHAR_LEFT_CURLY_BRACKET = 0x7B; /* { */ +var CHAR_VERTICAL_LINE = 0x7C; /* | */ +var CHAR_RIGHT_CURLY_BRACKET = 0x7D; /* } */ - if (_toString.call(pair) !== '[object Object]') return false; +var ESCAPE_SEQUENCES = {}; - keys = Object.keys(pair); +ESCAPE_SEQUENCES[0x00] = '\\0'; +ESCAPE_SEQUENCES[0x07] = '\\a'; +ESCAPE_SEQUENCES[0x08] = '\\b'; +ESCAPE_SEQUENCES[0x09] = '\\t'; +ESCAPE_SEQUENCES[0x0A] = '\\n'; +ESCAPE_SEQUENCES[0x0B] = '\\v'; +ESCAPE_SEQUENCES[0x0C] = '\\f'; +ESCAPE_SEQUENCES[0x0D] = '\\r'; +ESCAPE_SEQUENCES[0x1B] = '\\e'; +ESCAPE_SEQUENCES[0x22] = '\\"'; +ESCAPE_SEQUENCES[0x5C] = '\\\\'; +ESCAPE_SEQUENCES[0x85] = '\\N'; +ESCAPE_SEQUENCES[0xA0] = '\\_'; +ESCAPE_SEQUENCES[0x2028] = '\\L'; +ESCAPE_SEQUENCES[0x2029] = '\\P'; - if (keys.length !== 1) return false; +var DEPRECATED_BOOLEANS_SYNTAX = [ + 'y', 'Y', 'yes', 'Yes', 'YES', 'on', 'On', 'ON', + 'n', 'N', 'no', 'No', 'NO', 'off', 'Off', 'OFF' +]; - result[index] = [ keys[0], pair[keys[0]] ]; - } +var DEPRECATED_BASE60_SYNTAX = /^[-+]?[0-9_]+(?::[0-9_]+)+(?:\.[0-9_]*)?$/; - return true; -} +function compileStyleMap(schema, map) { + var result, keys, index, length, tag, style, type; -function constructYamlPairs(data) { - if (data === null) return []; + if (map === null) return {}; - var index, length, pair, keys, result, - object = data; + result = {}; + keys = Object.keys(map); - result = new Array(object.length); + for (index = 0, length = keys.length; index < length; index += 1) { + tag = keys[index]; + style = String(map[tag]); - for (index = 0, length = object.length; index < length; index += 1) { - pair = object[index]; + if (tag.slice(0, 2) === '!!') { + tag = 'tag:yaml.org,2002:' + tag.slice(2); + } + type = schema.compiledTypeMap['fallback'][tag]; - keys = Object.keys(pair); + if (type && _hasOwnProperty.call(type.styleAliases, style)) { + style = type.styleAliases[style]; + } - result[index] = [ keys[0], pair[keys[0]] ]; + result[tag] = style; } return result; } -module.exports = new Type('tag:yaml.org,2002:pairs', { - kind: 'sequence', - resolve: resolveYamlPairs, - construct: constructYamlPairs -}); +function encodeHex(character) { + var string, handle, length; + string = character.toString(16).toUpperCase(); -/***/ }), + if (character <= 0xFF) { + handle = 'x'; + length = 2; + } else if (character <= 0xFFFF) { + handle = 'u'; + length = 4; + } else if (character <= 0xFFFFFFFF) { + handle = 'U'; + length = 8; + } else { + throw new YAMLException('code point within a string may not be greater than 0xFFFFFFFF'); + } -/***/ 7283: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + return '\\' + handle + common.repeat('0', length - string.length) + string; +} -"use strict"; +var QUOTING_TYPE_SINGLE = 1, + QUOTING_TYPE_DOUBLE = 2; -var Type = __nccwpck_require__(6073); +function State(options) { + this.schema = options['schema'] || DEFAULT_SCHEMA; + this.indent = Math.max(1, (options['indent'] || 2)); + this.noArrayIndent = options['noArrayIndent'] || false; + this.skipInvalid = options['skipInvalid'] || false; + this.flowLevel = (common.isNothing(options['flowLevel']) ? 
-1 : options['flowLevel']); + this.styleMap = compileStyleMap(this.schema, options['styles'] || null); + this.sortKeys = options['sortKeys'] || false; + this.lineWidth = options['lineWidth'] || 80; + this.noRefs = options['noRefs'] || false; + this.noCompatMode = options['noCompatMode'] || false; + this.condenseFlow = options['condenseFlow'] || false; + this.quotingType = options['quotingType'] === '"' ? QUOTING_TYPE_DOUBLE : QUOTING_TYPE_SINGLE; + this.forceQuotes = options['forceQuotes'] || false; + this.replacer = typeof options['replacer'] === 'function' ? options['replacer'] : null; -module.exports = new Type('tag:yaml.org,2002:seq', { - kind: 'sequence', - construct: function (data) { return data !== null ? data : []; } -}); + this.implicitTypes = this.schema.compiledImplicit; + this.explicitTypes = this.schema.compiledExplicit; + this.tag = null; + this.result = ''; -/***/ }), + this.duplicates = []; + this.usedDuplicates = null; +} -/***/ 9548: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +// Indents every line in a string. Empty lines (\n only) are not indented. +function indentString(string, spaces) { + var ind = common.repeat(' ', spaces), + position = 0, + next = -1, + result = '', + line, + length = string.length; -"use strict"; + while (position < length) { + next = string.indexOf('\n', position); + if (next === -1) { + line = string.slice(position); + position = length; + } else { + line = string.slice(position, next + 1); + position = next + 1; + } + + if (line.length && line !== '\n') result += ind; + result += line; + } -var Type = __nccwpck_require__(6073); + return result; +} -var _hasOwnProperty = Object.prototype.hasOwnProperty; +function generateNextLine(state, level) { + return '\n' + common.repeat(' ', state.indent * level); +} -function resolveYamlSet(data) { - if (data === null) return true; +function testImplicitResolving(state, str) { + var index, length, type; - var key, object = data; + for (index = 0, length = state.implicitTypes.length; index < length; index += 1) { + type = state.implicitTypes[index]; - for (key in object) { - if (_hasOwnProperty.call(object, key)) { - if (object[key] !== null) return false; + if (type.resolve(str)) { + return true; } } - return true; + return false; } -function constructYamlSet(data) { - return data !== null ? data : {}; +// [33] s-white ::= s-space | s-tab +function isWhitespace(c) { + return c === CHAR_SPACE || c === CHAR_TAB; } -module.exports = new Type('tag:yaml.org,2002:set', { - kind: 'mapping', - resolve: resolveYamlSet, - construct: constructYamlSet -}); - +// Returns true if the character can be printed without escaping. +// From YAML 1.2: "any allowed characters known to be non-printable +// should also be escaped. [However,] This isn’t mandatory" +// Derived from nb-char - \t - #x85 - #xA0 - #x2028 - #x2029. 
+function isPrintable(c) { + return (0x00020 <= c && c <= 0x00007E) + || ((0x000A1 <= c && c <= 0x00D7FF) && c !== 0x2028 && c !== 0x2029) + || ((0x0E000 <= c && c <= 0x00FFFD) && c !== CHAR_BOM) + || (0x10000 <= c && c <= 0x10FFFF); +} -/***/ }), +// [34] ns-char ::= nb-char - s-white +// [27] nb-char ::= c-printable - b-char - c-byte-order-mark +// [26] b-char ::= b-line-feed | b-carriage-return +// Including s-white (for some reason, examples doesn't match specs in this aspect) +// ns-char ::= c-printable - b-line-feed - b-carriage-return - c-byte-order-mark +function isNsCharOrWhitespace(c) { + return isPrintable(c) + && c !== CHAR_BOM + // - b-char + && c !== CHAR_CARRIAGE_RETURN + && c !== CHAR_LINE_FEED; +} -/***/ 3619: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +// [127] ns-plain-safe(c) ::= c = flow-out ⇒ ns-plain-safe-out +// c = flow-in ⇒ ns-plain-safe-in +// c = block-key ⇒ ns-plain-safe-out +// c = flow-key ⇒ ns-plain-safe-in +// [128] ns-plain-safe-out ::= ns-char +// [129] ns-plain-safe-in ::= ns-char - c-flow-indicator +// [130] ns-plain-char(c) ::= ( ns-plain-safe(c) - “:” - “#” ) +// | ( /* An ns-char preceding */ “#” ) +// | ( “:” /* Followed by an ns-plain-safe(c) */ ) +function isPlainSafe(c, prev, inblock) { + var cIsNsCharOrWhitespace = isNsCharOrWhitespace(c); + var cIsNsChar = cIsNsCharOrWhitespace && !isWhitespace(c); + return ( + // ns-plain-safe + inblock ? // c = flow-in + cIsNsCharOrWhitespace + : cIsNsCharOrWhitespace + // - c-flow-indicator + && c !== CHAR_COMMA + && c !== CHAR_LEFT_SQUARE_BRACKET + && c !== CHAR_RIGHT_SQUARE_BRACKET + && c !== CHAR_LEFT_CURLY_BRACKET + && c !== CHAR_RIGHT_CURLY_BRACKET + ) + // ns-plain-char + && c !== CHAR_SHARP // false on '#' + && !(prev === CHAR_COLON && !cIsNsChar) // false on ': ' + || (isNsCharOrWhitespace(prev) && !isWhitespace(prev) && c === CHAR_SHARP) // change to true on '[^ ]#' + || (prev === CHAR_COLON && cIsNsChar); // change to true on ':[^ ]' +} -"use strict"; +// Simplified test for values allowed as the first character in plain style. +function isPlainSafeFirst(c) { + // Uses a subset of ns-char - c-indicator + // where ns-char = nb-char - s-white. + // No support of ( ( “?” | “:” | “-” ) /* Followed by an ns-plain-safe(c)) */ ) part + return isPrintable(c) && c !== CHAR_BOM + && !isWhitespace(c) // - s-white + // - (c-indicator ::= + // “-” | “?” | “:” | “,” | “[” | “]” | “{” | “}” + && c !== CHAR_MINUS + && c !== CHAR_QUESTION + && c !== CHAR_COLON + && c !== CHAR_COMMA + && c !== CHAR_LEFT_SQUARE_BRACKET + && c !== CHAR_RIGHT_SQUARE_BRACKET + && c !== CHAR_LEFT_CURLY_BRACKET + && c !== CHAR_RIGHT_CURLY_BRACKET + // | “#” | “&” | “*” | “!” | “|” | “=” | “>” | “'” | “"” + && c !== CHAR_SHARP + && c !== CHAR_AMPERSAND + && c !== CHAR_ASTERISK + && c !== CHAR_EXCLAMATION + && c !== CHAR_VERTICAL_LINE + && c !== CHAR_EQUALS + && c !== CHAR_GREATER_THAN + && c !== CHAR_SINGLE_QUOTE + && c !== CHAR_DOUBLE_QUOTE + // | “%” | “@” | “`”) + && c !== CHAR_PERCENT + && c !== CHAR_COMMERCIAL_AT + && c !== CHAR_GRAVE_ACCENT; +} +// Simplified test for values allowed as the last character in plain style. +function isPlainSafeLast(c) { + // just not whitespace or colon, it will be checked to be plain character later + return !isWhitespace(c) && c !== CHAR_COLON; +} -var Type = __nccwpck_require__(6073); +// Same as 'string'.codePointAt(pos), but works in older browsers. 
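// Worked example of the surrogate-pair arithmetic implemented just below
// (U+1F4A1 is only an illustrative character; the native method is the
// modern-runtime equivalent):
console.assert((0xD83D - 0xD800) * 0x400 + (0xDCA1 - 0xDC00) + 0x10000 === 0x1F4A1)
console.assert('\uD83D\uDCA1'.codePointAt(0) === 0x1F4A1) // native codePointAt sees the full code point
console.assert('\uD83D\uDCA1'.charCodeAt(0) === 0xD83D)   // charCodeAt only sees the high surrogate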
+function codePointAt(string, pos) { + var first = string.charCodeAt(pos), second; + if (first >= 0xD800 && first <= 0xDBFF && pos + 1 < string.length) { + second = string.charCodeAt(pos + 1); + if (second >= 0xDC00 && second <= 0xDFFF) { + // https://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae + return (first - 0xD800) * 0x400 + second - 0xDC00 + 0x10000; + } + } + return first; +} -module.exports = new Type('tag:yaml.org,2002:str', { - kind: 'scalar', - construct: function (data) { return data !== null ? data : ''; } -}); +// Determines whether block indentation indicator is required. +function needIndentIndicator(string) { + var leadingSpaceRe = /^\n* /; + return leadingSpaceRe.test(string); +} +var STYLE_PLAIN = 1, + STYLE_SINGLE = 2, + STYLE_LITERAL = 3, + STYLE_FOLDED = 4, + STYLE_DOUBLE = 5; -/***/ }), +// Determines which scalar styles are possible and returns the preferred style. +// lineWidth = -1 => no limit. +// Pre-conditions: str.length > 0. +// Post-conditions: +// STYLE_PLAIN or STYLE_SINGLE => no \n are in the string. +// STYLE_LITERAL => no lines are suitable for folding (or lineWidth is -1). +// STYLE_FOLDED => a line > lineWidth and can be folded (and lineWidth != -1). +function chooseScalarStyle(string, singleLineOnly, indentPerLevel, lineWidth, + testAmbiguousType, quotingType, forceQuotes, inblock) { -/***/ 9212: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + var i; + var char = 0; + var prevChar = null; + var hasLineBreak = false; + var hasFoldableLine = false; // only checked if shouldTrackWidth + var shouldTrackWidth = lineWidth !== -1; + var previousLineBreak = -1; // count the first line correctly + var plain = isPlainSafeFirst(codePointAt(string, 0)) + && isPlainSafeLast(codePointAt(string, string.length - 1)); -"use strict"; + if (singleLineOnly || forceQuotes) { + // Case: no block styles. + // Check for disallowed characters to rule out plain and single. + for (i = 0; i < string.length; char >= 0x10000 ? i += 2 : i++) { + char = codePointAt(string, i); + if (!isPrintable(char)) { + return STYLE_DOUBLE; + } + plain = plain && isPlainSafe(char, prevChar, inblock); + prevChar = char; + } + } else { + // Case: block styles permitted. + for (i = 0; i < string.length; char >= 0x10000 ? i += 2 : i++) { + char = codePointAt(string, i); + if (char === CHAR_LINE_FEED) { + hasLineBreak = true; + // Check if any line can be folded. + if (shouldTrackWidth) { + hasFoldableLine = hasFoldableLine || + // Foldable line = too long, and not more-indented. + (i - previousLineBreak - 1 > lineWidth && + string[previousLineBreak + 1] !== ' '); + previousLineBreak = i; + } + } else if (!isPrintable(char)) { + return STYLE_DOUBLE; + } + plain = plain && isPlainSafe(char, prevChar, inblock); + prevChar = char; + } + // in case the end is missing a \n + hasFoldableLine = hasFoldableLine || (shouldTrackWidth && + (i - previousLineBreak - 1 > lineWidth && + string[previousLineBreak + 1] !== ' ')); + } + // Although every style can represent \n without escaping, prefer block styles + // for multiline, since they're more readable and they don't add empty lines. + // Also prefer folding a super-long line. + if (!hasLineBreak && !hasFoldableLine) { + // Strings interpretable as another type have to be quoted; + // e.g. the string 'true' vs. the boolean true. + if (plain && !forceQuotes && !testAmbiguousType(string)) { + return STYLE_PLAIN; + } + return quotingType === QUOTING_TYPE_DOUBLE ? 
STYLE_DOUBLE : STYLE_SINGLE; + } + // Edge case: block indentation indicator can only have one digit. + if (indentPerLevel > 9 && needIndentIndicator(string)) { + return STYLE_DOUBLE; + } + // At this point we know block styles are valid. + // Prefer literal style unless we want to fold. + if (!forceQuotes) { + return hasFoldableLine ? STYLE_FOLDED : STYLE_LITERAL; + } + return quotingType === QUOTING_TYPE_DOUBLE ? STYLE_DOUBLE : STYLE_SINGLE; +} +// Note: line breaking/folding is implemented for only the folded style. +// NB. We drop the last trailing newline (if any) of a returned block scalar +// since the dumper adds its own newline. This always works: +// • No ending newline => unaffected; already using strip "-" chomping. +// • Ending newline => removed then restored. +// Importantly, this keeps the "+" chomp indicator from gaining an extra line. +function writeScalar(state, string, level, iskey, inblock) { + state.dump = (function () { + if (string.length === 0) { + return state.quotingType === QUOTING_TYPE_DOUBLE ? '""' : "''"; + } + if (!state.noCompatMode) { + if (DEPRECATED_BOOLEANS_SYNTAX.indexOf(string) !== -1 || DEPRECATED_BASE60_SYNTAX.test(string)) { + return state.quotingType === QUOTING_TYPE_DOUBLE ? ('"' + string + '"') : ("'" + string + "'"); + } + } -var Type = __nccwpck_require__(6073); + var indent = state.indent * Math.max(1, level); // no 0-indent scalars + // As indentation gets deeper, let the width decrease monotonically + // to the lower bound min(state.lineWidth, 40). + // Note that this implies + // state.lineWidth ≤ 40 + state.indent: width is fixed at the lower bound. + // state.lineWidth > 40 + state.indent: width decreases until the lower bound. + // This behaves better than a constant minimum width which disallows narrower options, + // or an indent threshold which causes the width to suddenly increase. + var lineWidth = state.lineWidth === -1 + ? -1 : Math.max(Math.min(state.lineWidth, 40), state.lineWidth - indent); -var YAML_DATE_REGEXP = new RegExp( - '^([0-9][0-9][0-9][0-9])' + // [1] year - '-([0-9][0-9])' + // [2] month - '-([0-9][0-9])$'); // [3] day + // Without knowing if keys are implicit/explicit, assume implicit for safety. + var singleLineOnly = iskey + // No block styles in flow mode. + || (state.flowLevel > -1 && level >= state.flowLevel); + function testAmbiguity(string) { + return testImplicitResolving(state, string); + } -var YAML_TIMESTAMP_REGEXP = new RegExp( - '^([0-9][0-9][0-9][0-9])' + // [1] year - '-([0-9][0-9]?)' + // [2] month - '-([0-9][0-9]?)' + // [3] day - '(?:[Tt]|[ \\t]+)' + // ... - '([0-9][0-9]?)' + // [4] hour - ':([0-9][0-9])' + // [5] minute - ':([0-9][0-9])' + // [6] second - '(?:\\.([0-9]*))?' 
+ // [7] fraction - '(?:[ \\t]*(Z|([-+])([0-9][0-9]?)' + // [8] tz [9] tz_sign [10] tz_hour - '(?::([0-9][0-9]))?))?$'); // [11] tz_minute + switch (chooseScalarStyle(string, singleLineOnly, state.indent, lineWidth, + testAmbiguity, state.quotingType, state.forceQuotes && !iskey, inblock)) { -function resolveYamlTimestamp(data) { - if (data === null) return false; - if (YAML_DATE_REGEXP.exec(data) !== null) return true; - if (YAML_TIMESTAMP_REGEXP.exec(data) !== null) return true; - return false; + case STYLE_PLAIN: + return string; + case STYLE_SINGLE: + return "'" + string.replace(/'/g, "''") + "'"; + case STYLE_LITERAL: + return '|' + blockHeader(string, state.indent) + + dropEndingNewline(indentString(string, indent)); + case STYLE_FOLDED: + return '>' + blockHeader(string, state.indent) + + dropEndingNewline(indentString(foldString(string, lineWidth), indent)); + case STYLE_DOUBLE: + return '"' + escapeString(string, lineWidth) + '"'; + default: + throw new YAMLException('impossible error: invalid scalar style'); + } + }()); } -function constructYamlTimestamp(data) { - var match, year, month, day, hour, minute, second, fraction = 0, - delta = null, tz_hour, tz_minute, date; +// Pre-conditions: string is valid for a block scalar, 1 <= indentPerLevel <= 9. +function blockHeader(string, indentPerLevel) { + var indentIndicator = needIndentIndicator(string) ? String(indentPerLevel) : ''; - match = YAML_DATE_REGEXP.exec(data); - if (match === null) match = YAML_TIMESTAMP_REGEXP.exec(data); + // note the special case: the string '\n' counts as a "trailing" empty line. + var clip = string[string.length - 1] === '\n'; + var keep = clip && (string[string.length - 2] === '\n' || string === '\n'); + var chomp = keep ? '+' : (clip ? '' : '-'); - if (match === null) throw new Error('Date resolve error'); + return indentIndicator + chomp + '\n'; +} - // match: [1] year [2] month [3] day +// (See the note for writeScalar.) +function dropEndingNewline(string) { + return string[string.length - 1] === '\n' ? string.slice(0, -1) : string; +} - year = +(match[1]); - month = +(match[2]) - 1; // JS month starts with 0 - day = +(match[3]); +// Note: a long line without a suitable break point will exceed the width limit. +// Pre-conditions: every char in str isPrintable, str.length > 0, width > 0. +function foldString(string, width) { + // In folded style, $k$ consecutive newlines output as $k+1$ newlines— + // unless they're before or after a more-indented line, or at the very + // beginning or end, in which case $k$ maps to $k$. + // Therefore, parse each chunk as newline(s) followed by a content line. + var lineRe = /(\n+)([^\n]*)/g; - if (!match[4]) { // no hour - return new Date(Date.UTC(year, month, day)); + // first line (possibly an empty line) + var result = (function () { + var nextLF = string.indexOf('\n'); + nextLF = nextLF !== -1 ? nextLF : string.length; + lineRe.lastIndex = nextLF; + return foldLine(string.slice(0, nextLF), width); + }()); + // If we haven't reached the first content line yet, don't add an extra \n. + var prevMoreIndented = string[0] === '\n' || string[0] === ' '; + var moreIndented; + + // rest of the lines + var match; + while ((match = lineRe.exec(string))) { + var prefix = match[1], line = match[2]; + moreIndented = (line[0] === ' '); + result += prefix + + (!prevMoreIndented && !moreIndented && line !== '' + ? 
'\n' : '') + + foldLine(line, width); + prevMoreIndented = moreIndented; } - // match: [4] hour [5] minute [6] second [7] fraction + return result; +} - hour = +(match[4]); - minute = +(match[5]); - second = +(match[6]); +// Greedy line breaking. +// Picks the longest line under the limit each time, +// otherwise settles for the shortest line over the limit. +// NB. More-indented lines *cannot* be folded, as that would add an extra \n. +function foldLine(line, width) { + if (line === '' || line[0] === ' ') return line; - if (match[7]) { - fraction = match[7].slice(0, 3); - while (fraction.length < 3) { // milli-seconds - fraction += '0'; + // Since a more-indented line adds a \n, breaks can't be followed by a space. + var breakRe = / [^ ]/g; // note: the match index will always be <= length-2. + var match; + // start is an inclusive index. end, curr, and next are exclusive. + var start = 0, end, curr = 0, next = 0; + var result = ''; + + // Invariants: 0 <= start <= length-1. + // 0 <= curr <= next <= max(0, length-2). curr - start <= width. + // Inside the loop: + // A match implies length >= 2, so curr and next are <= length-2. + while ((match = breakRe.exec(line))) { + next = match.index; + // maintain invariant: curr - start <= width + if (next - start > width) { + end = (curr > start) ? curr : next; // derive end <= length-2 + result += '\n' + line.slice(start, end); + // skip the space that was output as \n + start = end + 1; // derive start <= length-1 } - fraction = +fraction; + curr = next; } - // match: [8] tz [9] tz_sign [10] tz_hour [11] tz_minute - - if (match[9]) { - tz_hour = +(match[10]); - tz_minute = +(match[11] || 0); - delta = (tz_hour * 60 + tz_minute) * 60000; // delta in mili-seconds - if (match[9] === '-') delta = -delta; + // By the invariants, start <= length-1, so there is something left over. + // It is either the whole string or a part starting from non-whitespace. + result += '\n'; + // Insert a break if the remainder is too long and there is a break available. + if (line.length - start > width && curr > start) { + result += line.slice(start, curr) + '\n' + line.slice(curr + 1); + } else { + result += line.slice(start); } - date = new Date(Date.UTC(year, month, day, hour, minute, second, fraction)); - - if (delta) date.setTime(date.getTime() - delta); - - return date; -} - -function representYamlTimestamp(object /*, style*/) { - return object.toISOString(); + return result.slice(1); // drop extra \n joiner } -module.exports = new Type('tag:yaml.org,2002:timestamp', { - kind: 'scalar', - resolve: resolveYamlTimestamp, - construct: constructYamlTimestamp, - instanceOf: Date, - represent: representYamlTimestamp -}); - +// Escapes a double-quoted string. +function escapeString(string) { + var result = ''; + var char = 0; + var escapeSeq; -/***/ }), + for (var i = 0; i < string.length; char >= 0x10000 ? 
i += 2 : i++) { + char = codePointAt(string, i); + escapeSeq = ESCAPE_SEQUENCES[char]; -/***/ 6160: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (!escapeSeq && isPrintable(char)) { + result += string[i]; + if (char >= 0x10000) result += string[i + 1]; + } else { + result += escapeSeq || encodeHex(char); + } + } -let _fs -try { - _fs = __nccwpck_require__(7758) -} catch (_) { - _fs = __nccwpck_require__(5747) + return result; } -const universalify = __nccwpck_require__(1463) -const { stringify, stripBom } = __nccwpck_require__(5902) - -async function _readFile (file, options = {}) { - if (typeof options === 'string') { - options = { encoding: options } - } - const fs = options.fs || _fs +function writeFlowSequence(state, level, object) { + var _result = '', + _tag = state.tag, + index, + length, + value; - const shouldThrow = 'throws' in options ? options.throws : true + for (index = 0, length = object.length; index < length; index += 1) { + value = object[index]; - let data = await universalify.fromCallback(fs.readFile)(file, options) + if (state.replacer) { + value = state.replacer.call(object, String(index), value); + } - data = stripBom(data) + // Write only valid elements, put null instead of invalid elements. + if (writeNode(state, level, value, false, false) || + (typeof value === 'undefined' && + writeNode(state, level, null, false, false))) { - let obj - try { - obj = JSON.parse(data, options ? options.reviver : null) - } catch (err) { - if (shouldThrow) { - err.message = `${file}: ${err.message}` - throw err - } else { - return null + if (_result !== '') _result += ',' + (!state.condenseFlow ? ' ' : ''); + _result += state.dump; } } - return obj + state.tag = _tag; + state.dump = '[' + _result + ']'; } -const readFile = universalify.fromPromise(_readFile) +function writeBlockSequence(state, level, object, compact) { + var _result = '', + _tag = state.tag, + index, + length, + value; -function readFileSync (file, options = {}) { - if (typeof options === 'string') { - options = { encoding: options } - } + for (index = 0, length = object.length; index < length; index += 1) { + value = object[index]; - const fs = options.fs || _fs + if (state.replacer) { + value = state.replacer.call(object, String(index), value); + } - const shouldThrow = 'throws' in options ? options.throws : true + // Write only valid elements, put null instead of invalid elements. + if (writeNode(state, level + 1, value, true, true, false, true) || + (typeof value === 'undefined' && + writeNode(state, level + 1, null, true, true, false, true))) { - try { - let content = fs.readFileSync(file, options) - content = stripBom(content) - return JSON.parse(content, options.reviver) - } catch (err) { - if (shouldThrow) { - err.message = `${file}: ${err.message}` - throw err - } else { - return null + if (!compact || _result !== '') { + _result += generateNextLine(state, level); + } + + if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) { + _result += '-'; + } else { + _result += '- '; + } + + _result += state.dump; } } + + state.tag = _tag; + state.dump = _result || '[]'; // Empty sequence if no valid values. 
} -async function _writeFile (file, obj, options = {}) { - const fs = options.fs || _fs +function writeFlowMapping(state, level, object) { + var _result = '', + _tag = state.tag, + objectKeyList = Object.keys(object), + index, + length, + objectKey, + objectValue, + pairBuffer; - const str = stringify(obj, options) + for (index = 0, length = objectKeyList.length; index < length; index += 1) { - await universalify.fromCallback(fs.writeFile)(file, str, options) -} + pairBuffer = ''; + if (_result !== '') pairBuffer += ', '; -const writeFile = universalify.fromPromise(_writeFile) + if (state.condenseFlow) pairBuffer += '"'; -function writeFileSync (file, obj, options = {}) { - const fs = options.fs || _fs + objectKey = objectKeyList[index]; + objectValue = object[objectKey]; - const str = stringify(obj, options) - // not sure if fs.writeFileSync returns anything, but just in case - return fs.writeFileSync(file, str, options) -} + if (state.replacer) { + objectValue = state.replacer.call(object, objectKey, objectValue); + } -const jsonfile = { - readFile, - readFileSync, - writeFile, - writeFileSync -} + if (!writeNode(state, level, objectKey, false, false)) { + continue; // Skip this pair because of invalid key; + } -module.exports = jsonfile + if (state.dump.length > 1024) pairBuffer += '? '; + pairBuffer += state.dump + (state.condenseFlow ? '"' : '') + ':' + (state.condenseFlow ? '' : ' '); -/***/ }), + if (!writeNode(state, level, objectValue, false, false)) { + continue; // Skip this pair because of invalid value. + } -/***/ 5902: -/***/ ((module) => { + pairBuffer += state.dump; -function stringify (obj, { EOL = '\n', finalEOL = true, replacer = null, spaces } = {}) { - const EOF = finalEOL ? EOL : '' - const str = JSON.stringify(obj, replacer, spaces) + // Both key and value are valid. 
+ _result += pairBuffer; + } - return str.replace(/\n/g, EOL) + EOF + state.tag = _tag; + state.dump = '{' + _result + '}'; } -function stripBom (content) { - // we do this because JSON.parse would convert it to a utf8 string if encoding wasn't specified - if (Buffer.isBuffer(content)) content = content.toString('utf8') - return content.replace(/^\uFEFF/, '') -} +function writeBlockMapping(state, level, object, compact) { + var _result = '', + _tag = state.tag, + objectKeyList = Object.keys(object), + index, + length, + objectKey, + objectValue, + explicitPair, + pairBuffer; -module.exports = { stringify, stripBom } + // Allow sorting keys so that the output file is deterministic + if (state.sortKeys === true) { + // Default sorting + objectKeyList.sort(); + } else if (typeof state.sortKeys === 'function') { + // Custom sort function + objectKeyList.sort(state.sortKeys); + } else if (state.sortKeys) { + // Something is wrong + throw new YAMLException('sortKeys must be a boolean or a function'); + } + for (index = 0, length = objectKeyList.length; index < length; index += 1) { + pairBuffer = ''; -/***/ }), + if (!compact || _result !== '') { + pairBuffer += generateNextLine(state, level); + } -/***/ 9197: -/***/ ((module) => { + objectKey = objectKeyList[index]; + objectValue = object[objectKey]; -/** - * lodash (Custom Build) - * Build: `lodash modularize exports="npm" -o ./` - * Copyright jQuery Foundation and other contributors - * Released under MIT license - * Based on Underscore.js 1.8.3 - * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors - */ + if (state.replacer) { + objectValue = state.replacer.call(object, objectKey, objectValue); + } -/** Used as the `TypeError` message for "Functions" methods. */ -var FUNC_ERROR_TEXT = 'Expected a function'; + if (!writeNode(state, level + 1, objectKey, true, true, true)) { + continue; // Skip this pair because of invalid key. + } + + explicitPair = (state.tag !== null && state.tag !== '?') || + (state.dump && state.dump.length > 1024); + + if (explicitPair) { + if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) { + pairBuffer += '?'; + } else { + pairBuffer += '? '; + } + } -/** Used to stand-in for `undefined` hash values. */ -var HASH_UNDEFINED = '__lodash_hash_undefined__'; + pairBuffer += state.dump; -/** Used as references for various `Number` constants. */ -var INFINITY = 1 / 0; + if (explicitPair) { + pairBuffer += generateNextLine(state, level); + } -/** `Object#toString` result references. */ -var funcTag = '[object Function]', - genTag = '[object GeneratorFunction]', - symbolTag = '[object Symbol]'; + if (!writeNode(state, level + 1, objectValue, true, explicitPair)) { + continue; // Skip this pair because of invalid value. + } -/** Used to match property names within property paths. */ -var reIsDeepProp = /\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/, - reIsPlainProp = /^\w*$/, - reLeadingDot = /^\./, - rePropName = /[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g; + if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) { + pairBuffer += ':'; + } else { + pairBuffer += ': '; + } -/** - * Used to match `RegExp` - * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns). - */ -var reRegExpChar = /[\\^$.*+?()[\]{}|]/g; + pairBuffer += state.dump; -/** Used to match backslashes in property paths. */ -var reEscapeChar = /\\(\\)?/g; + // Both key and value are valid. 
+ _result += pairBuffer; + } -/** Used to detect host constructors (Safari). */ -var reIsHostCtor = /^\[object .+?Constructor\]$/; + state.tag = _tag; + state.dump = _result || '{}'; // Empty mapping if no valid pairs. +} -/** Detect free variable `global` from Node.js. */ -var freeGlobal = typeof global == 'object' && global && global.Object === Object && global; +function detectType(state, object, explicit) { + var _result, typeList, index, length, type, style; -/** Detect free variable `self`. */ -var freeSelf = typeof self == 'object' && self && self.Object === Object && self; + typeList = explicit ? state.explicitTypes : state.implicitTypes; -/** Used as a reference to the global object. */ -var root = freeGlobal || freeSelf || Function('return this')(); + for (index = 0, length = typeList.length; index < length; index += 1) { + type = typeList[index]; -/** - * Gets the value at `key` of `object`. - * - * @private - * @param {Object} [object] The object to query. - * @param {string} key The key of the property to get. - * @returns {*} Returns the property value. - */ -function getValue(object, key) { - return object == null ? undefined : object[key]; -} + if ((type.instanceOf || type.predicate) && + (!type.instanceOf || ((typeof object === 'object') && (object instanceof type.instanceOf))) && + (!type.predicate || type.predicate(object))) { -/** - * Checks if `value` is a host object in IE < 9. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a host object, else `false`. - */ -function isHostObject(value) { - // Many host objects are `Object` objects that can coerce to strings - // despite having improperly defined `toString` methods. - var result = false; - if (value != null && typeof value.toString != 'function') { - try { - result = !!(value + ''); - } catch (e) {} - } - return result; -} + if (explicit) { + if (type.multi && type.representName) { + state.tag = type.representName(object); + } else { + state.tag = type.tag; + } + } else { + state.tag = '?'; + } -/** Used for built-in method references. */ -var arrayProto = Array.prototype, - funcProto = Function.prototype, - objectProto = Object.prototype; + if (type.represent) { + style = state.styleMap[type.tag] || type.defaultStyle; -/** Used to detect overreaching core-js shims. */ -var coreJsData = root['__core-js_shared__']; + if (_toString.call(type.represent) === '[object Function]') { + _result = type.represent(object, style); + } else if (_hasOwnProperty.call(type.represent, style)) { + _result = type.represent[style](object, style); + } else { + throw new YAMLException('!<' + type.tag + '> tag resolver accepts not "' + style + '" style'); + } -/** Used to detect methods masquerading as native. */ -var maskSrcKey = (function() { - var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || ''); - return uid ? ('Symbol(src)_1.' + uid) : ''; -}()); + state.dump = _result; + } -/** Used to resolve the decompiled source of functions. */ -var funcToString = funcProto.toString; + return true; + } + } -/** Used to check objects for own properties. */ -var hasOwnProperty = objectProto.hasOwnProperty; + return false; +} -/** - * Used to resolve the - * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) - * of values. - */ -var objectToString = objectProto.toString; +// Serializes `object` and writes it to global `result`. +// Returns true on success, or false on invalid object. 
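// A minimal usage sketch of the public API this dumper sits behind, written as
// it would be called from the action's own unbundled source; the document and
// option values are only examples, and each option maps onto a State field above.
const yamlExample = require('js-yaml')
console.log(yamlExample.dump(
  { group: { repos: ['repo-one', 'repo-two'] } },
  { indent: 2, lineWidth: 80, noRefs: true }
))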
+// +function writeNode(state, level, object, block, compact, iskey, isblockseq) { + state.tag = null; + state.dump = object; -/** Used to detect if a method is native. */ -var reIsNative = RegExp('^' + - funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&') - .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$' -); + if (!detectType(state, object, false)) { + detectType(state, object, true); + } -/** Built-in value references. */ -var Symbol = root.Symbol, - splice = arrayProto.splice; + var type = _toString.call(state.dump); + var inblock = block; + var tagStr; -/* Built-in method references that are verified to be native. */ -var Map = getNative(root, 'Map'), - nativeCreate = getNative(Object, 'create'); + if (block) { + block = (state.flowLevel < 0 || state.flowLevel > level); + } -/** Used to convert symbols to primitives and strings. */ -var symbolProto = Symbol ? Symbol.prototype : undefined, - symbolToString = symbolProto ? symbolProto.toString : undefined; + var objectOrArray = type === '[object Object]' || type === '[object Array]', + duplicateIndex, + duplicate; -/** - * Creates a hash object. - * - * @private - * @constructor - * @param {Array} [entries] The key-value pairs to cache. - */ -function Hash(entries) { - var index = -1, - length = entries ? entries.length : 0; + if (objectOrArray) { + duplicateIndex = state.duplicates.indexOf(object); + duplicate = duplicateIndex !== -1; + } - this.clear(); - while (++index < length) { - var entry = entries[index]; - this.set(entry[0], entry[1]); + if ((state.tag !== null && state.tag !== '?') || duplicate || (state.indent !== 2 && level > 0)) { + compact = false; } -} -/** - * Removes all key-value entries from the hash. - * - * @private - * @name clear - * @memberOf Hash - */ -function hashClear() { - this.__data__ = nativeCreate ? 
nativeCreate(null) : {}; -} + if (duplicate && state.usedDuplicates[duplicateIndex]) { + state.dump = '*ref_' + duplicateIndex; + } else { + if (objectOrArray && duplicate && !state.usedDuplicates[duplicateIndex]) { + state.usedDuplicates[duplicateIndex] = true; + } + if (type === '[object Object]') { + if (block && (Object.keys(state.dump).length !== 0)) { + writeBlockMapping(state, level, state.dump, compact); + if (duplicate) { + state.dump = '&ref_' + duplicateIndex + state.dump; + } + } else { + writeFlowMapping(state, level, state.dump); + if (duplicate) { + state.dump = '&ref_' + duplicateIndex + ' ' + state.dump; + } + } + } else if (type === '[object Array]') { + if (block && (state.dump.length !== 0)) { + if (state.noArrayIndent && !isblockseq && level > 0) { + writeBlockSequence(state, level - 1, state.dump, compact); + } else { + writeBlockSequence(state, level, state.dump, compact); + } + if (duplicate) { + state.dump = '&ref_' + duplicateIndex + state.dump; + } + } else { + writeFlowSequence(state, level, state.dump); + if (duplicate) { + state.dump = '&ref_' + duplicateIndex + ' ' + state.dump; + } + } + } else if (type === '[object String]') { + if (state.tag !== '?') { + writeScalar(state, state.dump, level, iskey, inblock); + } + } else if (type === '[object Undefined]') { + return false; + } else { + if (state.skipInvalid) return false; + throw new YAMLException('unacceptable kind of an object to dump ' + type); + } + + if (state.tag !== null && state.tag !== '?') { + // Need to encode all characters except those allowed by the spec: + // + // [35] ns-dec-digit ::= [#x30-#x39] /* 0-9 */ + // [36] ns-hex-digit ::= ns-dec-digit + // | [#x41-#x46] /* A-F */ | [#x61-#x66] /* a-f */ + // [37] ns-ascii-letter ::= [#x41-#x5A] /* A-Z */ | [#x61-#x7A] /* a-z */ + // [38] ns-word-char ::= ns-dec-digit | ns-ascii-letter | “-” + // [39] ns-uri-char ::= “%” ns-hex-digit ns-hex-digit | ns-word-char | “#” + // | “;” | “/” | “?” | “:” | “@” | “&” | “=” | “+” | “$” | “,” + // | “_” | “.” | “!” | “~” | “*” | “'” | “(” | “)” | “[” | “]” + // + // Also need to encode '!' because it has special meaning (end of tag prefix). + // + tagStr = encodeURI( + state.tag[0] === '!' ? state.tag.slice(1) : state.tag + ).replace(/!/g, '%21'); -/** - * Removes `key` and its value from the hash. - * - * @private - * @name delete - * @memberOf Hash - * @param {Object} hash The hash to modify. - * @param {string} key The key of the value to remove. - * @returns {boolean} Returns `true` if the entry was removed, else `false`. - */ -function hashDelete(key) { - return this.has(key) && delete this.__data__[key]; -} + if (state.tag[0] === '!') { + tagStr = '!' + tagStr; + } else if (tagStr.slice(0, 18) === 'tag:yaml.org,2002:') { + tagStr = '!!' + tagStr.slice(18); + } else { + tagStr = '!<' + tagStr + '>'; + } -/** - * Gets the hash value for `key`. - * - * @private - * @name get - * @memberOf Hash - * @param {string} key The key of the value to get. - * @returns {*} Returns the entry value. - */ -function hashGet(key) { - var data = this.__data__; - if (nativeCreate) { - var result = data[key]; - return result === HASH_UNDEFINED ? undefined : result; + state.dump = tagStr + ' ' + state.dump; + } } - return hasOwnProperty.call(data, key) ? data[key] : undefined; -} -/** - * Checks if a hash value for `key` exists. - * - * @private - * @name has - * @memberOf Hash - * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. 
- */ -function hashHas(key) { - var data = this.__data__; - return nativeCreate ? data[key] !== undefined : hasOwnProperty.call(data, key); -} - -/** - * Sets the hash `key` to `value`. - * - * @private - * @name set - * @memberOf Hash - * @param {string} key The key of the value to set. - * @param {*} value The value to set. - * @returns {Object} Returns the hash instance. - */ -function hashSet(key, value) { - var data = this.__data__; - data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value; - return this; + return true; } -// Add methods to `Hash`. -Hash.prototype.clear = hashClear; -Hash.prototype['delete'] = hashDelete; -Hash.prototype.get = hashGet; -Hash.prototype.has = hashHas; -Hash.prototype.set = hashSet; +function getDuplicateReferences(object, state) { + var objects = [], + duplicatesIndexes = [], + index, + length; -/** - * Creates an list cache object. - * - * @private - * @constructor - * @param {Array} [entries] The key-value pairs to cache. - */ -function ListCache(entries) { - var index = -1, - length = entries ? entries.length : 0; + inspectNode(object, objects, duplicatesIndexes); - this.clear(); - while (++index < length) { - var entry = entries[index]; - this.set(entry[0], entry[1]); + for (index = 0, length = duplicatesIndexes.length; index < length; index += 1) { + state.duplicates.push(objects[duplicatesIndexes[index]]); } + state.usedDuplicates = new Array(length); } -/** - * Removes all key-value entries from the list cache. - * - * @private - * @name clear - * @memberOf ListCache - */ -function listCacheClear() { - this.__data__ = []; -} - -/** - * Removes `key` and its value from the list cache. - * - * @private - * @name delete - * @memberOf ListCache - * @param {string} key The key of the value to remove. - * @returns {boolean} Returns `true` if the entry was removed, else `false`. - */ -function listCacheDelete(key) { - var data = this.__data__, - index = assocIndexOf(data, key); - - if (index < 0) { - return false; - } - var lastIndex = data.length - 1; - if (index == lastIndex) { - data.pop(); - } else { - splice.call(data, index, 1); - } - return true; -} +function inspectNode(object, objects, duplicatesIndexes) { + var objectKeyList, + index, + length; -/** - * Gets the list cache value for `key`. - * - * @private - * @name get - * @memberOf ListCache - * @param {string} key The key of the value to get. - * @returns {*} Returns the entry value. - */ -function listCacheGet(key) { - var data = this.__data__, - index = assocIndexOf(data, key); + if (object !== null && typeof object === 'object') { + index = objects.indexOf(object); + if (index !== -1) { + if (duplicatesIndexes.indexOf(index) === -1) { + duplicatesIndexes.push(index); + } + } else { + objects.push(object); - return index < 0 ? undefined : data[index][1]; -} + if (Array.isArray(object)) { + for (index = 0, length = object.length; index < length; index += 1) { + inspectNode(object[index], objects, duplicatesIndexes); + } + } else { + objectKeyList = Object.keys(object); -/** - * Checks if a list cache value for `key` exists. - * - * @private - * @name has - * @memberOf ListCache - * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. 
- */ -function listCacheHas(key) { - return assocIndexOf(this.__data__, key) > -1; + for (index = 0, length = objectKeyList.length; index < length; index += 1) { + inspectNode(object[objectKeyList[index]], objects, duplicatesIndexes); + } + } + } + } } -/** - * Sets the list cache `key` to `value`. - * - * @private - * @name set - * @memberOf ListCache - * @param {string} key The key of the value to set. - * @param {*} value The value to set. - * @returns {Object} Returns the list cache instance. - */ -function listCacheSet(key, value) { - var data = this.__data__, - index = assocIndexOf(data, key); +function dump(input, options) { + options = options || {}; - if (index < 0) { - data.push([key, value]); - } else { - data[index][1] = value; - } - return this; -} + var state = new State(options); -// Add methods to `ListCache`. -ListCache.prototype.clear = listCacheClear; -ListCache.prototype['delete'] = listCacheDelete; -ListCache.prototype.get = listCacheGet; -ListCache.prototype.has = listCacheHas; -ListCache.prototype.set = listCacheSet; + if (!state.noRefs) getDuplicateReferences(input, state); -/** - * Creates a map cache object to store key-value pairs. - * - * @private - * @constructor - * @param {Array} [entries] The key-value pairs to cache. - */ -function MapCache(entries) { - var index = -1, - length = entries ? entries.length : 0; + var value = input; - this.clear(); - while (++index < length) { - var entry = entries[index]; - this.set(entry[0], entry[1]); + if (state.replacer) { + value = state.replacer.call({ '': value }, '', value); } -} -/** - * Removes all key-value entries from the map. - * - * @private - * @name clear - * @memberOf MapCache - */ -function mapCacheClear() { - this.__data__ = { - 'hash': new Hash, - 'map': new (Map || ListCache), - 'string': new Hash - }; -} + if (writeNode(state, 0, value, true, true)) return state.dump + '\n'; -/** - * Removes `key` and its value from the map. - * - * @private - * @name delete - * @memberOf MapCache - * @param {string} key The key of the value to remove. - * @returns {boolean} Returns `true` if the entry was removed, else `false`. - */ -function mapCacheDelete(key) { - return getMapData(this, key)['delete'](key); + return ''; } -/** - * Gets the map value for `key`. - * - * @private - * @name get - * @memberOf MapCache - * @param {string} key The key of the value to get. - * @returns {*} Returns the entry value. - */ -function mapCacheGet(key) { - return getMapData(this, key).get(key); -} +module.exports.dump = dump; -/** - * Checks if a map value for `key` exists. - * - * @private - * @name has - * @memberOf MapCache - * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. - */ -function mapCacheHas(key) { - return getMapData(this, key).has(key); -} -/** - * Sets the map `key` to `value`. - * - * @private - * @name set - * @memberOf MapCache - * @param {string} key The key of the value to set. - * @param {*} value The value to set. - * @returns {Object} Returns the map cache instance. - */ -function mapCacheSet(key, value) { - getMapData(this, key).set(key, value); - return this; -} +/***/ }), -// Add methods to `MapCache`. 
-MapCache.prototype.clear = mapCacheClear; -MapCache.prototype['delete'] = mapCacheDelete; -MapCache.prototype.get = mapCacheGet; -MapCache.prototype.has = mapCacheHas; -MapCache.prototype.set = mapCacheSet; +/***/ 8179: +/***/ ((module) => { -/** - * Gets the index at which the `key` is found in `array` of key-value pairs. - * - * @private - * @param {Array} array The array to inspect. - * @param {*} key The key to search for. - * @returns {number} Returns the index of the matched value, else `-1`. - */ -function assocIndexOf(array, key) { - var length = array.length; - while (length--) { - if (eq(array[length][0], key)) { - return length; - } - } - return -1; -} +"use strict"; +// YAML error class. http://stackoverflow.com/questions/8458984 +// -/** - * The base implementation of `_.get` without support for default values. - * - * @private - * @param {Object} object The object to query. - * @param {Array|string} path The path of the property to get. - * @returns {*} Returns the resolved value. - */ -function baseGet(object, path) { - path = isKey(path, object) ? [path] : castPath(path); - var index = 0, - length = path.length; - while (object != null && index < length) { - object = object[toKey(path[index++])]; - } - return (index && index == length) ? object : undefined; -} +function formatError(exception, compact) { + var where = '', message = exception.reason || '(unknown reason)'; -/** - * The base implementation of `_.isNative` without bad shim checks. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a native function, - * else `false`. - */ -function baseIsNative(value) { - if (!isObject(value) || isMasked(value)) { - return false; - } - var pattern = (isFunction(value) || isHostObject(value)) ? reIsNative : reIsHostCtor; - return pattern.test(toSource(value)); -} + if (!exception.mark) return message; -/** - * The base implementation of `_.toString` which doesn't convert nullish - * values to empty strings. - * - * @private - * @param {*} value The value to process. - * @returns {string} Returns the string. - */ -function baseToString(value) { - // Exit early for strings to avoid a performance hit in some environments. - if (typeof value == 'string') { - return value; - } - if (isSymbol(value)) { - return symbolToString ? symbolToString.call(value) : ''; + if (exception.mark.name) { + where += 'in "' + exception.mark.name + '" '; } - var result = (value + ''); - return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; -} - -/** - * Casts `value` to a path array if it's not one. - * - * @private - * @param {*} value The value to inspect. - * @returns {Array} Returns the cast property path array. - */ -function castPath(value) { - return isArray(value) ? value : stringToPath(value); -} - -/** - * Gets the data for `map`. - * - * @private - * @param {Object} map The map to query. - * @param {string} key The reference key. - * @returns {*} Returns the map data. - */ -function getMapData(map, key) { - var data = map.__data__; - return isKeyable(key) - ? data[typeof key == 'string' ? 'string' : 'hash'] - : data.map; -} -/** - * Gets the native function at `key` of `object`. - * - * @private - * @param {Object} object The object to query. - * @param {string} key The key of the method to get. - * @returns {*} Returns the function if it's native, else `undefined`. - */ -function getNative(object, key) { - var value = getValue(object, key); - return baseIsNative(value) ? 
value : undefined; -} + where += '(' + (exception.mark.line + 1) + ':' + (exception.mark.column + 1) + ')'; -/** - * Checks if `value` is a property name and not a property path. - * - * @private - * @param {*} value The value to check. - * @param {Object} [object] The object to query keys on. - * @returns {boolean} Returns `true` if `value` is a property name, else `false`. - */ -function isKey(value, object) { - if (isArray(value)) { - return false; - } - var type = typeof value; - if (type == 'number' || type == 'symbol' || type == 'boolean' || - value == null || isSymbol(value)) { - return true; + if (!compact && exception.mark.snippet) { + where += '\n\n' + exception.mark.snippet; } - return reIsPlainProp.test(value) || !reIsDeepProp.test(value) || - (object != null && value in Object(object)); -} - -/** - * Checks if `value` is suitable for use as unique object key. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is suitable, else `false`. - */ -function isKeyable(value) { - var type = typeof value; - return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean') - ? (value !== '__proto__') - : (value === null); -} -/** - * Checks if `func` has its source masked. - * - * @private - * @param {Function} func The function to check. - * @returns {boolean} Returns `true` if `func` is masked, else `false`. - */ -function isMasked(func) { - return !!maskSrcKey && (maskSrcKey in func); + return message + ' ' + where; } -/** - * Converts `string` to a property path array. - * - * @private - * @param {string} string The string to convert. - * @returns {Array} Returns the property path array. - */ -var stringToPath = memoize(function(string) { - string = toString(string); - var result = []; - if (reLeadingDot.test(string)) { - result.push(''); - } - string.replace(rePropName, function(match, number, quote, string) { - result.push(quote ? string.replace(reEscapeChar, '$1') : (number || match)); - }); - return result; -}); +function YAMLException(reason, mark) { + // Super constructor + Error.call(this); -/** - * Converts `value` to a string key if it's not a string or symbol. - * - * @private - * @param {*} value The value to inspect. - * @returns {string|symbol} Returns the key. - */ -function toKey(value) { - if (typeof value == 'string' || isSymbol(value)) { - return value; - } - var result = (value + ''); - return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; -} + this.name = 'YAMLException'; + this.reason = reason; + this.mark = mark; + this.message = formatError(this, false); -/** - * Converts `func` to its source code. - * - * @private - * @param {Function} func The function to process. - * @returns {string} Returns the source code. - */ -function toSource(func) { - if (func != null) { - try { - return funcToString.call(func); - } catch (e) {} - try { - return (func + ''); - } catch (e) {} + // Include stack trace in error object + if (Error.captureStackTrace) { + // Chrome and NodeJS + Error.captureStackTrace(this, this.constructor); + } else { + // FF, IE 10+ and Safari 6+. Fallback for others + this.stack = (new Error()).stack || ''; } - return ''; } -/** - * Creates a function that memoizes the result of `func`. If `resolver` is - * provided, it determines the cache key for storing the result based on the - * arguments provided to the memoized function. By default, the first argument - * provided to the memoized function is used as the map cache key. 
The `func` - * is invoked with the `this` binding of the memoized function. - * - * **Note:** The cache is exposed as the `cache` property on the memoized - * function. Its creation may be customized by replacing the `_.memoize.Cache` - * constructor with one whose instances implement the - * [`Map`](http://ecma-international.org/ecma-262/7.0/#sec-properties-of-the-map-prototype-object) - * method interface of `delete`, `get`, `has`, and `set`. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Function - * @param {Function} func The function to have its output memoized. - * @param {Function} [resolver] The function to resolve the cache key. - * @returns {Function} Returns the new memoized function. - * @example - * - * var object = { 'a': 1, 'b': 2 }; - * var other = { 'c': 3, 'd': 4 }; - * - * var values = _.memoize(_.values); - * values(object); - * // => [1, 2] - * - * values(other); - * // => [3, 4] - * - * object.a = 2; - * values(object); - * // => [1, 2] - * - * // Modify the result cache. - * values.cache.set(object, ['a', 'b']); - * values(object); - * // => ['a', 'b'] - * - * // Replace `_.memoize.Cache`. - * _.memoize.Cache = WeakMap; - */ -function memoize(func, resolver) { - if (typeof func != 'function' || (resolver && typeof resolver != 'function')) { - throw new TypeError(FUNC_ERROR_TEXT); - } - var memoized = function() { - var args = arguments, - key = resolver ? resolver.apply(this, args) : args[0], - cache = memoized.cache; - if (cache.has(key)) { - return cache.get(key); - } - var result = func.apply(this, args); - memoized.cache = cache.set(key, result); - return result; - }; - memoized.cache = new (memoize.Cache || MapCache); - return memoized; -} +// Inherit from Error +YAMLException.prototype = Object.create(Error.prototype); +YAMLException.prototype.constructor = YAMLException; -// Assign cache to `_.memoize`. -memoize.Cache = MapCache; -/** - * Performs a - * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) - * comparison between two values to determine if they are equivalent. - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to compare. - * @param {*} other The other value to compare. - * @returns {boolean} Returns `true` if the values are equivalent, else `false`. - * @example - * - * var object = { 'a': 1 }; - * var other = { 'a': 1 }; - * - * _.eq(object, object); - * // => true - * - * _.eq(object, other); - * // => false - * - * _.eq('a', 'a'); - * // => true - * - * _.eq('a', Object('a')); - * // => false - * - * _.eq(NaN, NaN); - * // => true - */ -function eq(value, other) { - return value === other || (value !== value && other !== other); -} +YAMLException.prototype.toString = function toString(compact) { + return this.name + ': ' + formatError(this, compact); +}; -/** - * Checks if `value` is classified as an `Array` object. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is an array, else `false`. - * @example - * - * _.isArray([1, 2, 3]); - * // => true - * - * _.isArray(document.body.children); - * // => false - * - * _.isArray('abc'); - * // => false - * - * _.isArray(_.noop); - * // => false - */ -var isArray = Array.isArray; -/** - * Checks if `value` is classified as a `Function` object. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Lang - * @param {*} value The value to check. 
- * @returns {boolean} Returns `true` if `value` is a function, else `false`. - * @example - * - * _.isFunction(_); - * // => true - * - * _.isFunction(/abc/); - * // => false - */ -function isFunction(value) { - // The use of `Object#toString` avoids issues with the `typeof` operator - // in Safari 8-9 which returns 'object' for typed array and other constructors. - var tag = isObject(value) ? objectToString.call(value) : ''; - return tag == funcTag || tag == genTag; -} +module.exports = YAMLException; -/** - * Checks if `value` is the - * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) - * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is an object, else `false`. - * @example - * - * _.isObject({}); - * // => true - * - * _.isObject([1, 2, 3]); - * // => true - * - * _.isObject(_.noop); - * // => true - * - * _.isObject(null); - * // => false - */ -function isObject(value) { - var type = typeof value; - return !!value && (type == 'object' || type == 'function'); -} -/** - * Checks if `value` is object-like. A value is object-like if it's not `null` - * and has a `typeof` result of "object". - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is object-like, else `false`. - * @example - * - * _.isObjectLike({}); - * // => true - * - * _.isObjectLike([1, 2, 3]); - * // => true - * - * _.isObjectLike(_.noop); - * // => false - * - * _.isObjectLike(null); - * // => false - */ -function isObjectLike(value) { - return !!value && typeof value == 'object'; -} +/***/ }), -/** - * Checks if `value` is classified as a `Symbol` primitive or object. - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a symbol, else `false`. - * @example - * - * _.isSymbol(Symbol.iterator); - * // => true - * - * _.isSymbol('abc'); - * // => false - */ -function isSymbol(value) { - return typeof value == 'symbol' || - (isObjectLike(value) && objectToString.call(value) == symbolTag); -} +/***/ 1161: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * Converts `value` to a string. An empty string is returned for `null` - * and `undefined` values. The sign of `-0` is preserved. - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to process. - * @returns {string} Returns the string. - * @example - * - * _.toString(null); - * // => '' - * - * _.toString(-0); - * // => '-0' - * - * _.toString([1, 2, 3]); - * // => '1,2,3' - */ -function toString(value) { - return value == null ? '' : baseToString(value); -} +"use strict"; -/** - * Gets the value at `path` of `object`. If the resolved value is - * `undefined`, the `defaultValue` is returned in its place. - * - * @static - * @memberOf _ - * @since 3.7.0 - * @category Object - * @param {Object} object The object to query. - * @param {Array|string} path The path of the property to get. - * @param {*} [defaultValue] The value returned for `undefined` resolved values. - * @returns {*} Returns the resolved value. 
- * @example - * - * var object = { 'a': [{ 'b': { 'c': 3 } }] }; - * - * _.get(object, 'a[0].b.c'); - * // => 3 - * - * _.get(object, ['a', '0', 'b', 'c']); - * // => 3 - * - * _.get(object, 'a.b.c', 'default'); - * // => 'default' - */ -function get(object, path, defaultValue) { - var result = object == null ? undefined : baseGet(object, path); - return result === undefined ? defaultValue : result; -} -module.exports = get; +/*eslint-disable max-len,no-use-before-define*/ +var common = __nccwpck_require__(6829); +var YAMLException = __nccwpck_require__(8179); +var makeSnippet = __nccwpck_require__(6975); +var DEFAULT_SCHEMA = __nccwpck_require__(8759); -/***/ }), -/***/ 1552: -/***/ ((module) => { +var _hasOwnProperty = Object.prototype.hasOwnProperty; -/** - * lodash (Custom Build) - * Build: `lodash modularize exports="npm" -o ./` - * Copyright jQuery Foundation and other contributors - * Released under MIT license - * Based on Underscore.js 1.8.3 - * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors - */ -/** Used as the `TypeError` message for "Functions" methods. */ -var FUNC_ERROR_TEXT = 'Expected a function'; +var CONTEXT_FLOW_IN = 1; +var CONTEXT_FLOW_OUT = 2; +var CONTEXT_BLOCK_IN = 3; +var CONTEXT_BLOCK_OUT = 4; + -/** Used to stand-in for `undefined` hash values. */ -var HASH_UNDEFINED = '__lodash_hash_undefined__'; +var CHOMPING_CLIP = 1; +var CHOMPING_STRIP = 2; +var CHOMPING_KEEP = 3; -/** Used as references for various `Number` constants. */ -var INFINITY = 1 / 0, - MAX_SAFE_INTEGER = 9007199254740991; -/** `Object#toString` result references. */ -var funcTag = '[object Function]', - genTag = '[object GeneratorFunction]', - symbolTag = '[object Symbol]'; +var PATTERN_NON_PRINTABLE = /[\x00-\x08\x0B\x0C\x0E-\x1F\x7F-\x84\x86-\x9F\uFFFE\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/; +var PATTERN_NON_ASCII_LINE_BREAKS = /[\x85\u2028\u2029]/; +var PATTERN_FLOW_INDICATORS = /[,\[\]\{\}]/; +var PATTERN_TAG_HANDLE = /^(?:!|!!|![a-z\-]+!)$/i; +var PATTERN_TAG_URI = /^(?:!|[^,\[\]\{\}])(?:%[0-9a-f]{2}|[0-9a-z\-#;\/\?:@&=\+\$,_\.!~\*'\(\)\[\]])*$/i; -/** Used to match property names within property paths. */ -var reIsDeepProp = /\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/, - reIsPlainProp = /^\w*$/, - reLeadingDot = /^\./, - rePropName = /[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g; -/** - * Used to match `RegExp` - * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns). - */ -var reRegExpChar = /[\\^$.*+?()[\]{}|]/g; +function _class(obj) { return Object.prototype.toString.call(obj); } + +function is_EOL(c) { + return (c === 0x0A/* LF */) || (c === 0x0D/* CR */); +} + +function is_WHITE_SPACE(c) { + return (c === 0x09/* Tab */) || (c === 0x20/* Space */); +} + +function is_WS_OR_EOL(c) { + return (c === 0x09/* Tab */) || + (c === 0x20/* Space */) || + (c === 0x0A/* LF */) || + (c === 0x0D/* CR */); +} -/** Used to match backslashes in property paths. */ -var reEscapeChar = /\\(\\)?/g; +function is_FLOW_INDICATOR(c) { + return c === 0x2C/* , */ || + c === 0x5B/* [ */ || + c === 0x5D/* ] */ || + c === 0x7B/* { */ || + c === 0x7D/* } */; +} -/** Used to detect host constructors (Safari). */ -var reIsHostCtor = /^\[object .+?Constructor\]$/; +function fromHexCode(c) { + var lc; -/** Used to detect unsigned integer values. 
*/ -var reIsUint = /^(?:0|[1-9]\d*)$/; + if ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) { + return c - 0x30; + } -/** Detect free variable `global` from Node.js. */ -var freeGlobal = typeof global == 'object' && global && global.Object === Object && global; + /*eslint-disable no-bitwise*/ + lc = c | 0x20; -/** Detect free variable `self`. */ -var freeSelf = typeof self == 'object' && self && self.Object === Object && self; + if ((0x61/* a */ <= lc) && (lc <= 0x66/* f */)) { + return lc - 0x61 + 10; + } -/** Used as a reference to the global object. */ -var root = freeGlobal || freeSelf || Function('return this')(); + return -1; +} -/** - * Gets the value at `key` of `object`. - * - * @private - * @param {Object} [object] The object to query. - * @param {string} key The key of the property to get. - * @returns {*} Returns the property value. - */ -function getValue(object, key) { - return object == null ? undefined : object[key]; +function escapedHexLen(c) { + if (c === 0x78/* x */) { return 2; } + if (c === 0x75/* u */) { return 4; } + if (c === 0x55/* U */) { return 8; } + return 0; } -/** - * Checks if `value` is a host object in IE < 9. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a host object, else `false`. - */ -function isHostObject(value) { - // Many host objects are `Object` objects that can coerce to strings - // despite having improperly defined `toString` methods. - var result = false; - if (value != null && typeof value.toString != 'function') { - try { - result = !!(value + ''); - } catch (e) {} +function fromDecimalCode(c) { + if ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) { + return c - 0x30; } - return result; + + return -1; +} + +function simpleEscapeSequence(c) { + /* eslint-disable indent */ + return (c === 0x30/* 0 */) ? '\x00' : + (c === 0x61/* a */) ? '\x07' : + (c === 0x62/* b */) ? '\x08' : + (c === 0x74/* t */) ? '\x09' : + (c === 0x09/* Tab */) ? '\x09' : + (c === 0x6E/* n */) ? '\x0A' : + (c === 0x76/* v */) ? '\x0B' : + (c === 0x66/* f */) ? '\x0C' : + (c === 0x72/* r */) ? '\x0D' : + (c === 0x65/* e */) ? '\x1B' : + (c === 0x20/* Space */) ? ' ' : + (c === 0x22/* " */) ? '\x22' : + (c === 0x2F/* / */) ? '/' : + (c === 0x5C/* \ */) ? '\x5C' : + (c === 0x4E/* N */) ? '\x85' : + (c === 0x5F/* _ */) ? '\xA0' : + (c === 0x4C/* L */) ? '\u2028' : + (c === 0x50/* P */) ? '\u2029' : ''; } -/** Used for built-in method references. */ -var arrayProto = Array.prototype, - funcProto = Function.prototype, - objectProto = Object.prototype; +function charFromCodepoint(c) { + if (c <= 0xFFFF) { + return String.fromCharCode(c); + } + // Encode UTF-16 surrogate pair + // https://en.wikipedia.org/wiki/UTF-16#Code_points_U.2B010000_to_U.2B10FFFF + return String.fromCharCode( + ((c - 0x010000) >> 10) + 0xD800, + ((c - 0x010000) & 0x03FF) + 0xDC00 + ); +} -/** Used to detect overreaching core-js shims. */ -var coreJsData = root['__core-js_shared__']; +var simpleEscapeCheck = new Array(256); // integer, for fast access +var simpleEscapeMap = new Array(256); +for (var i = 0; i < 256; i++) { + simpleEscapeCheck[i] = simpleEscapeSequence(i) ? 1 : 0; + simpleEscapeMap[i] = simpleEscapeSequence(i); +} -/** Used to detect methods masquerading as native. */ -var maskSrcKey = (function() { - var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || ''); - return uid ? ('Symbol(src)_1.' + uid) : ''; -}()); -/** Used to resolve the decompiled source of functions. 
*/ -var funcToString = funcProto.toString; +function State(input, options) { + this.input = input; -/** Used to check objects for own properties. */ -var hasOwnProperty = objectProto.hasOwnProperty; + this.filename = options['filename'] || null; + this.schema = options['schema'] || DEFAULT_SCHEMA; + this.onWarning = options['onWarning'] || null; + // (Hidden) Remove? makes the loader to expect YAML 1.1 documents + // if such documents have no explicit %YAML directive + this.legacy = options['legacy'] || false; -/** - * Used to resolve the - * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) - * of values. - */ -var objectToString = objectProto.toString; + this.json = options['json'] || false; + this.listener = options['listener'] || null; -/** Used to detect if a method is native. */ -var reIsNative = RegExp('^' + - funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&') - .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$' -); + this.implicitTypes = this.schema.compiledImplicit; + this.typeMap = this.schema.compiledTypeMap; -/** Built-in value references. */ -var Symbol = root.Symbol, - splice = arrayProto.splice; + this.length = input.length; + this.position = 0; + this.line = 0; + this.lineStart = 0; + this.lineIndent = 0; -/* Built-in method references that are verified to be native. */ -var Map = getNative(root, 'Map'), - nativeCreate = getNative(Object, 'create'); + // position of first leading tab in the current line, + // used to make sure there are no tabs in the indentation + this.firstTabInLine = -1; -/** Used to convert symbols to primitives and strings. */ -var symbolProto = Symbol ? Symbol.prototype : undefined, - symbolToString = symbolProto ? symbolProto.toString : undefined; + this.documents = []; -/** - * Creates a hash object. - * - * @private - * @constructor - * @param {Array} [entries] The key-value pairs to cache. - */ -function Hash(entries) { - var index = -1, - length = entries ? entries.length : 0; + /* + this.version; + this.checkLineBreaks; + this.tagMap; + this.anchorMap; + this.tag; + this.anchor; + this.kind; + this.result;*/ - this.clear(); - while (++index < length) { - var entry = entries[index]; - this.set(entry[0], entry[1]); - } } -/** - * Removes all key-value entries from the hash. - * - * @private - * @name clear - * @memberOf Hash - */ -function hashClear() { - this.__data__ = nativeCreate ? nativeCreate(null) : {}; + +function generateError(state, message) { + var mark = { + name: state.filename, + buffer: state.input.slice(0, -1), // omit trailing \0 + position: state.position, + line: state.line, + column: state.position - state.lineStart + }; + + mark.snippet = makeSnippet(mark); + + return new YAMLException(message, mark); } -/** - * Removes `key` and its value from the hash. - * - * @private - * @name delete - * @memberOf Hash - * @param {Object} hash The hash to modify. - * @param {string} key The key of the value to remove. - * @returns {boolean} Returns `true` if the entry was removed, else `false`. - */ -function hashDelete(key) { - return this.has(key) && delete this.__data__[key]; +function throwError(state, message) { + throw generateError(state, message); } -/** - * Gets the hash value for `key`. - * - * @private - * @name get - * @memberOf Hash - * @param {string} key The key of the value to get. - * @returns {*} Returns the entry value. 
- */ -function hashGet(key) { - var data = this.__data__; - if (nativeCreate) { - var result = data[key]; - return result === HASH_UNDEFINED ? undefined : result; +function throwWarning(state, message) { + if (state.onWarning) { + state.onWarning.call(null, generateError(state, message)); } - return hasOwnProperty.call(data, key) ? data[key] : undefined; } -/** - * Checks if a hash value for `key` exists. - * - * @private - * @name has - * @memberOf Hash - * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. - */ -function hashHas(key) { - var data = this.__data__; - return nativeCreate ? data[key] !== undefined : hasOwnProperty.call(data, key); -} -/** - * Sets the hash `key` to `value`. - * - * @private - * @name set - * @memberOf Hash - * @param {string} key The key of the value to set. - * @param {*} value The value to set. - * @returns {Object} Returns the hash instance. - */ -function hashSet(key, value) { - var data = this.__data__; - data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value; - return this; -} +var directiveHandlers = { -// Add methods to `Hash`. -Hash.prototype.clear = hashClear; -Hash.prototype['delete'] = hashDelete; -Hash.prototype.get = hashGet; -Hash.prototype.has = hashHas; -Hash.prototype.set = hashSet; + YAML: function handleYamlDirective(state, name, args) { -/** - * Creates an list cache object. - * - * @private - * @constructor - * @param {Array} [entries] The key-value pairs to cache. - */ -function ListCache(entries) { - var index = -1, - length = entries ? entries.length : 0; + var match, major, minor; - this.clear(); - while (++index < length) { - var entry = entries[index]; - this.set(entry[0], entry[1]); - } -} + if (state.version !== null) { + throwError(state, 'duplication of %YAML directive'); + } -/** - * Removes all key-value entries from the list cache. - * - * @private - * @name clear - * @memberOf ListCache - */ -function listCacheClear() { - this.__data__ = []; -} + if (args.length !== 1) { + throwError(state, 'YAML directive accepts exactly one argument'); + } -/** - * Removes `key` and its value from the list cache. - * - * @private - * @name delete - * @memberOf ListCache - * @param {string} key The key of the value to remove. - * @returns {boolean} Returns `true` if the entry was removed, else `false`. - */ -function listCacheDelete(key) { - var data = this.__data__, - index = assocIndexOf(data, key); + match = /^([0-9]+)\.([0-9]+)$/.exec(args[0]); - if (index < 0) { - return false; - } - var lastIndex = data.length - 1; - if (index == lastIndex) { - data.pop(); - } else { - splice.call(data, index, 1); - } - return true; -} + if (match === null) { + throwError(state, 'ill-formed argument of the YAML directive'); + } -/** - * Gets the list cache value for `key`. - * - * @private - * @name get - * @memberOf ListCache - * @param {string} key The key of the value to get. - * @returns {*} Returns the entry value. - */ -function listCacheGet(key) { - var data = this.__data__, - index = assocIndexOf(data, key); + major = parseInt(match[1], 10); + minor = parseInt(match[2], 10); - return index < 0 ? undefined : data[index][1]; -} + if (major !== 1) { + throwError(state, 'unacceptable YAML version of the document'); + } -/** - * Checks if a list cache value for `key` exists. - * - * @private - * @name has - * @memberOf ListCache - * @param {string} key The key of the entry to check. 
- * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. - */ -function listCacheHas(key) { - return assocIndexOf(this.__data__, key) > -1; -} + state.version = args[0]; + state.checkLineBreaks = (minor < 2); -/** - * Sets the list cache `key` to `value`. - * - * @private - * @name set - * @memberOf ListCache - * @param {string} key The key of the value to set. - * @param {*} value The value to set. - * @returns {Object} Returns the list cache instance. - */ -function listCacheSet(key, value) { - var data = this.__data__, - index = assocIndexOf(data, key); + if (minor !== 1 && minor !== 2) { + throwWarning(state, 'unsupported YAML version of the document'); + } + }, - if (index < 0) { - data.push([key, value]); - } else { - data[index][1] = value; - } - return this; -} + TAG: function handleTagDirective(state, name, args) { -// Add methods to `ListCache`. -ListCache.prototype.clear = listCacheClear; -ListCache.prototype['delete'] = listCacheDelete; -ListCache.prototype.get = listCacheGet; -ListCache.prototype.has = listCacheHas; -ListCache.prototype.set = listCacheSet; + var handle, prefix; -/** - * Creates a map cache object to store key-value pairs. - * - * @private - * @constructor - * @param {Array} [entries] The key-value pairs to cache. - */ -function MapCache(entries) { - var index = -1, - length = entries ? entries.length : 0; + if (args.length !== 2) { + throwError(state, 'TAG directive accepts exactly two arguments'); + } + + handle = args[0]; + prefix = args[1]; + + if (!PATTERN_TAG_HANDLE.test(handle)) { + throwError(state, 'ill-formed tag handle (first argument) of the TAG directive'); + } + + if (_hasOwnProperty.call(state.tagMap, handle)) { + throwError(state, 'there is a previously declared suffix for "' + handle + '" tag handle'); + } + + if (!PATTERN_TAG_URI.test(prefix)) { + throwError(state, 'ill-formed tag prefix (second argument) of the TAG directive'); + } + + try { + prefix = decodeURIComponent(prefix); + } catch (err) { + throwError(state, 'tag prefix is malformed: ' + prefix); + } - this.clear(); - while (++index < length) { - var entry = entries[index]; - this.set(entry[0], entry[1]); + state.tagMap[handle] = prefix; } -} +}; -/** - * Removes all key-value entries from the map. - * - * @private - * @name clear - * @memberOf MapCache - */ -function mapCacheClear() { - this.__data__ = { - 'hash': new Hash, - 'map': new (Map || ListCache), - 'string': new Hash - }; -} -/** - * Removes `key` and its value from the map. - * - * @private - * @name delete - * @memberOf MapCache - * @param {string} key The key of the value to remove. - * @returns {boolean} Returns `true` if the entry was removed, else `false`. - */ -function mapCacheDelete(key) { - return getMapData(this, key)['delete'](key); -} +function captureSegment(state, start, end, checkJson) { + var _position, _length, _character, _result; -/** - * Gets the map value for `key`. - * - * @private - * @name get - * @memberOf MapCache - * @param {string} key The key of the value to get. - * @returns {*} Returns the entry value. - */ -function mapCacheGet(key) { - return getMapData(this, key).get(key); -} + if (start < end) { + _result = state.input.slice(start, end); -/** - * Checks if a map value for `key` exists. - * - * @private - * @name has - * @memberOf MapCache - * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. 
- */ -function mapCacheHas(key) { - return getMapData(this, key).has(key); -} + if (checkJson) { + for (_position = 0, _length = _result.length; _position < _length; _position += 1) { + _character = _result.charCodeAt(_position); + if (!(_character === 0x09 || + (0x20 <= _character && _character <= 0x10FFFF))) { + throwError(state, 'expected valid JSON character'); + } + } + } else if (PATTERN_NON_PRINTABLE.test(_result)) { + throwError(state, 'the stream contains non-printable characters'); + } -/** - * Sets the map `key` to `value`. - * - * @private - * @name set - * @memberOf MapCache - * @param {string} key The key of the value to set. - * @param {*} value The value to set. - * @returns {Object} Returns the map cache instance. - */ -function mapCacheSet(key, value) { - getMapData(this, key).set(key, value); - return this; + state.result += _result; + } } -// Add methods to `MapCache`. -MapCache.prototype.clear = mapCacheClear; -MapCache.prototype['delete'] = mapCacheDelete; -MapCache.prototype.get = mapCacheGet; -MapCache.prototype.has = mapCacheHas; -MapCache.prototype.set = mapCacheSet; +function mergeMappings(state, destination, source, overridableKeys) { + var sourceKeys, key, index, quantity; -/** - * Assigns `value` to `key` of `object` if the existing value is not equivalent - * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) - * for equality comparisons. - * - * @private - * @param {Object} object The object to modify. - * @param {string} key The key of the property to assign. - * @param {*} value The value to assign. - */ -function assignValue(object, key, value) { - var objValue = object[key]; - if (!(hasOwnProperty.call(object, key) && eq(objValue, value)) || - (value === undefined && !(key in object))) { - object[key] = value; + if (!common.isObject(source)) { + throwError(state, 'cannot merge mappings; the provided source object is unacceptable'); } -} -/** - * Gets the index at which the `key` is found in `array` of key-value pairs. - * - * @private - * @param {Array} array The array to inspect. - * @param {*} key The key to search for. - * @returns {number} Returns the index of the matched value, else `-1`. - */ -function assocIndexOf(array, key) { - var length = array.length; - while (length--) { - if (eq(array[length][0], key)) { - return length; + sourceKeys = Object.keys(source); + + for (index = 0, quantity = sourceKeys.length; index < quantity; index += 1) { + key = sourceKeys[index]; + + if (!_hasOwnProperty.call(destination, key)) { + destination[key] = source[key]; + overridableKeys[key] = true; } } - return -1; } -/** - * The base implementation of `_.isNative` without bad shim checks. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a native function, - * else `false`. - */ -function baseIsNative(value) { - if (!isObject(value) || isMasked(value)) { - return false; - } - var pattern = (isFunction(value) || isHostObject(value)) ? reIsNative : reIsHostCtor; - return pattern.test(toSource(value)); -} +function storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, + startLine, startLineStart, startPos) { -/** - * The base implementation of `_.set`. - * - * @private - * @param {Object} object The object to modify. - * @param {Array|string} path The path of the property to set. - * @param {*} value The value to set. - * @param {Function} [customizer] The function to customize path creation. - * @returns {Object} Returns `object`. 
- */ -function baseSet(object, path, value, customizer) { - if (!isObject(object)) { - return object; - } - path = isKey(path, object) ? [path] : castPath(path); - - var index = -1, - length = path.length, - lastIndex = length - 1, - nested = object; - - while (nested != null && ++index < length) { - var key = toKey(path[index]), - newValue = value; - - if (index != lastIndex) { - var objValue = nested[key]; - newValue = customizer ? customizer(objValue, key, nested) : undefined; - if (newValue === undefined) { - newValue = isObject(objValue) - ? objValue - : (isIndex(path[index + 1]) ? [] : {}); + var index, quantity; + + // The output is a plain object here, so keys can only be strings. + // We need to convert keyNode to a string, but doing so can hang the process + // (deeply nested arrays that explode exponentially using aliases). + if (Array.isArray(keyNode)) { + keyNode = Array.prototype.slice.call(keyNode); + + for (index = 0, quantity = keyNode.length; index < quantity; index += 1) { + if (Array.isArray(keyNode[index])) { + throwError(state, 'nested arrays are not supported inside keys'); + } + + if (typeof keyNode === 'object' && _class(keyNode[index]) === '[object Object]') { + keyNode[index] = '[object Object]'; } } - assignValue(nested, key, newValue); - nested = nested[key]; } - return object; -} -/** - * The base implementation of `_.toString` which doesn't convert nullish - * values to empty strings. - * - * @private - * @param {*} value The value to process. - * @returns {string} Returns the string. - */ -function baseToString(value) { - // Exit early for strings to avoid a performance hit in some environments. - if (typeof value == 'string') { - return value; - } - if (isSymbol(value)) { - return symbolToString ? symbolToString.call(value) : ''; + // Avoid code execution in load() via toString property + // (still use its own toString for arrays, timestamps, + // and whatever user schema extensions happen to have @@toStringTag) + if (typeof keyNode === 'object' && _class(keyNode) === '[object Object]') { + keyNode = '[object Object]'; } - var result = (value + ''); - return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; -} -/** - * Casts `value` to a path array if it's not one. - * - * @private - * @param {*} value The value to inspect. - * @returns {Array} Returns the cast property path array. - */ -function castPath(value) { - return isArray(value) ? value : stringToPath(value); -} -/** - * Gets the data for `map`. - * - * @private - * @param {Object} map The map to query. - * @param {string} key The reference key. - * @returns {*} Returns the map data. - */ -function getMapData(map, key) { - var data = map.__data__; - return isKeyable(key) - ? data[typeof key == 'string' ? 'string' : 'hash'] - : data.map; -} + keyNode = String(keyNode); -/** - * Gets the native function at `key` of `object`. - * - * @private - * @param {Object} object The object to query. - * @param {string} key The key of the method to get. - * @returns {*} Returns the function if it's native, else `undefined`. - */ -function getNative(object, key) { - var value = getValue(object, key); - return baseIsNative(value) ? value : undefined; -} + if (_result === null) { + _result = {}; + } -/** - * Checks if `value` is a valid array-like index. - * - * @private - * @param {*} value The value to check. - * @param {number} [length=MAX_SAFE_INTEGER] The upper bounds of a valid index. - * @returns {boolean} Returns `true` if `value` is a valid index, else `false`. 
- */ -function isIndex(value, length) { - length = length == null ? MAX_SAFE_INTEGER : length; - return !!length && - (typeof value == 'number' || reIsUint.test(value)) && - (value > -1 && value % 1 == 0 && value < length); -} + if (keyTag === 'tag:yaml.org,2002:merge') { + if (Array.isArray(valueNode)) { + for (index = 0, quantity = valueNode.length; index < quantity; index += 1) { + mergeMappings(state, _result, valueNode[index], overridableKeys); + } + } else { + mergeMappings(state, _result, valueNode, overridableKeys); + } + } else { + if (!state.json && + !_hasOwnProperty.call(overridableKeys, keyNode) && + _hasOwnProperty.call(_result, keyNode)) { + state.line = startLine || state.line; + state.lineStart = startLineStart || state.lineStart; + state.position = startPos || state.position; + throwError(state, 'duplicated mapping key'); + } -/** - * Checks if `value` is a property name and not a property path. - * - * @private - * @param {*} value The value to check. - * @param {Object} [object] The object to query keys on. - * @returns {boolean} Returns `true` if `value` is a property name, else `false`. - */ -function isKey(value, object) { - if (isArray(value)) { - return false; - } - var type = typeof value; - if (type == 'number' || type == 'symbol' || type == 'boolean' || - value == null || isSymbol(value)) { - return true; + // used for this specific key only because Object.defineProperty is slow + if (keyNode === '__proto__') { + Object.defineProperty(_result, keyNode, { + configurable: true, + enumerable: true, + writable: true, + value: valueNode + }); + } else { + _result[keyNode] = valueNode; + } + delete overridableKeys[keyNode]; } - return reIsPlainProp.test(value) || !reIsDeepProp.test(value) || - (object != null && value in Object(object)); -} -/** - * Checks if `value` is suitable for use as unique object key. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is suitable, else `false`. - */ -function isKeyable(value) { - var type = typeof value; - return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean') - ? (value !== '__proto__') - : (value === null); + return _result; } -/** - * Checks if `func` has its source masked. - * - * @private - * @param {Function} func The function to check. - * @returns {boolean} Returns `true` if `func` is masked, else `false`. - */ -function isMasked(func) { - return !!maskSrcKey && (maskSrcKey in func); -} +function readLineBreak(state) { + var ch; -/** - * Converts `string` to a property path array. - * - * @private - * @param {string} string The string to convert. - * @returns {Array} Returns the property path array. - */ -var stringToPath = memoize(function(string) { - string = toString(string); + ch = state.input.charCodeAt(state.position); - var result = []; - if (reLeadingDot.test(string)) { - result.push(''); + if (ch === 0x0A/* LF */) { + state.position++; + } else if (ch === 0x0D/* CR */) { + state.position++; + if (state.input.charCodeAt(state.position) === 0x0A/* LF */) { + state.position++; + } + } else { + throwError(state, 'a line break is expected'); } - string.replace(rePropName, function(match, number, quote, string) { - result.push(quote ? string.replace(reEscapeChar, '$1') : (number || match)); - }); - return result; -}); -/** - * Converts `value` to a string key if it's not a string or symbol. - * - * @private - * @param {*} value The value to inspect. - * @returns {string|symbol} Returns the key. 
- */ -function toKey(value) { - if (typeof value == 'string' || isSymbol(value)) { - return value; - } - var result = (value + ''); - return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; + state.line += 1; + state.lineStart = state.position; + state.firstTabInLine = -1; } -/** - * Converts `func` to its source code. - * - * @private - * @param {Function} func The function to process. - * @returns {string} Returns the source code. - */ -function toSource(func) { - if (func != null) { - try { - return funcToString.call(func); - } catch (e) {} - try { - return (func + ''); - } catch (e) {} - } - return ''; -} +function skipSeparationSpace(state, allowComments, checkIndent) { + var lineBreaks = 0, + ch = state.input.charCodeAt(state.position); -/** - * Creates a function that memoizes the result of `func`. If `resolver` is - * provided, it determines the cache key for storing the result based on the - * arguments provided to the memoized function. By default, the first argument - * provided to the memoized function is used as the map cache key. The `func` - * is invoked with the `this` binding of the memoized function. - * - * **Note:** The cache is exposed as the `cache` property on the memoized - * function. Its creation may be customized by replacing the `_.memoize.Cache` - * constructor with one whose instances implement the - * [`Map`](http://ecma-international.org/ecma-262/7.0/#sec-properties-of-the-map-prototype-object) - * method interface of `delete`, `get`, `has`, and `set`. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Function - * @param {Function} func The function to have its output memoized. - * @param {Function} [resolver] The function to resolve the cache key. - * @returns {Function} Returns the new memoized function. - * @example - * - * var object = { 'a': 1, 'b': 2 }; - * var other = { 'c': 3, 'd': 4 }; - * - * var values = _.memoize(_.values); - * values(object); - * // => [1, 2] - * - * values(other); - * // => [3, 4] - * - * object.a = 2; - * values(object); - * // => [1, 2] - * - * // Modify the result cache. - * values.cache.set(object, ['a', 'b']); - * values(object); - * // => ['a', 'b'] - * - * // Replace `_.memoize.Cache`. - * _.memoize.Cache = WeakMap; - */ -function memoize(func, resolver) { - if (typeof func != 'function' || (resolver && typeof resolver != 'function')) { - throw new TypeError(FUNC_ERROR_TEXT); - } - var memoized = function() { - var args = arguments, - key = resolver ? resolver.apply(this, args) : args[0], - cache = memoized.cache; + while (ch !== 0) { + while (is_WHITE_SPACE(ch)) { + if (ch === 0x09/* Tab */ && state.firstTabInLine === -1) { + state.firstTabInLine = state.position; + } + ch = state.input.charCodeAt(++state.position); + } - if (cache.has(key)) { - return cache.get(key); + if (allowComments && ch === 0x23/* # */) { + do { + ch = state.input.charCodeAt(++state.position); + } while (ch !== 0x0A/* LF */ && ch !== 0x0D/* CR */ && ch !== 0); } - var result = func.apply(this, args); - memoized.cache = cache.set(key, result); - return result; - }; - memoized.cache = new (memoize.Cache || MapCache); - return memoized; -} -// Assign cache to `_.memoize`. -memoize.Cache = MapCache; + if (is_EOL(ch)) { + readLineBreak(state); -/** - * Performs a - * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) - * comparison between two values to determine if they are equivalent. - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to compare. 
- * @param {*} other The other value to compare. - * @returns {boolean} Returns `true` if the values are equivalent, else `false`. - * @example - * - * var object = { 'a': 1 }; - * var other = { 'a': 1 }; - * - * _.eq(object, object); - * // => true - * - * _.eq(object, other); - * // => false - * - * _.eq('a', 'a'); - * // => true - * - * _.eq('a', Object('a')); - * // => false - * - * _.eq(NaN, NaN); - * // => true - */ -function eq(value, other) { - return value === other || (value !== value && other !== other); -} + ch = state.input.charCodeAt(state.position); + lineBreaks++; + state.lineIndent = 0; -/** - * Checks if `value` is classified as an `Array` object. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is an array, else `false`. - * @example - * - * _.isArray([1, 2, 3]); - * // => true - * - * _.isArray(document.body.children); - * // => false - * - * _.isArray('abc'); - * // => false - * - * _.isArray(_.noop); - * // => false - */ -var isArray = Array.isArray; + while (ch === 0x20/* Space */) { + state.lineIndent++; + ch = state.input.charCodeAt(++state.position); + } + } else { + break; + } + } -/** - * Checks if `value` is classified as a `Function` object. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a function, else `false`. - * @example - * - * _.isFunction(_); - * // => true - * - * _.isFunction(/abc/); - * // => false - */ -function isFunction(value) { - // The use of `Object#toString` avoids issues with the `typeof` operator - // in Safari 8-9 which returns 'object' for typed array and other constructors. - var tag = isObject(value) ? objectToString.call(value) : ''; - return tag == funcTag || tag == genTag; -} + if (checkIndent !== -1 && lineBreaks !== 0 && state.lineIndent < checkIndent) { + throwWarning(state, 'deficient indentation'); + } -/** - * Checks if `value` is the - * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) - * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is an object, else `false`. - * @example - * - * _.isObject({}); - * // => true - * - * _.isObject([1, 2, 3]); - * // => true - * - * _.isObject(_.noop); - * // => true - * - * _.isObject(null); - * // => false - */ -function isObject(value) { - var type = typeof value; - return !!value && (type == 'object' || type == 'function'); + return lineBreaks; } -/** - * Checks if `value` is object-like. A value is object-like if it's not `null` - * and has a `typeof` result of "object". - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is object-like, else `false`. - * @example - * - * _.isObjectLike({}); - * // => true - * - * _.isObjectLike([1, 2, 3]); - * // => true - * - * _.isObjectLike(_.noop); - * // => false - * - * _.isObjectLike(null); - * // => false - */ -function isObjectLike(value) { - return !!value && typeof value == 'object'; -} +function testDocumentSeparator(state) { + var _position = state.position, + ch; -/** - * Checks if `value` is classified as a `Symbol` primitive or object. 
- * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a symbol, else `false`. - * @example - * - * _.isSymbol(Symbol.iterator); - * // => true - * - * _.isSymbol('abc'); - * // => false - */ -function isSymbol(value) { - return typeof value == 'symbol' || - (isObjectLike(value) && objectToString.call(value) == symbolTag); -} + ch = state.input.charCodeAt(_position); -/** - * Converts `value` to a string. An empty string is returned for `null` - * and `undefined` values. The sign of `-0` is preserved. - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to process. - * @returns {string} Returns the string. - * @example - * - * _.toString(null); - * // => '' - * - * _.toString(-0); - * // => '-0' - * - * _.toString([1, 2, 3]); - * // => '1,2,3' - */ -function toString(value) { - return value == null ? '' : baseToString(value); -} + // Condition state.position === state.lineStart is tested + // in parent on each call, for efficiency. No needs to test here again. + if ((ch === 0x2D/* - */ || ch === 0x2E/* . */) && + ch === state.input.charCodeAt(_position + 1) && + ch === state.input.charCodeAt(_position + 2)) { -/** - * Sets the value at `path` of `object`. If a portion of `path` doesn't exist, - * it's created. Arrays are created for missing index properties while objects - * are created for all other missing properties. Use `_.setWith` to customize - * `path` creation. - * - * **Note:** This method mutates `object`. - * - * @static - * @memberOf _ - * @since 3.7.0 - * @category Object - * @param {Object} object The object to modify. - * @param {Array|string} path The path of the property to set. - * @param {*} value The value to set. - * @returns {Object} Returns `object`. - * @example - * - * var object = { 'a': [{ 'b': { 'c': 3 } }] }; - * - * _.set(object, 'a[0].b.c', 4); - * console.log(object.a[0].b.c); - * // => 4 - * - * _.set(object, ['x', '0', 'y', 'z'], 5); - * console.log(object.x[0].y.z); - * // => 5 - */ -function set(object, path, value) { - return object == null ? object : baseSet(object, path, value); -} + _position += 3; + + ch = state.input.charCodeAt(_position); -module.exports = set; + if (ch === 0 || is_WS_OR_EOL(ch)) { + return true; + } + } + return false; +} -/***/ }), +function writeFoldedLines(state, count) { + if (count === 1) { + state.result += ' '; + } else if (count > 1) { + state.result += common.repeat('\n', count - 1); + } +} -/***/ 8216: -/***/ ((module) => { -/** - * lodash (Custom Build) - * Build: `lodash modularize exports="npm" -o ./` - * Copyright jQuery Foundation and other contributors - * Released under MIT license - * Based on Underscore.js 1.8.3 - * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors - */ +function readPlainScalar(state, nodeIndent, withinFlowCollection) { + var preceding, + following, + captureStart, + captureEnd, + hasPendingContent, + _line, + _lineStart, + _lineIndent, + _kind = state.kind, + _result = state.result, + ch; -/** Used as the size to enable large array optimizations. */ -var LARGE_ARRAY_SIZE = 200; + ch = state.input.charCodeAt(state.position); -/** Used to stand-in for `undefined` hash values. */ -var HASH_UNDEFINED = '__lodash_hash_undefined__'; + if (is_WS_OR_EOL(ch) || + is_FLOW_INDICATOR(ch) || + ch === 0x23/* # */ || + ch === 0x26/* & */ || + ch === 0x2A/* * */ || + ch === 0x21/* ! 
*/ || + ch === 0x7C/* | */ || + ch === 0x3E/* > */ || + ch === 0x27/* ' */ || + ch === 0x22/* " */ || + ch === 0x25/* % */ || + ch === 0x40/* @ */ || + ch === 0x60/* ` */) { + return false; + } -/** Used as references for various `Number` constants. */ -var INFINITY = 1 / 0; + if (ch === 0x3F/* ? */ || ch === 0x2D/* - */) { + following = state.input.charCodeAt(state.position + 1); -/** `Object#toString` result references. */ -var funcTag = '[object Function]', - genTag = '[object GeneratorFunction]'; + if (is_WS_OR_EOL(following) || + withinFlowCollection && is_FLOW_INDICATOR(following)) { + return false; + } + } -/** - * Used to match `RegExp` - * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns). - */ -var reRegExpChar = /[\\^$.*+?()[\]{}|]/g; + state.kind = 'scalar'; + state.result = ''; + captureStart = captureEnd = state.position; + hasPendingContent = false; -/** Used to detect host constructors (Safari). */ -var reIsHostCtor = /^\[object .+?Constructor\]$/; + while (ch !== 0) { + if (ch === 0x3A/* : */) { + following = state.input.charCodeAt(state.position + 1); -/** Detect free variable `global` from Node.js. */ -var freeGlobal = typeof global == 'object' && global && global.Object === Object && global; + if (is_WS_OR_EOL(following) || + withinFlowCollection && is_FLOW_INDICATOR(following)) { + break; + } -/** Detect free variable `self`. */ -var freeSelf = typeof self == 'object' && self && self.Object === Object && self; + } else if (ch === 0x23/* # */) { + preceding = state.input.charCodeAt(state.position - 1); -/** Used as a reference to the global object. */ -var root = freeGlobal || freeSelf || Function('return this')(); + if (is_WS_OR_EOL(preceding)) { + break; + } -/** - * A specialized version of `_.includes` for arrays without support for - * specifying an index to search from. - * - * @private - * @param {Array} [array] The array to inspect. - * @param {*} target The value to search for. - * @returns {boolean} Returns `true` if `target` is found, else `false`. - */ -function arrayIncludes(array, value) { - var length = array ? array.length : 0; - return !!length && baseIndexOf(array, value, 0) > -1; -} + } else if ((state.position === state.lineStart && testDocumentSeparator(state)) || + withinFlowCollection && is_FLOW_INDICATOR(ch)) { + break; -/** - * This function is like `arrayIncludes` except that it accepts a comparator. - * - * @private - * @param {Array} [array] The array to inspect. - * @param {*} target The value to search for. - * @param {Function} comparator The comparator invoked per element. - * @returns {boolean} Returns `true` if `target` is found, else `false`. - */ -function arrayIncludesWith(array, value, comparator) { - var index = -1, - length = array ? array.length : 0; + } else if (is_EOL(ch)) { + _line = state.line; + _lineStart = state.lineStart; + _lineIndent = state.lineIndent; + skipSeparationSpace(state, false, -1); - while (++index < length) { - if (comparator(value, array[index])) { - return true; + if (state.lineIndent >= nodeIndent) { + hasPendingContent = true; + ch = state.input.charCodeAt(state.position); + continue; + } else { + state.position = captureEnd; + state.line = _line; + state.lineStart = _lineStart; + state.lineIndent = _lineIndent; + break; + } } - } - return false; -} - -/** - * The base implementation of `_.findIndex` and `_.findLastIndex` without - * support for iteratee shorthands. - * - * @private - * @param {Array} array The array to inspect. 
- * @param {Function} predicate The function invoked per iteration. - * @param {number} fromIndex The index to search from. - * @param {boolean} [fromRight] Specify iterating from right to left. - * @returns {number} Returns the index of the matched value, else `-1`. - */ -function baseFindIndex(array, predicate, fromIndex, fromRight) { - var length = array.length, - index = fromIndex + (fromRight ? 1 : -1); - while ((fromRight ? index-- : ++index < length)) { - if (predicate(array[index], index, array)) { - return index; + if (hasPendingContent) { + captureSegment(state, captureStart, captureEnd, false); + writeFoldedLines(state, state.line - _line); + captureStart = captureEnd = state.position; + hasPendingContent = false; } - } - return -1; -} - -/** - * The base implementation of `_.indexOf` without `fromIndex` bounds checks. - * - * @private - * @param {Array} array The array to inspect. - * @param {*} value The value to search for. - * @param {number} fromIndex The index to search from. - * @returns {number} Returns the index of the matched value, else `-1`. - */ -function baseIndexOf(array, value, fromIndex) { - if (value !== value) { - return baseFindIndex(array, baseIsNaN, fromIndex); - } - var index = fromIndex - 1, - length = array.length; - while (++index < length) { - if (array[index] === value) { - return index; + if (!is_WHITE_SPACE(ch)) { + captureEnd = state.position + 1; } - } - return -1; -} - -/** - * The base implementation of `_.isNaN` without support for number objects. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is `NaN`, else `false`. - */ -function baseIsNaN(value) { - return value !== value; -} -/** - * Checks if a cache value for `key` exists. - * - * @private - * @param {Object} cache The cache to query. - * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. - */ -function cacheHas(cache, key) { - return cache.has(key); -} + ch = state.input.charCodeAt(++state.position); + } -/** - * Gets the value at `key` of `object`. - * - * @private - * @param {Object} [object] The object to query. - * @param {string} key The key of the property to get. - * @returns {*} Returns the property value. - */ -function getValue(object, key) { - return object == null ? undefined : object[key]; -} + captureSegment(state, captureStart, captureEnd, false); -/** - * Checks if `value` is a host object in IE < 9. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a host object, else `false`. - */ -function isHostObject(value) { - // Many host objects are `Object` objects that can coerce to strings - // despite having improperly defined `toString` methods. - var result = false; - if (value != null && typeof value.toString != 'function') { - try { - result = !!(value + ''); - } catch (e) {} + if (state.result) { + return true; } - return result; -} - -/** - * Converts `set` to an array of its values. - * - * @private - * @param {Object} set The set to convert. - * @returns {Array} Returns the values. - */ -function setToArray(set) { - var index = -1, - result = Array(set.size); - set.forEach(function(value) { - result[++index] = value; - }); - return result; + state.kind = _kind; + state.result = _result; + return false; } -/** Used for built-in method references. 
*/ -var arrayProto = Array.prototype, - funcProto = Function.prototype, - objectProto = Object.prototype; - -/** Used to detect overreaching core-js shims. */ -var coreJsData = root['__core-js_shared__']; - -/** Used to detect methods masquerading as native. */ -var maskSrcKey = (function() { - var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || ''); - return uid ? ('Symbol(src)_1.' + uid) : ''; -}()); +function readSingleQuotedScalar(state, nodeIndent) { + var ch, + captureStart, captureEnd; -/** Used to resolve the decompiled source of functions. */ -var funcToString = funcProto.toString; + ch = state.input.charCodeAt(state.position); -/** Used to check objects for own properties. */ -var hasOwnProperty = objectProto.hasOwnProperty; + if (ch !== 0x27/* ' */) { + return false; + } -/** - * Used to resolve the - * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) - * of values. - */ -var objectToString = objectProto.toString; + state.kind = 'scalar'; + state.result = ''; + state.position++; + captureStart = captureEnd = state.position; -/** Used to detect if a method is native. */ -var reIsNative = RegExp('^' + - funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&') - .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$' -); + while ((ch = state.input.charCodeAt(state.position)) !== 0) { + if (ch === 0x27/* ' */) { + captureSegment(state, captureStart, state.position, true); + ch = state.input.charCodeAt(++state.position); -/** Built-in value references. */ -var splice = arrayProto.splice; + if (ch === 0x27/* ' */) { + captureStart = state.position; + state.position++; + captureEnd = state.position; + } else { + return true; + } -/* Built-in method references that are verified to be native. */ -var Map = getNative(root, 'Map'), - Set = getNative(root, 'Set'), - nativeCreate = getNative(Object, 'create'); + } else if (is_EOL(ch)) { + captureSegment(state, captureStart, captureEnd, true); + writeFoldedLines(state, skipSeparationSpace(state, false, nodeIndent)); + captureStart = captureEnd = state.position; -/** - * Creates a hash object. - * - * @private - * @constructor - * @param {Array} [entries] The key-value pairs to cache. - */ -function Hash(entries) { - var index = -1, - length = entries ? entries.length : 0; + } else if (state.position === state.lineStart && testDocumentSeparator(state)) { + throwError(state, 'unexpected end of the document within a single quoted scalar'); - this.clear(); - while (++index < length) { - var entry = entries[index]; - this.set(entry[0], entry[1]); + } else { + state.position++; + captureEnd = state.position; + } } -} -/** - * Removes all key-value entries from the hash. - * - * @private - * @name clear - * @memberOf Hash - */ -function hashClear() { - this.__data__ = nativeCreate ? nativeCreate(null) : {}; + throwError(state, 'unexpected end of the stream within a single quoted scalar'); } -/** - * Removes `key` and its value from the hash. - * - * @private - * @name delete - * @memberOf Hash - * @param {Object} hash The hash to modify. - * @param {string} key The key of the value to remove. - * @returns {boolean} Returns `true` if the entry was removed, else `false`. - */ -function hashDelete(key) { - return this.has(key) && delete this.__data__[key]; -} +function readDoubleQuotedScalar(state, nodeIndent) { + var captureStart, + captureEnd, + hexLength, + hexResult, + tmp, + ch; -/** - * Gets the hash value for `key`. 
- * - * @private - * @name get - * @memberOf Hash - * @param {string} key The key of the value to get. - * @returns {*} Returns the entry value. - */ -function hashGet(key) { - var data = this.__data__; - if (nativeCreate) { - var result = data[key]; - return result === HASH_UNDEFINED ? undefined : result; + ch = state.input.charCodeAt(state.position); + + if (ch !== 0x22/* " */) { + return false; } - return hasOwnProperty.call(data, key) ? data[key] : undefined; -} -/** - * Checks if a hash value for `key` exists. - * - * @private - * @name has - * @memberOf Hash - * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. - */ -function hashHas(key) { - var data = this.__data__; - return nativeCreate ? data[key] !== undefined : hasOwnProperty.call(data, key); -} + state.kind = 'scalar'; + state.result = ''; + state.position++; + captureStart = captureEnd = state.position; -/** - * Sets the hash `key` to `value`. - * - * @private - * @name set - * @memberOf Hash - * @param {string} key The key of the value to set. - * @param {*} value The value to set. - * @returns {Object} Returns the hash instance. - */ -function hashSet(key, value) { - var data = this.__data__; - data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value; - return this; -} + while ((ch = state.input.charCodeAt(state.position)) !== 0) { + if (ch === 0x22/* " */) { + captureSegment(state, captureStart, state.position, true); + state.position++; + return true; -// Add methods to `Hash`. -Hash.prototype.clear = hashClear; -Hash.prototype['delete'] = hashDelete; -Hash.prototype.get = hashGet; -Hash.prototype.has = hashHas; -Hash.prototype.set = hashSet; + } else if (ch === 0x5C/* \ */) { + captureSegment(state, captureStart, state.position, true); + ch = state.input.charCodeAt(++state.position); -/** - * Creates an list cache object. - * - * @private - * @constructor - * @param {Array} [entries] The key-value pairs to cache. - */ -function ListCache(entries) { - var index = -1, - length = entries ? entries.length : 0; + if (is_EOL(ch)) { + skipSeparationSpace(state, false, nodeIndent); - this.clear(); - while (++index < length) { - var entry = entries[index]; - this.set(entry[0], entry[1]); - } -} + // TODO: rework to inline fn with no type cast? + } else if (ch < 256 && simpleEscapeCheck[ch]) { + state.result += simpleEscapeMap[ch]; + state.position++; -/** - * Removes all key-value entries from the list cache. - * - * @private - * @name clear - * @memberOf ListCache - */ -function listCacheClear() { - this.__data__ = []; -} + } else if ((tmp = escapedHexLen(ch)) > 0) { + hexLength = tmp; + hexResult = 0; -/** - * Removes `key` and its value from the list cache. - * - * @private - * @name delete - * @memberOf ListCache - * @param {string} key The key of the value to remove. - * @returns {boolean} Returns `true` if the entry was removed, else `false`. - */ -function listCacheDelete(key) { - var data = this.__data__, - index = assocIndexOf(data, key); + for (; hexLength > 0; hexLength--) { + ch = state.input.charCodeAt(++state.position); - if (index < 0) { - return false; - } - var lastIndex = data.length - 1; - if (index == lastIndex) { - data.pop(); - } else { - splice.call(data, index, 1); - } - return true; -} + if ((tmp = fromHexCode(ch)) >= 0) { + hexResult = (hexResult << 4) + tmp; -/** - * Gets the list cache value for `key`. 
- * - * @private - * @name get - * @memberOf ListCache - * @param {string} key The key of the value to get. - * @returns {*} Returns the entry value. - */ -function listCacheGet(key) { - var data = this.__data__, - index = assocIndexOf(data, key); + } else { + throwError(state, 'expected hexadecimal character'); + } + } - return index < 0 ? undefined : data[index][1]; -} + state.result += charFromCodepoint(hexResult); -/** - * Checks if a list cache value for `key` exists. - * - * @private - * @name has - * @memberOf ListCache - * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. - */ -function listCacheHas(key) { - return assocIndexOf(this.__data__, key) > -1; -} + state.position++; -/** - * Sets the list cache `key` to `value`. - * - * @private - * @name set - * @memberOf ListCache - * @param {string} key The key of the value to set. - * @param {*} value The value to set. - * @returns {Object} Returns the list cache instance. - */ -function listCacheSet(key, value) { - var data = this.__data__, - index = assocIndexOf(data, key); + } else { + throwError(state, 'unknown escape sequence'); + } - if (index < 0) { - data.push([key, value]); - } else { - data[index][1] = value; - } - return this; -} + captureStart = captureEnd = state.position; -// Add methods to `ListCache`. -ListCache.prototype.clear = listCacheClear; -ListCache.prototype['delete'] = listCacheDelete; -ListCache.prototype.get = listCacheGet; -ListCache.prototype.has = listCacheHas; -ListCache.prototype.set = listCacheSet; + } else if (is_EOL(ch)) { + captureSegment(state, captureStart, captureEnd, true); + writeFoldedLines(state, skipSeparationSpace(state, false, nodeIndent)); + captureStart = captureEnd = state.position; -/** - * Creates a map cache object to store key-value pairs. - * - * @private - * @constructor - * @param {Array} [entries] The key-value pairs to cache. - */ -function MapCache(entries) { - var index = -1, - length = entries ? entries.length : 0; + } else if (state.position === state.lineStart && testDocumentSeparator(state)) { + throwError(state, 'unexpected end of the document within a double quoted scalar'); - this.clear(); - while (++index < length) { - var entry = entries[index]; - this.set(entry[0], entry[1]); + } else { + state.position++; + captureEnd = state.position; + } } -} -/** - * Removes all key-value entries from the map. - * - * @private - * @name clear - * @memberOf MapCache - */ -function mapCacheClear() { - this.__data__ = { - 'hash': new Hash, - 'map': new (Map || ListCache), - 'string': new Hash - }; + throwError(state, 'unexpected end of the stream within a double quoted scalar'); } -/** - * Removes `key` and its value from the map. - * - * @private - * @name delete - * @memberOf MapCache - * @param {string} key The key of the value to remove. - * @returns {boolean} Returns `true` if the entry was removed, else `false`. - */ -function mapCacheDelete(key) { - return getMapData(this, key)['delete'](key); -} +function readFlowCollection(state, nodeIndent) { + var readNext = true, + _line, + _lineStart, + _pos, + _tag = state.tag, + _result, + _anchor = state.anchor, + following, + terminator, + isPair, + isExplicitPair, + isMapping, + overridableKeys = Object.create(null), + keyNode, + keyTag, + valueNode, + ch; -/** - * Gets the map value for `key`. - * - * @private - * @name get - * @memberOf MapCache - * @param {string} key The key of the value to get. - * @returns {*} Returns the entry value. 
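The plain, single-quoted and double-quoted scalar readers added in this hunk (readPlainScalar, readSingleQuotedScalar, readDoubleQuotedScalar) differ mainly in escape handling; a minimal sketch of that behaviour through the library's public API, assuming the bundled parser is js-yaml and requiring it directly only for illustration:

const yaml = require('js-yaml'); // assumption: the bundled module is js-yaml

// Plain and single-quoted scalars keep backslashes literally;
// in single quotes only '' escapes a quote character.
yaml.load("plain: backslash-n stays \\n as two characters");
yaml.load("single: 'it''s literal, \\n is not an escape'");

// Double-quoted scalars honour escapes, including the \xNN hex form
// handled by escapedHexLen/fromHexCode above.
yaml.load('double: "line\\nbreak and \\x41"'); // => { double: 'line\nbreak and A' }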
- */ -function mapCacheGet(key) { - return getMapData(this, key).get(key); -} + ch = state.input.charCodeAt(state.position); -/** - * Checks if a map value for `key` exists. - * - * @private - * @name has - * @memberOf MapCache - * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. - */ -function mapCacheHas(key) { - return getMapData(this, key).has(key); -} + if (ch === 0x5B/* [ */) { + terminator = 0x5D;/* ] */ + isMapping = false; + _result = []; + } else if (ch === 0x7B/* { */) { + terminator = 0x7D;/* } */ + isMapping = true; + _result = {}; + } else { + return false; + } -/** - * Sets the map `key` to `value`. - * - * @private - * @name set - * @memberOf MapCache - * @param {string} key The key of the value to set. - * @param {*} value The value to set. - * @returns {Object} Returns the map cache instance. - */ -function mapCacheSet(key, value) { - getMapData(this, key).set(key, value); - return this; -} + if (state.anchor !== null) { + state.anchorMap[state.anchor] = _result; + } -// Add methods to `MapCache`. -MapCache.prototype.clear = mapCacheClear; -MapCache.prototype['delete'] = mapCacheDelete; -MapCache.prototype.get = mapCacheGet; -MapCache.prototype.has = mapCacheHas; -MapCache.prototype.set = mapCacheSet; + ch = state.input.charCodeAt(++state.position); -/** - * - * Creates an array cache object to store unique values. - * - * @private - * @constructor - * @param {Array} [values] The values to cache. - */ -function SetCache(values) { - var index = -1, - length = values ? values.length : 0; + while (ch !== 0) { + skipSeparationSpace(state, true, nodeIndent); - this.__data__ = new MapCache; - while (++index < length) { - this.add(values[index]); - } -} + ch = state.input.charCodeAt(state.position); -/** - * Adds `value` to the array cache. - * - * @private - * @name add - * @memberOf SetCache - * @alias push - * @param {*} value The value to cache. - * @returns {Object} Returns the cache instance. - */ -function setCacheAdd(value) { - this.__data__.set(value, HASH_UNDEFINED); - return this; -} + if (ch === terminator) { + state.position++; + state.tag = _tag; + state.anchor = _anchor; + state.kind = isMapping ? 'mapping' : 'sequence'; + state.result = _result; + return true; + } else if (!readNext) { + throwError(state, 'missed comma between flow collection entries'); + } else if (ch === 0x2C/* , */) { + // "flow collection entries can never be completely empty", as per YAML 1.2, section 7.4 + throwError(state, "expected the node content, but found ','"); + } -/** - * Checks if `value` is in the array cache. - * - * @private - * @name has - * @memberOf SetCache - * @param {*} value The value to search for. - * @returns {number} Returns `true` if `value` is found, else `false`. - */ -function setCacheHas(value) { - return this.__data__.has(value); -} + keyTag = keyNode = valueNode = null; + isPair = isExplicitPair = false; -// Add methods to `SetCache`. -SetCache.prototype.add = SetCache.prototype.push = setCacheAdd; -SetCache.prototype.has = setCacheHas; + if (ch === 0x3F/* ? */) { + following = state.input.charCodeAt(state.position + 1); -/** - * Gets the index at which the `key` is found in `array` of key-value pairs. - * - * @private - * @param {Array} array The array to inspect. - * @param {*} key The key to search for. - * @returns {number} Returns the index of the matched value, else `-1`. 
- */ -function assocIndexOf(array, key) { - var length = array.length; - while (length--) { - if (eq(array[length][0], key)) { - return length; + if (is_WS_OR_EOL(following)) { + isPair = isExplicitPair = true; + state.position++; + skipSeparationSpace(state, true, nodeIndent); + } } - } - return -1; -} -/** - * The base implementation of `_.isNative` without bad shim checks. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a native function, - * else `false`. - */ -function baseIsNative(value) { - if (!isObject(value) || isMasked(value)) { - return false; - } - var pattern = (isFunction(value) || isHostObject(value)) ? reIsNative : reIsHostCtor; - return pattern.test(toSource(value)); -} + _line = state.line; // Save the current line. + _lineStart = state.lineStart; + _pos = state.position; + composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true); + keyTag = state.tag; + keyNode = state.result; + skipSeparationSpace(state, true, nodeIndent); -/** - * The base implementation of `_.uniqBy` without support for iteratee shorthands. - * - * @private - * @param {Array} array The array to inspect. - * @param {Function} [iteratee] The iteratee invoked per element. - * @param {Function} [comparator] The comparator invoked per element. - * @returns {Array} Returns the new duplicate free array. - */ -function baseUniq(array, iteratee, comparator) { - var index = -1, - includes = arrayIncludes, - length = array.length, - isCommon = true, - result = [], - seen = result; - - if (comparator) { - isCommon = false; - includes = arrayIncludesWith; - } - else if (length >= LARGE_ARRAY_SIZE) { - var set = iteratee ? null : createSet(array); - if (set) { - return setToArray(set); - } - isCommon = false; - includes = cacheHas; - seen = new SetCache; - } - else { - seen = iteratee ? [] : result; - } - outer: - while (++index < length) { - var value = array[index], - computed = iteratee ? iteratee(value) : value; - - value = (comparator || value !== 0) ? value : 0; - if (isCommon && computed === computed) { - var seenIndex = seen.length; - while (seenIndex--) { - if (seen[seenIndex] === computed) { - continue outer; - } - } - if (iteratee) { - seen.push(computed); - } - result.push(value); + ch = state.input.charCodeAt(state.position); + + if ((isExplicitPair || state.line === _line) && ch === 0x3A/* : */) { + isPair = true; + ch = state.input.charCodeAt(++state.position); + skipSeparationSpace(state, true, nodeIndent); + composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true); + valueNode = state.result; } - else if (!includes(seen, computed, comparator)) { - if (seen !== result) { - seen.push(computed); - } - result.push(value); + + if (isMapping) { + storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, _line, _lineStart, _pos); + } else if (isPair) { + _result.push(storeMappingPair(state, null, overridableKeys, keyTag, keyNode, valueNode, _line, _lineStart, _pos)); + } else { + _result.push(keyNode); } - } - return result; -} -/** - * Creates a set object of `values`. - * - * @private - * @param {Array} values The values to add to the set. - * @returns {Object} Returns the new set. - */ -var createSet = !(Set && (1 / setToArray(new Set([,-0]))[1]) == INFINITY) ? noop : function(values) { - return new Set(values); -}; + skipSeparationSpace(state, true, nodeIndent); -/** - * Gets the data for `map`. - * - * @private - * @param {Object} map The map to query. - * @param {string} key The reference key. 
- * @returns {*} Returns the map data. - */ -function getMapData(map, key) { - var data = map.__data__; - return isKeyable(key) - ? data[typeof key == 'string' ? 'string' : 'hash'] - : data.map; -} + ch = state.input.charCodeAt(state.position); -/** - * Gets the native function at `key` of `object`. - * - * @private - * @param {Object} object The object to query. - * @param {string} key The key of the method to get. - * @returns {*} Returns the function if it's native, else `undefined`. - */ -function getNative(object, key) { - var value = getValue(object, key); - return baseIsNative(value) ? value : undefined; -} + if (ch === 0x2C/* , */) { + readNext = true; + ch = state.input.charCodeAt(++state.position); + } else { + readNext = false; + } + } -/** - * Checks if `value` is suitable for use as unique object key. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is suitable, else `false`. - */ -function isKeyable(value) { - var type = typeof value; - return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean') - ? (value !== '__proto__') - : (value === null); + throwError(state, 'unexpected end of the stream within a flow collection'); } -/** - * Checks if `func` has its source masked. - * - * @private - * @param {Function} func The function to check. - * @returns {boolean} Returns `true` if `func` is masked, else `false`. - */ -function isMasked(func) { - return !!maskSrcKey && (maskSrcKey in func); -} +function readBlockScalar(state, nodeIndent) { + var captureStart, + folding, + chomping = CHOMPING_CLIP, + didReadContent = false, + detectedIndent = false, + textIndent = nodeIndent, + emptyLines = 0, + atMoreIndented = false, + tmp, + ch; -/** - * Converts `func` to its source code. - * - * @private - * @param {Function} func The function to process. - * @returns {string} Returns the source code. - */ -function toSource(func) { - if (func != null) { - try { - return funcToString.call(func); - } catch (e) {} - try { - return (func + ''); - } catch (e) {} + ch = state.input.charCodeAt(state.position); + + if (ch === 0x7C/* | */) { + folding = false; + } else if (ch === 0x3E/* > */) { + folding = true; + } else { + return false; } - return ''; -} -/** - * Creates a duplicate-free version of an array, using - * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) - * for equality comparisons, in which only the first occurrence of each - * element is kept. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Array - * @param {Array} array The array to inspect. - * @returns {Array} Returns the new duplicate free array. - * @example - * - * _.uniq([2, 1, 2]); - * // => [2, 1] - */ -function uniq(array) { - return (array && array.length) - ? baseUniq(array) - : []; -} + state.kind = 'scalar'; + state.result = ''; -/** - * Performs a - * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) - * comparison between two values to determine if they are equivalent. - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to compare. - * @param {*} other The other value to compare. - * @returns {boolean} Returns `true` if the values are equivalent, else `false`. 
- * @example - * - * var object = { 'a': 1 }; - * var other = { 'a': 1 }; - * - * _.eq(object, object); - * // => true - * - * _.eq(object, other); - * // => false - * - * _.eq('a', 'a'); - * // => true - * - * _.eq('a', Object('a')); - * // => false - * - * _.eq(NaN, NaN); - * // => true - */ -function eq(value, other) { - return value === other || (value !== value && other !== other); -} + while (ch !== 0) { + ch = state.input.charCodeAt(++state.position); -/** - * Checks if `value` is classified as a `Function` object. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a function, else `false`. - * @example - * - * _.isFunction(_); - * // => true - * - * _.isFunction(/abc/); - * // => false - */ -function isFunction(value) { - // The use of `Object#toString` avoids issues with the `typeof` operator - // in Safari 8-9 which returns 'object' for typed array and other constructors. - var tag = isObject(value) ? objectToString.call(value) : ''; - return tag == funcTag || tag == genTag; -} + if (ch === 0x2B/* + */ || ch === 0x2D/* - */) { + if (CHOMPING_CLIP === chomping) { + chomping = (ch === 0x2B/* + */) ? CHOMPING_KEEP : CHOMPING_STRIP; + } else { + throwError(state, 'repeat of a chomping mode identifier'); + } -/** - * Checks if `value` is the - * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) - * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is an object, else `false`. - * @example - * - * _.isObject({}); - * // => true - * - * _.isObject([1, 2, 3]); - * // => true - * - * _.isObject(_.noop); - * // => true - * - * _.isObject(null); - * // => false - */ -function isObject(value) { - var type = typeof value; - return !!value && (type == 'object' || type == 'function'); -} + } else if ((tmp = fromDecimalCode(ch)) >= 0) { + if (tmp === 0) { + throwError(state, 'bad explicit indentation width of a block scalar; it cannot be less than one'); + } else if (!detectedIndent) { + textIndent = nodeIndent + tmp - 1; + detectedIndent = true; + } else { + throwError(state, 'repeat of an indentation width identifier'); + } -/** - * This method returns `undefined`. - * - * @static - * @memberOf _ - * @since 2.3.0 - * @category Util - * @example - * - * _.times(2, _.noop); - * // => [undefined, undefined] - */ -function noop() { - // No operation performed. 
-} + } else { + break; + } + } -module.exports = uniq; + if (is_WHITE_SPACE(ch)) { + do { ch = state.input.charCodeAt(++state.position); } + while (is_WHITE_SPACE(ch)); + if (ch === 0x23/* # */) { + do { ch = state.input.charCodeAt(++state.position); } + while (!is_EOL(ch) && (ch !== 0)); + } + } -/***/ }), + while (ch !== 0) { + readLineBreak(state); + state.lineIndent = 0; -/***/ 7493: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + ch = state.input.charCodeAt(state.position); -"use strict"; + while ((!detectedIndent || state.lineIndent < textIndent) && + (ch === 0x20/* Space */)) { + state.lineIndent++; + ch = state.input.charCodeAt(++state.position); + } -const os = __nccwpck_require__(2087); - -const nameMap = new Map([ - [20, ['Big Sur', '11']], - [19, ['Catalina', '10.15']], - [18, ['Mojave', '10.14']], - [17, ['High Sierra', '10.13']], - [16, ['Sierra', '10.12']], - [15, ['El Capitan', '10.11']], - [14, ['Yosemite', '10.10']], - [13, ['Mavericks', '10.9']], - [12, ['Mountain Lion', '10.8']], - [11, ['Lion', '10.7']], - [10, ['Snow Leopard', '10.6']], - [9, ['Leopard', '10.5']], - [8, ['Tiger', '10.4']], - [7, ['Panther', '10.3']], - [6, ['Jaguar', '10.2']], - [5, ['Puma', '10.1']] -]); - -const macosRelease = release => { - release = Number((release || os.release()).split('.')[0]); - - const [name, version] = nameMap.get(release); + if (!detectedIndent && state.lineIndent > textIndent) { + textIndent = state.lineIndent; + } - return { - name, - version - }; -}; + if (is_EOL(ch)) { + emptyLines++; + continue; + } -module.exports = macosRelease; -// TODO: remove this in the next major version -module.exports.default = macosRelease; + // End of the scalar. + if (state.lineIndent < textIndent) { + // Perform the chomping. + if (chomping === CHOMPING_KEEP) { + state.result += common.repeat('\n', didReadContent ? 1 + emptyLines : emptyLines); + } else if (chomping === CHOMPING_CLIP) { + if (didReadContent) { // i.e. only if the scalar is not empty. + state.result += '\n'; + } + } -/***/ }), + // Break this `while` cycle and go to the funciton's epilogue. + break; + } -/***/ 8560: -/***/ ((module) => { + // Folded style: use fancy rules to handle line breaks. + if (folding) { -"use strict"; + // Lines starting with white space characters (more-indented lines) are not folded. + if (is_WHITE_SPACE(ch)) { + atMoreIndented = true; + // except for the first content line (cf. Example 8.1) + state.result += common.repeat('\n', didReadContent ? 1 + emptyLines : emptyLines); + // End of more-indented block. + } else if (atMoreIndented) { + atMoreIndented = false; + state.result += common.repeat('\n', emptyLines + 1); -/** - * Tries to execute a function and discards any error that occurs. - * @param {Function} fn - Function that might or might not throw an error. - * @returns {?*} Return-value of the function when no error occurred. - */ -module.exports = function(fn) { + // Just one line break - perceive as the same line. + } else if (emptyLines === 0) { + if (didReadContent) { // i.e. only if we have already read some scalar content. + state.result += ' '; + } - try { return fn() } catch (e) {} + // Several line breaks - perceive as different lines. + } else { + state.result += common.repeat('\n', emptyLines); + } -} + // Literal style: just add exact number of line breaks between content lines. + } else { + // Keep all line breaks except the header line break. + state.result += common.repeat('\n', didReadContent ? 
1 + emptyLines : emptyLines); + } -/***/ }), + didReadContent = true; + detectedIndent = true; + emptyLines = 0; + captureStart = state.position; -/***/ 467: -/***/ ((module, exports, __nccwpck_require__) => { + while (!is_EOL(ch) && (ch !== 0)) { + ch = state.input.charCodeAt(++state.position); + } -"use strict"; + captureSegment(state, captureStart, state.position, false); + } + return true; +} -Object.defineProperty(exports, "__esModule", ({ value: true })); +function readBlockSequence(state, nodeIndent) { + var _line, + _tag = state.tag, + _anchor = state.anchor, + _result = [], + following, + detected = false, + ch; -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + // there is a leading tab before this token, so it can't be a block sequence/mapping; + // it can still be flow sequence/mapping or a scalar + if (state.firstTabInLine !== -1) return false; -var Stream = _interopDefault(__nccwpck_require__(2413)); -var http = _interopDefault(__nccwpck_require__(8605)); -var Url = _interopDefault(__nccwpck_require__(8835)); -var https = _interopDefault(__nccwpck_require__(7211)); -var zlib = _interopDefault(__nccwpck_require__(8761)); + if (state.anchor !== null) { + state.anchorMap[state.anchor] = _result; + } -// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + ch = state.input.charCodeAt(state.position); -// fix for "Readable" isn't a named export issue -const Readable = Stream.Readable; + while (ch !== 0) { + if (state.firstTabInLine !== -1) { + state.position = state.firstTabInLine; + throwError(state, 'tab characters must not be used in indentation'); + } -const BUFFER = Symbol('buffer'); -const TYPE = Symbol('type'); + if (ch !== 0x2D/* - */) { + break; + } -class Blob { - constructor() { - this[TYPE] = ''; + following = state.input.charCodeAt(state.position + 1); - const blobParts = arguments[0]; - const options = arguments[1]; + if (!is_WS_OR_EOL(following)) { + break; + } - const buffers = []; - let size = 0; + detected = true; + state.position++; - if (blobParts) { - const a = blobParts; - const length = Number(a.length); - for (let i = 0; i < length; i++) { - const element = a[i]; - let buffer; - if (element instanceof Buffer) { - buffer = element; - } else if (ArrayBuffer.isView(element)) { - buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); - } else if (element instanceof ArrayBuffer) { - buffer = Buffer.from(element); - } else if (element instanceof Blob) { - buffer = element[BUFFER]; - } else { - buffer = Buffer.from(typeof element === 'string' ? 
element : String(element)); - } - size += buffer.length; - buffers.push(buffer); - } - } + if (skipSeparationSpace(state, true, -1)) { + if (state.lineIndent <= nodeIndent) { + _result.push(null); + ch = state.input.charCodeAt(state.position); + continue; + } + } - this[BUFFER] = Buffer.concat(buffers); + _line = state.line; + composeNode(state, nodeIndent, CONTEXT_BLOCK_IN, false, true); + _result.push(state.result); + skipSeparationSpace(state, true, -1); - let type = options && options.type !== undefined && String(options.type).toLowerCase(); - if (type && !/[^\u0020-\u007E]/.test(type)) { - this[TYPE] = type; - } - } - get size() { - return this[BUFFER].length; - } - get type() { - return this[TYPE]; - } - text() { - return Promise.resolve(this[BUFFER].toString()); - } - arrayBuffer() { - const buf = this[BUFFER]; - const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - return Promise.resolve(ab); - } - stream() { - const readable = new Readable(); - readable._read = function () {}; - readable.push(this[BUFFER]); - readable.push(null); - return readable; - } - toString() { - return '[object Blob]'; - } - slice() { - const size = this.size; + ch = state.input.charCodeAt(state.position); - const start = arguments[0]; - const end = arguments[1]; - let relativeStart, relativeEnd; - if (start === undefined) { - relativeStart = 0; - } else if (start < 0) { - relativeStart = Math.max(size + start, 0); - } else { - relativeStart = Math.min(start, size); - } - if (end === undefined) { - relativeEnd = size; - } else if (end < 0) { - relativeEnd = Math.max(size + end, 0); - } else { - relativeEnd = Math.min(end, size); - } - const span = Math.max(relativeEnd - relativeStart, 0); + if ((state.line === _line || state.lineIndent > nodeIndent) && (ch !== 0)) { + throwError(state, 'bad indentation of a sequence entry'); + } else if (state.lineIndent < nodeIndent) { + break; + } + } - const buffer = this[BUFFER]; - const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); - const blob = new Blob([], { type: arguments[2] }); - blob[BUFFER] = slicedBuffer; - return blob; - } + if (detected) { + state.tag = _tag; + state.anchor = _anchor; + state.kind = 'sequence'; + state.result = _result; + return true; + } + return false; } -Object.defineProperties(Blob.prototype, { - size: { enumerable: true }, - type: { enumerable: true }, - slice: { enumerable: true } -}); - -Object.defineProperty(Blob.prototype, Symbol.toStringTag, { - value: 'Blob', - writable: false, - enumerable: false, - configurable: true -}); +function readBlockMapping(state, nodeIndent, flowIndent) { + var following, + allowCompact, + _line, + _keyLine, + _keyLineStart, + _keyPos, + _tag = state.tag, + _anchor = state.anchor, + _result = {}, + overridableKeys = Object.create(null), + keyTag = null, + keyNode = null, + valueNode = null, + atExplicitKey = false, + detected = false, + ch; -/** - * fetch-error.js - * - * FetchError interface for operational errors - */ + // there is a leading tab before this token, so it can't be a block sequence/mapping; + // it can still be flow sequence/mapping or a scalar + if (state.firstTabInLine !== -1) return false; -/** - * Create FetchError instance - * - * @param String message Error message for human - * @param String type Error type for machine - * @param String systemError For Node.js system error - * @return FetchError - */ -function FetchError(message, type, systemError) { - Error.call(this, message); + if (state.anchor !== null) { + 
state.anchorMap[state.anchor] = _result; + } - this.message = message; - this.type = type; + ch = state.input.charCodeAt(state.position); - // when err.type is `system`, err.code contains system error code - if (systemError) { - this.code = this.errno = systemError.code; - } + while (ch !== 0) { + if (!atExplicitKey && state.firstTabInLine !== -1) { + state.position = state.firstTabInLine; + throwError(state, 'tab characters must not be used in indentation'); + } - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} + following = state.input.charCodeAt(state.position + 1); + _line = state.line; // Save the current line. -FetchError.prototype = Object.create(Error.prototype); -FetchError.prototype.constructor = FetchError; -FetchError.prototype.name = 'FetchError'; + // + // Explicit notation case. There are two separate blocks: + // first for the key (denoted by "?") and second for the value (denoted by ":") + // + if ((ch === 0x3F/* ? */ || ch === 0x3A/* : */) && is_WS_OR_EOL(following)) { -let convert; -try { - convert = __nccwpck_require__(2877).convert; -} catch (e) {} + if (ch === 0x3F/* ? */) { + if (atExplicitKey) { + storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos); + keyTag = keyNode = valueNode = null; + } -const INTERNALS = Symbol('Body internals'); + detected = true; + atExplicitKey = true; + allowCompact = true; -// fix an issue where "PassThrough" isn't a named export for node <10 -const PassThrough = Stream.PassThrough; + } else if (atExplicitKey) { + // i.e. 0x3A/* : */ === character after the explicit key. + atExplicitKey = false; + allowCompact = true; -/** - * Body mixin - * - * Ref: https://fetch.spec.whatwg.org/#body - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -function Body(body) { - var _this = this; + } else { + throwError(state, 'incomplete explicit mapping pair; a key node is missed; or followed by a non-tabulated empty line'); + } - var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, - _ref$size = _ref.size; + state.position += 1; + ch = following; - let size = _ref$size === undefined ? 0 : _ref$size; - var _ref$timeout = _ref.timeout; - let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; + // + // Implicit notation case. Flow-style node as the key first, then ":", and the value. + // + } else { + _keyLine = state.line; + _keyLineStart = state.lineStart; + _keyPos = state.position; - if (body == null) { - // body is undefined or null - body = null; - } else if (isURLSearchParams(body)) { - // body is a URLSearchParams - body = Buffer.from(body.toString()); - } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { - // body is ArrayBuffer - body = Buffer.from(body); - } else if (ArrayBuffer.isView(body)) { - // body is ArrayBufferView - body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); - } else if (body instanceof Stream) ; else { - // none of the above - // coerce to string then buffer - body = Buffer.from(String(body)); - } - this[INTERNALS] = { - body, - disturbed: false, - error: null - }; - this.size = size; - this.timeout = timeout; + if (!composeNode(state, flowIndent, CONTEXT_FLOW_OUT, false, true)) { + // Neither implicit nor explicit notation. + // Reading is done. Go to the epilogue. 
+ break; + } - if (body instanceof Stream) { - body.on('error', function (err) { - const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); - _this[INTERNALS].error = error; - }); - } -} + if (state.line === _line) { + ch = state.input.charCodeAt(state.position); -Body.prototype = { - get body() { - return this[INTERNALS].body; - }, + while (is_WHITE_SPACE(ch)) { + ch = state.input.charCodeAt(++state.position); + } - get bodyUsed() { - return this[INTERNALS].disturbed; - }, + if (ch === 0x3A/* : */) { + ch = state.input.charCodeAt(++state.position); - /** - * Decode response as ArrayBuffer - * - * @return Promise - */ - arrayBuffer() { - return consumeBody.call(this).then(function (buf) { - return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - }); - }, + if (!is_WS_OR_EOL(ch)) { + throwError(state, 'a whitespace character is expected after the key-value separator within a block mapping'); + } - /** - * Return raw response as Blob - * - * @return Promise - */ - blob() { - let ct = this.headers && this.headers.get('content-type') || ''; - return consumeBody.call(this).then(function (buf) { - return Object.assign( - // Prevent copying - new Blob([], { - type: ct.toLowerCase() - }), { - [BUFFER]: buf - }); - }); - }, + if (atExplicitKey) { + storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos); + keyTag = keyNode = valueNode = null; + } - /** - * Decode response as json - * - * @return Promise - */ - json() { - var _this2 = this; + detected = true; + atExplicitKey = false; + allowCompact = false; + keyTag = state.tag; + keyNode = state.result; - return consumeBody.call(this).then(function (buffer) { - try { - return JSON.parse(buffer.toString()); - } catch (err) { - return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); - } - }); - }, + } else if (detected) { + throwError(state, 'can not read an implicit mapping pair; a colon is missed'); - /** - * Decode response as text - * - * @return Promise - */ - text() { - return consumeBody.call(this).then(function (buffer) { - return buffer.toString(); - }); - }, + } else { + state.tag = _tag; + state.anchor = _anchor; + return true; // Keep the result of `composeNode`. + } - /** - * Decode response as buffer (non-spec api) - * - * @return Promise - */ - buffer() { - return consumeBody.call(this); - }, + } else if (detected) { + throwError(state, 'can not read a block mapping entry; a multiline key may not be an implicit key'); - /** - * Decode response as text, while automatically detecting the encoding and - * trying to decode to UTF-8 (non-spec api) - * - * @return Promise - */ - textConverted() { - var _this3 = this; + } else { + state.tag = _tag; + state.anchor = _anchor; + return true; // Keep the result of `composeNode`. + } + } - return consumeBody.call(this).then(function (buffer) { - return convertBody(buffer, _this3.headers); - }); - } -}; + // + // Common reading code for both explicit and implicit notations. + // + if (state.line === _line || state.lineIndent > nodeIndent) { + if (atExplicitKey) { + _keyLine = state.line; + _keyLineStart = state.lineStart; + _keyPos = state.position; + } -// In browsers, all properties are enumerable. 
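readFlowCollection, readBlockSequence and readBlockMapping are the three collection readers added in this hunk; a brief sketch of the equivalent flow and block inputs they accept, with js-yaml assumed as before:

const yaml = require('js-yaml'); // assumption: the bundled module is js-yaml

yaml.load('[1, 2, {a: 1}]');          // flow style   => [ 1, 2, { a: 1 } ]
yaml.load('- 1\n- 2\n- a: 1\n');      // block style  => the same structure
yaml.load('k: {x: 1}\nlist: [a, b]'); // flow collections can appear as block mapping values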
-Object.defineProperties(Body.prototype, { - body: { enumerable: true }, - bodyUsed: { enumerable: true }, - arrayBuffer: { enumerable: true }, - blob: { enumerable: true }, - json: { enumerable: true }, - text: { enumerable: true } -}); + if (composeNode(state, nodeIndent, CONTEXT_BLOCK_OUT, true, allowCompact)) { + if (atExplicitKey) { + keyNode = state.result; + } else { + valueNode = state.result; + } + } -Body.mixIn = function (proto) { - for (const name of Object.getOwnPropertyNames(Body.prototype)) { - // istanbul ignore else: future proof - if (!(name in proto)) { - const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); - Object.defineProperty(proto, name, desc); - } - } -}; + if (!atExplicitKey) { + storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, _keyLine, _keyLineStart, _keyPos); + keyTag = keyNode = valueNode = null; + } -/** - * Consume and convert an entire Body to a Buffer. - * - * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body - * - * @return Promise - */ -function consumeBody() { - var _this4 = this; + skipSeparationSpace(state, true, -1); + ch = state.input.charCodeAt(state.position); + } - if (this[INTERNALS].disturbed) { - return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); - } + if ((state.line === _line || state.lineIndent > nodeIndent) && (ch !== 0)) { + throwError(state, 'bad indentation of a mapping entry'); + } else if (state.lineIndent < nodeIndent) { + break; + } + } - this[INTERNALS].disturbed = true; + // + // Epilogue. + // - if (this[INTERNALS].error) { - return Body.Promise.reject(this[INTERNALS].error); - } + // Special case: last mapping's node contains only the key in explicit notation. + if (atExplicitKey) { + storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos); + } - let body = this.body; + // Expose the resulting mapping. + if (detected) { + state.tag = _tag; + state.anchor = _anchor; + state.kind = 'mapping'; + state.result = _result; + } - // body is null - if (body === null) { - return Body.Promise.resolve(Buffer.alloc(0)); - } + return detected; +} - // body is blob - if (isBlob(body)) { - body = body.stream(); - } +function readTagProperty(state) { + var _position, + isVerbatim = false, + isNamed = false, + tagHandle, + tagName, + ch; - // body is buffer - if (Buffer.isBuffer(body)) { - return Body.Promise.resolve(body); - } + ch = state.input.charCodeAt(state.position); - // istanbul ignore if: should never happen - if (!(body instanceof Stream)) { - return Body.Promise.resolve(Buffer.alloc(0)); - } + if (ch !== 0x21/* ! 
*/) return false; - // body is stream - // get ready to actually consume the body - let accum = []; - let accumBytes = 0; - let abort = false; + if (state.tag !== null) { + throwError(state, 'duplication of a tag property'); + } - return new Body.Promise(function (resolve, reject) { - let resTimeout; + ch = state.input.charCodeAt(++state.position); - // allow timeout on slow response body - if (_this4.timeout) { - resTimeout = setTimeout(function () { - abort = true; - reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); - }, _this4.timeout); - } + if (ch === 0x3C/* < */) { + isVerbatim = true; + ch = state.input.charCodeAt(++state.position); - // handle stream errors - body.on('error', function (err) { - if (err.name === 'AbortError') { - // if the request was aborted, reject with this Error - abort = true; - reject(err); - } else { - // other errors, such as incorrect content-encoding - reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); - } - }); + } else if (ch === 0x21/* ! */) { + isNamed = true; + tagHandle = '!!'; + ch = state.input.charCodeAt(++state.position); - body.on('data', function (chunk) { - if (abort || chunk === null) { - return; - } + } else { + tagHandle = '!'; + } - if (_this4.size && accumBytes + chunk.length > _this4.size) { - abort = true; - reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); - return; - } + _position = state.position; - accumBytes += chunk.length; - accum.push(chunk); - }); + if (isVerbatim) { + do { ch = state.input.charCodeAt(++state.position); } + while (ch !== 0 && ch !== 0x3E/* > */); - body.on('end', function () { - if (abort) { - return; - } + if (state.position < state.length) { + tagName = state.input.slice(_position, state.position); + ch = state.input.charCodeAt(++state.position); + } else { + throwError(state, 'unexpected end of the stream within a verbatim tag'); + } + } else { + while (ch !== 0 && !is_WS_OR_EOL(ch)) { - clearTimeout(resTimeout); + if (ch === 0x21/* ! 
*/) { + if (!isNamed) { + tagHandle = state.input.slice(_position - 1, state.position + 1); - try { - resolve(Buffer.concat(accum, accumBytes)); - } catch (err) { - // handle streams that have accumulated too much data (issue #414) - reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - }); -} + if (!PATTERN_TAG_HANDLE.test(tagHandle)) { + throwError(state, 'named tag handle cannot contain such characters'); + } -/** - * Detect buffer encoding and convert to target encoding - * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding - * - * @param Buffer buffer Incoming buffer - * @param String encoding Target encoding - * @return String - */ -function convertBody(buffer, headers) { - if (typeof convert !== 'function') { - throw new Error('The package `encoding` must be installed to use the textConverted() function'); - } + isNamed = true; + _position = state.position + 1; + } else { + throwError(state, 'tag suffix cannot contain exclamation marks'); + } + } - const ct = headers.get('content-type'); - let charset = 'utf-8'; - let res, str; + ch = state.input.charCodeAt(++state.position); + } - // header - if (ct) { - res = /charset=([^;]*)/i.exec(ct); - } + tagName = state.input.slice(_position, state.position); - // no charset in content type, peek at response body for at most 1024 bytes - str = buffer.slice(0, 1024).toString(); + if (PATTERN_FLOW_INDICATORS.test(tagName)) { + throwError(state, 'tag suffix cannot contain flow indicator characters'); + } + } - // html5 - if (!res && str) { - res = /parent, 0: this=parent, -1: this parentIndent) { + indentStatus = 1; + } else if (state.lineIndent === parentIndent) { + indentStatus = 0; + } else if (state.lineIndent < parentIndent) { + indentStatus = -1; + } + } + } -const MAP = Symbol('map'); -class Headers { - /** - * Headers class - * - * @param Object headers Response headers - * @return Void - */ - constructor() { - let init = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; + if (indentStatus === 1) { + while (readTagProperty(state) || readAnchorProperty(state)) { + if (skipSeparationSpace(state, true, -1)) { + atNewLine = true; + allowBlockCollections = allowBlockStyles; + + if (state.lineIndent > parentIndent) { + indentStatus = 1; + } else if (state.lineIndent === parentIndent) { + indentStatus = 0; + } else if (state.lineIndent < parentIndent) { + indentStatus = -1; + } + } else { + allowBlockCollections = false; + } + } + } - this[MAP] = Object.create(null); + if (allowBlockCollections) { + allowBlockCollections = atNewLine || allowCompact; + } - if (init instanceof Headers) { - const rawHeaders = init.raw(); - const headerNames = Object.keys(rawHeaders); + if (indentStatus === 1 || CONTEXT_BLOCK_OUT === nodeContext) { + if (CONTEXT_FLOW_IN === nodeContext || CONTEXT_FLOW_OUT === nodeContext) { + flowIndent = parentIndent; + } else { + flowIndent = parentIndent + 1; + } - for (const headerName of headerNames) { - for (const value of rawHeaders[headerName]) { - this.append(headerName, value); - } - } + blockIndent = state.position - state.lineStart; - return; - } + if (indentStatus === 1) { + if (allowBlockCollections && + (readBlockSequence(state, blockIndent) || + readBlockMapping(state, blockIndent, flowIndent)) || + readFlowCollection(state, flowIndent)) { + hasContent = true; + } else { + if ((allowBlockScalars && readBlockScalar(state, flowIndent)) || + readSingleQuotedScalar(state, flowIndent) || + readDoubleQuotedScalar(state, flowIndent)) { + hasContent = true; - // We don't worry about converting prop to ByteString here as append() - // will handle it. - if (init == null) ; else if (typeof init === 'object') { - const method = init[Symbol.iterator]; - if (method != null) { - if (typeof method !== 'function') { - throw new TypeError('Header pairs must be iterable'); - } + } else if (readAlias(state)) { + hasContent = true; - // sequence> - // Note: per spec we have to first exhaust the lists then process them - const pairs = []; - for (const pair of init) { - if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { - throw new TypeError('Each header pair must be iterable'); - } - pairs.push(Array.from(pair)); - } + if (state.tag !== null || state.anchor !== null) { + throwError(state, 'alias node should not have any properties'); + } - for (const pair of pairs) { - if (pair.length !== 2) { - throw new TypeError('Each header pair must be a name/value tuple'); - } - this.append(pair[0], pair[1]); - } - } else { - // record - for (const key of Object.keys(init)) { - const value = init[key]; - this.append(key, value); - } - } - } else { - throw new TypeError('Provided initializer must be an object'); - } - } + } else if (readPlainScalar(state, flowIndent, CONTEXT_FLOW_IN === nodeContext)) { + hasContent = true; - /** - * Return combined header value given name - * - * @param String name Header name - * @return Mixed - */ - get(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key === undefined) { - return null; - } + if (state.tag === null) { + state.tag = '?'; + } + } - return this[MAP][key].join(', '); - } + if (state.anchor !== null) { + state.anchorMap[state.anchor] = state.result; + } + } + } else if (indentStatus === 0) { + // Special case: block sequences are allowed to have same indentation level as the parent. 
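The Headers implementation visible in this hunk treats header names case-insensitively and joins multiple values on get(); a small usage sketch, assuming the bundled HTTP client is node-fetch 2.x and requiring it directly only for illustration:

const { Headers } = require('node-fetch'); // assumption: bundled client is node-fetch 2.x

const h = new Headers({ Accept: 'application/json' });
h.append('accept', 'text/plain');  // header names are case-insensitive
h.get('ACCEPT');                   // => 'application/json, text/plain' (values joined with ', ')
h.has('content-type');             // => false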
+ // http://www.yaml.org/spec/1.2/spec.html#id2799784 + hasContent = allowBlockCollections && readBlockSequence(state, blockIndent); + } + } - /** - * Iterate over all headers - * - * @param Function callback Executed for each item with parameters (value, name, thisArg) - * @param Boolean thisArg `this` context for callback function - * @return Void - */ - forEach(callback) { - let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; + if (state.tag === null) { + if (state.anchor !== null) { + state.anchorMap[state.anchor] = state.result; + } - let pairs = getHeaders(this); - let i = 0; - while (i < pairs.length) { - var _pairs$i = pairs[i]; - const name = _pairs$i[0], - value = _pairs$i[1]; + } else if (state.tag === '?') { + // Implicit resolving is not allowed for non-scalar types, and '?' + // non-specific tag is only automatically assigned to plain scalars. + // + // We only need to check kind conformity in case user explicitly assigns '?' + // tag, for example like this: "! [0]" + // + if (state.result !== null && state.kind !== 'scalar') { + throwError(state, 'unacceptable node kind for ! tag; it should be "scalar", not "' + state.kind + '"'); + } - callback.call(thisArg, value, name, this); - pairs = getHeaders(this); - i++; - } - } + for (typeIndex = 0, typeQuantity = state.implicitTypes.length; typeIndex < typeQuantity; typeIndex += 1) { + type = state.implicitTypes[typeIndex]; - /** - * Overwrite header values given name - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - set(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - this[MAP][key !== undefined ? key : name] = [value]; - } + if (type.resolve(state.result)) { // `state.result` updated in resolver if matched + state.result = type.construct(state.result); + state.tag = type.tag; + if (state.anchor !== null) { + state.anchorMap[state.anchor] = state.result; + } + break; + } + } + } else if (state.tag !== '!') { + if (_hasOwnProperty.call(state.typeMap[state.kind || 'fallback'], state.tag)) { + type = state.typeMap[state.kind || 'fallback'][state.tag]; + } else { + // looking for multi type + type = null; + typeList = state.typeMap.multi[state.kind || 'fallback']; - /** - * Append a value onto existing header - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - append(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - if (key !== undefined) { - this[MAP][key].push(value); - } else { - this[MAP][name] = [value]; - } - } + for (typeIndex = 0, typeQuantity = typeList.length; typeIndex < typeQuantity; typeIndex += 1) { + if (state.tag.slice(0, typeList[typeIndex].tag.length) === typeList[typeIndex].tag) { + type = typeList[typeIndex]; + break; + } + } + } - /** - * Check for header name existence - * - * @param String name Header name - * @return Boolean - */ - has(name) { - name = `${name}`; - validateName(name); - return find(this[MAP], name) !== undefined; - } + if (!type) { + throwError(state, 'unknown tag !<' + state.tag + '>'); + } - /** - * Delete all header values given name - * - * @param String name Header name - * @return Void - */ - delete(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key !== undefined) { - delete this[MAP][key]; - } - } + if (state.result !== 
null && type.kind !== state.kind) { + throwError(state, 'unacceptable node kind for !<' + state.tag + '> tag; it should be "' + type.kind + '", not "' + state.kind + '"'); + } - /** - * Return raw headers (non-spec api) - * - * @return Object - */ - raw() { - return this[MAP]; - } + if (!type.resolve(state.result, state.tag)) { // `state.result` updated in resolver if matched + throwError(state, 'cannot resolve a node with !<' + state.tag + '> explicit tag'); + } else { + state.result = type.construct(state.result, state.tag); + if (state.anchor !== null) { + state.anchorMap[state.anchor] = state.result; + } + } + } + + if (state.listener !== null) { + state.listener('close', state); + } + return state.tag !== null || state.anchor !== null || hasContent; +} + +function readDocument(state) { + var documentStart = state.position, + _position, + directiveName, + directiveArgs, + hasDirectives = false, + ch; - /** - * Get an iterator on keys. - * - * @return Iterator - */ - keys() { - return createHeadersIterator(this, 'key'); - } + state.version = null; + state.checkLineBreaks = state.legacy; + state.tagMap = Object.create(null); + state.anchorMap = Object.create(null); - /** - * Get an iterator on values. - * - * @return Iterator - */ - values() { - return createHeadersIterator(this, 'value'); - } + while ((ch = state.input.charCodeAt(state.position)) !== 0) { + skipSeparationSpace(state, true, -1); - /** - * Get an iterator on entries. - * - * This is the default iterator of the Headers object. - * - * @return Iterator - */ - [Symbol.iterator]() { - return createHeadersIterator(this, 'key+value'); - } -} -Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + ch = state.input.charCodeAt(state.position); -Object.defineProperty(Headers.prototype, Symbol.toStringTag, { - value: 'Headers', - writable: false, - enumerable: false, - configurable: true -}); + if (state.lineIndent > 0 || ch !== 0x25/* % */) { + break; + } -Object.defineProperties(Headers.prototype, { - get: { enumerable: true }, - forEach: { enumerable: true }, - set: { enumerable: true }, - append: { enumerable: true }, - has: { enumerable: true }, - delete: { enumerable: true }, - keys: { enumerable: true }, - values: { enumerable: true }, - entries: { enumerable: true } -}); + hasDirectives = true; + ch = state.input.charCodeAt(++state.position); + _position = state.position; -function getHeaders(headers) { - let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + while (ch !== 0 && !is_WS_OR_EOL(ch)) { + ch = state.input.charCodeAt(++state.position); + } - const keys = Object.keys(headers[MAP]).sort(); - return keys.map(kind === 'key' ? function (k) { - return k.toLowerCase(); - } : kind === 'value' ? 
function (k) { - return headers[MAP][k].join(', '); - } : function (k) { - return [k.toLowerCase(), headers[MAP][k].join(', ')]; - }); -} + directiveName = state.input.slice(_position, state.position); + directiveArgs = []; -const INTERNAL = Symbol('internal'); + if (directiveName.length < 1) { + throwError(state, 'directive name must not be less than one character in length'); + } -function createHeadersIterator(target, kind) { - const iterator = Object.create(HeadersIteratorPrototype); - iterator[INTERNAL] = { - target, - kind, - index: 0 - }; - return iterator; -} + while (ch !== 0) { + while (is_WHITE_SPACE(ch)) { + ch = state.input.charCodeAt(++state.position); + } -const HeadersIteratorPrototype = Object.setPrototypeOf({ - next() { - // istanbul ignore if - if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { - throw new TypeError('Value of `this` is not a HeadersIterator'); - } + if (ch === 0x23/* # */) { + do { ch = state.input.charCodeAt(++state.position); } + while (ch !== 0 && !is_EOL(ch)); + break; + } - var _INTERNAL = this[INTERNAL]; - const target = _INTERNAL.target, - kind = _INTERNAL.kind, - index = _INTERNAL.index; + if (is_EOL(ch)) break; - const values = getHeaders(target, kind); - const len = values.length; - if (index >= len) { - return { - value: undefined, - done: true - }; - } + _position = state.position; - this[INTERNAL].index = index + 1; + while (ch !== 0 && !is_WS_OR_EOL(ch)) { + ch = state.input.charCodeAt(++state.position); + } - return { - value: values[index], - done: false - }; - } -}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + directiveArgs.push(state.input.slice(_position, state.position)); + } -Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { - value: 'HeadersIterator', - writable: false, - enumerable: false, - configurable: true -}); + if (ch !== 0) readLineBreak(state); -/** - * Export the Headers object in a form that Node.js can consume. - * - * @param Headers headers - * @return Object - */ -function exportNodeCompatibleHeaders(headers) { - const obj = Object.assign({ __proto__: null }, headers[MAP]); + if (_hasOwnProperty.call(directiveHandlers, directiveName)) { + directiveHandlers[directiveName](state, directiveName, directiveArgs); + } else { + throwWarning(state, 'unknown document directive "' + directiveName + '"'); + } + } - // http.request() only supports string as Host header. This hack makes - // specifying custom Host header possible. - const hostHeaderKey = find(headers[MAP], 'Host'); - if (hostHeaderKey !== undefined) { - obj[hostHeaderKey] = obj[hostHeaderKey][0]; - } + skipSeparationSpace(state, true, -1); - return obj; -} + if (state.lineIndent === 0 && + state.input.charCodeAt(state.position) === 0x2D/* - */ && + state.input.charCodeAt(state.position + 1) === 0x2D/* - */ && + state.input.charCodeAt(state.position + 2) === 0x2D/* - */) { + state.position += 3; + skipSeparationSpace(state, true, -1); -/** - * Create a Headers object from an object of headers, ignoring those that do - * not conform to HTTP grammar productions. 
- * - * @param Object obj Object of headers - * @return Headers - */ -function createHeadersLenient(obj) { - const headers = new Headers(); - for (const name of Object.keys(obj)) { - if (invalidTokenRegex.test(name)) { - continue; - } - if (Array.isArray(obj[name])) { - for (const val of obj[name]) { - if (invalidHeaderCharRegex.test(val)) { - continue; - } - if (headers[MAP][name] === undefined) { - headers[MAP][name] = [val]; - } else { - headers[MAP][name].push(val); - } - } - } else if (!invalidHeaderCharRegex.test(obj[name])) { - headers[MAP][name] = [obj[name]]; - } - } - return headers; -} + } else if (hasDirectives) { + throwError(state, 'directives end mark is expected'); + } -const INTERNALS$1 = Symbol('Response internals'); + composeNode(state, state.lineIndent - 1, CONTEXT_BLOCK_OUT, false, true); + skipSeparationSpace(state, true, -1); -// fix an issue where "STATUS_CODES" aren't a named export for node <10 -const STATUS_CODES = http.STATUS_CODES; + if (state.checkLineBreaks && + PATTERN_NON_ASCII_LINE_BREAKS.test(state.input.slice(documentStart, state.position))) { + throwWarning(state, 'non-ASCII line breaks are interpreted as content'); + } -/** - * Response class - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -class Response { - constructor() { - let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; - let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; + state.documents.push(state.result); - Body.call(this, body, opts); + if (state.position === state.lineStart && testDocumentSeparator(state)) { - const status = opts.status || 200; - const headers = new Headers(opts.headers); + if (state.input.charCodeAt(state.position) === 0x2E/* . 
*/) { + state.position += 3; + skipSeparationSpace(state, true, -1); + } + return; + } - if (body != null && !headers.has('Content-Type')) { - const contentType = extractContentType(body); - if (contentType) { - headers.append('Content-Type', contentType); - } - } + if (state.position < (state.length - 1)) { + throwError(state, 'end of the stream or a document separator is expected'); + } else { + return; + } +} - this[INTERNALS$1] = { - url: opts.url, - status, - statusText: opts.statusText || STATUS_CODES[status], - headers, - counter: opts.counter - }; - } - get url() { - return this[INTERNALS$1].url || ''; - } +function loadDocuments(input, options) { + input = String(input); + options = options || {}; - get status() { - return this[INTERNALS$1].status; - } + if (input.length !== 0) { - /** - * Convenience property representing if the request ended normally - */ - get ok() { - return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; - } + // Add tailing `\n` if not exists + if (input.charCodeAt(input.length - 1) !== 0x0A/* LF */ && + input.charCodeAt(input.length - 1) !== 0x0D/* CR */) { + input += '\n'; + } - get redirected() { - return this[INTERNALS$1].counter > 0; - } + // Strip BOM + if (input.charCodeAt(0) === 0xFEFF) { + input = input.slice(1); + } + } - get statusText() { - return this[INTERNALS$1].statusText; - } + var state = new State(input, options); - get headers() { - return this[INTERNALS$1].headers; - } + var nullpos = input.indexOf('\0'); - /** - * Clone this response - * - * @return Response - */ - clone() { - return new Response(clone(this), { - url: this.url, - status: this.status, - statusText: this.statusText, - headers: this.headers, - ok: this.ok, - redirected: this.redirected - }); - } -} + if (nullpos !== -1) { + state.position = nullpos; + throwError(state, 'null byte is not allowed in input'); + } -Body.mixIn(Response.prototype); + // Use 0 as string terminator. That significantly simplifies bounds check. + state.input += '\0'; -Object.defineProperties(Response.prototype, { - url: { enumerable: true }, - status: { enumerable: true }, - ok: { enumerable: true }, - redirected: { enumerable: true }, - statusText: { enumerable: true }, - headers: { enumerable: true }, - clone: { enumerable: true } -}); + while (state.input.charCodeAt(state.position) === 0x20/* Space */) { + state.lineIndent += 1; + state.position += 1; + } -Object.defineProperty(Response.prototype, Symbol.toStringTag, { - value: 'Response', - writable: false, - enumerable: false, - configurable: true -}); + while (state.position < (state.length - 1)) { + readDocument(state); + } -const INTERNALS$2 = Symbol('Request internals'); + return state.documents; +} -// fix an issue where "format", "parse" aren't a named export for node <10 -const parse_url = Url.parse; -const format_url = Url.format; -const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; +function loadAll(input, iterator, options) { + if (iterator !== null && typeof iterator === 'object' && typeof options === 'undefined') { + options = iterator; + iterator = null; + } -/** - * Check if a value is an instance of Request. 
- * - * @param Mixed input - * @return Boolean - */ -function isRequest(input) { - return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; -} + var documents = loadDocuments(input, options); -function isAbortSignal(signal) { - const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); - return !!(proto && proto.constructor.name === 'AbortSignal'); + if (typeof iterator !== 'function') { + return documents; + } + + for (var index = 0, length = documents.length; index < length; index += 1) { + iterator(documents[index]); + } } -/** - * Request class - * - * @param Mixed input Url or Request instance - * @param Object init Custom options - * @return Void - */ -class Request { - constructor(input) { - let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; - let parsedURL; +function load(input, options) { + var documents = loadDocuments(input, options); - // normalize input - if (!isRequest(input)) { - if (input && input.href) { - // in order to support Node.js' Url objects; though WHATWG's URL objects - // will fall into this branch also (since their `toString()` will return - // `href` property anyway) - parsedURL = parse_url(input.href); - } else { - // coerce input to a string before attempting to parse - parsedURL = parse_url(`${input}`); - } - input = {}; - } else { - parsedURL = parse_url(input.url); - } + if (documents.length === 0) { + /*eslint-disable no-undefined*/ + return undefined; + } else if (documents.length === 1) { + return documents[0]; + } + throw new YAMLException('expected a single document in the stream, but found more'); +} - let method = init.method || input.method || 'GET'; - method = method.toUpperCase(); - if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { - throw new TypeError('Request with GET/HEAD method cannot have body'); - } +module.exports.loadAll = loadAll; +module.exports.load = load; - let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; - Body.call(this, inputBody, { - timeout: init.timeout || input.timeout || 0, - size: init.size || input.size || 0 - }); +/***/ }), - const headers = new Headers(init.headers || input.headers || {}); +/***/ 1082: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (inputBody != null && !headers.has('Content-Type')) { - const contentType = extractContentType(inputBody); - if (contentType) { - headers.append('Content-Type', contentType); - } - } +"use strict"; - let signal = isRequest(input) ? input.signal : null; - if ('signal' in init) signal = init.signal; - if (signal != null && !isAbortSignal(signal)) { - throw new TypeError('Expected signal to be an instanceof AbortSignal'); - } +/*eslint-disable max-len*/ - this[INTERNALS$2] = { - method, - redirect: init.redirect || input.redirect || 'follow', - headers, - parsedURL, - signal - }; +var YAMLException = __nccwpck_require__(8179); +var Type = __nccwpck_require__(6073); - // node-fetch-only options - this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; - this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true; - this.counter = init.counter || input.counter || 0; - this.agent = init.agent || input.agent; - } - get method() { - return this[INTERNALS$2].method; - } +function compileList(schema, name) { + var result = []; - get url() { - return format_url(this[INTERNALS$2].parsedURL); - } + schema[name].forEach(function (currentType) { + var newIndex = result.length; - get headers() { - return this[INTERNALS$2].headers; - } + result.forEach(function (previousType, previousIndex) { + if (previousType.tag === currentType.tag && + previousType.kind === currentType.kind && + previousType.multi === currentType.multi) { - get redirect() { - return this[INTERNALS$2].redirect; - } + newIndex = previousIndex; + } + }); - get signal() { - return this[INTERNALS$2].signal; - } + result[newIndex] = currentType; + }); - /** - * Clone this request - * - * @return Request - */ - clone() { - return new Request(this); - } + return result; } -Body.mixIn(Request.prototype); -Object.defineProperty(Request.prototype, Symbol.toStringTag, { - value: 'Request', - writable: false, - enumerable: false, - configurable: true -}); +function compileMap(/* lists... */) { + var result = { + scalar: {}, + sequence: {}, + mapping: {}, + fallback: {}, + multi: { + scalar: [], + sequence: [], + mapping: [], + fallback: [] + } + }, index, length; -Object.defineProperties(Request.prototype, { - method: { enumerable: true }, - url: { enumerable: true }, - headers: { enumerable: true }, - redirect: { enumerable: true }, - clone: { enumerable: true }, - signal: { enumerable: true } -}); + function collectType(type) { + if (type.multi) { + result.multi[type.kind].push(type); + result.multi['fallback'].push(type); + } else { + result[type.kind][type.tag] = result['fallback'][type.tag] = type; + } + } -/** - * Convert a Request to Node.js http request options. 
- * - * @param Request A Request instance - * @return Object The options object to be passed to http.request - */ -function getNodeRequestOptions(request) { - const parsedURL = request[INTERNALS$2].parsedURL; - const headers = new Headers(request[INTERNALS$2].headers); + for (index = 0, length = arguments.length; index < length; index += 1) { + arguments[index].forEach(collectType); + } + return result; +} - // fetch step 1.3 - if (!headers.has('Accept')) { - headers.set('Accept', '*/*'); - } - // Basic fetch - if (!parsedURL.protocol || !parsedURL.hostname) { - throw new TypeError('Only absolute URLs are supported'); - } +function Schema(definition) { + return this.extend(definition); +} - if (!/^https?:$/.test(parsedURL.protocol)) { - throw new TypeError('Only HTTP(S) protocols are supported'); - } - if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { - throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); - } +Schema.prototype.extend = function extend(definition) { + var implicit = []; + var explicit = []; - // HTTP-network-or-cache fetch steps 2.4-2.7 - let contentLengthValue = null; - if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { - contentLengthValue = '0'; - } - if (request.body != null) { - const totalBytes = getTotalBytes(request); - if (typeof totalBytes === 'number') { - contentLengthValue = String(totalBytes); - } - } - if (contentLengthValue) { - headers.set('Content-Length', contentLengthValue); - } + if (definition instanceof Type) { + // Schema.extend(type) + explicit.push(definition); - // HTTP-network-or-cache fetch step 2.11 - if (!headers.has('User-Agent')) { - headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); - } + } else if (Array.isArray(definition)) { + // Schema.extend([ type1, type2, ... ]) + explicit = explicit.concat(definition); - // HTTP-network-or-cache fetch step 2.15 - if (request.compress && !headers.has('Accept-Encoding')) { - headers.set('Accept-Encoding', 'gzip,deflate'); - } + } else if (definition && (Array.isArray(definition.implicit) || Array.isArray(definition.explicit))) { + // Schema.extend({ explicit: [ type1, type2, ... ], implicit: [ type1, type2, ... ] }) + if (definition.implicit) implicit = implicit.concat(definition.implicit); + if (definition.explicit) explicit = explicit.concat(definition.explicit); - let agent = request.agent; - if (typeof agent === 'function') { - agent = agent(parsedURL); - } + } else { + throw new YAMLException('Schema.extend argument should be a Type, [ Type ], ' + + 'or a schema definition ({ implicit: [...], explicit: [...] })'); + } - if (!headers.has('Connection') && !agent) { - headers.set('Connection', 'close'); - } + implicit.forEach(function (type) { + if (!(type instanceof Type)) { + throw new YAMLException('Specified list of YAML types (or a single Type object) contains a non-Type object.'); + } - // HTTP-network fetch step 4.2 - // chunked encoding is handled by Node.js + if (type.loadKind && type.loadKind !== 'scalar') { + throw new YAMLException('There is a non-scalar type in the implicit list of a schema. Implicit resolving of such types is not supported.'); + } - return Object.assign({}, parsedURL, { - method: request.method, - headers: exportNodeCompatibleHeaders(headers), - agent - }); -} + if (type.multi) { + throw new YAMLException('There is a multi type in the implicit list of a schema. 
Multi tags can only be listed as explicit.'); + } + }); -/** - * abort-error.js - * - * AbortError interface for cancelled requests - */ + explicit.forEach(function (type) { + if (!(type instanceof Type)) { + throw new YAMLException('Specified list of YAML types (or a single Type object) contains a non-Type object.'); + } + }); -/** - * Create AbortError instance - * - * @param String message Error message for human - * @return AbortError - */ -function AbortError(message) { - Error.call(this, message); + var result = Object.create(Schema.prototype); - this.type = 'aborted'; - this.message = message; + result.implicit = (this.implicit || []).concat(implicit); + result.explicit = (this.explicit || []).concat(explicit); - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} + result.compiledImplicit = compileList(result, 'implicit'); + result.compiledExplicit = compileList(result, 'explicit'); + result.compiledTypeMap = compileMap(result.compiledImplicit, result.compiledExplicit); -AbortError.prototype = Object.create(Error.prototype); -AbortError.prototype.constructor = AbortError; -AbortError.prototype.name = 'AbortError'; + return result; +}; -// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 -const PassThrough$1 = Stream.PassThrough; -const resolve_url = Url.resolve; -/** - * Fetch function - * - * @param Mixed url Absolute url or Request instance - * @param Object opts Fetch options - * @return Promise - */ -function fetch(url, opts) { +module.exports = Schema; - // allow custom promise - if (!fetch.Promise) { - throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); - } - Body.Promise = fetch.Promise; +/***/ }), - // wrap http.request into fetch - return new fetch.Promise(function (resolve, reject) { - // build request object - const request = new Request(url, opts); - const options = getNodeRequestOptions(request); +/***/ 2011: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - const send = (options.protocol === 'https:' ? https : http).request; - const signal = request.signal; +"use strict"; +// Standard YAML's Core schema. +// http://www.yaml.org/spec/1.2/spec.html#id2804923 +// +// NOTE: JS-YAML does not support schema-specific tag resolution restrictions. +// So, Core schema has no distinctions from JSON schema is JS-YAML. 
- let response = null; - const abort = function abort() { - let error = new AbortError('The user aborted a request.'); - reject(error); - if (request.body && request.body instanceof Stream.Readable) { - request.body.destroy(error); - } - if (!response || !response.body) return; - response.body.emit('error', error); - }; - if (signal && signal.aborted) { - abort(); - return; - } - const abortAndFinalize = function abortAndFinalize() { - abort(); - finalize(); - }; - // send request - const req = send(options); - let reqTimeout; +module.exports = __nccwpck_require__(1035); - if (signal) { - signal.addEventListener('abort', abortAndFinalize); - } - function finalize() { - req.abort(); - if (signal) signal.removeEventListener('abort', abortAndFinalize); - clearTimeout(reqTimeout); - } +/***/ }), - if (request.timeout) { - req.once('socket', function (socket) { - reqTimeout = setTimeout(function () { - reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); - finalize(); - }, request.timeout); - }); - } +/***/ 8759: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - req.on('error', function (err) { - reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); - finalize(); - }); +"use strict"; +// JS-YAML's default schema for `safeLoad` function. +// It is not described in the YAML specification. +// +// This schema is based on standard YAML's Core schema and includes most of +// extra types described at YAML tag repository. (http://yaml.org/type/) - req.on('response', function (res) { - clearTimeout(reqTimeout); - const headers = createHeadersLenient(res.headers); - // HTTP fetch step 5 - if (fetch.isRedirect(res.statusCode)) { - // HTTP fetch step 5.2 - const location = headers.get('Location'); - // HTTP fetch step 5.3 - const locationURL = location === null ? null : resolve_url(request.url, location); - // HTTP fetch step 5.5 - switch (request.redirect) { - case 'error': - reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); - finalize(); - return; - case 'manual': - // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. - if (locationURL !== null) { - // handle corrupted header - try { - headers.set('Location', locationURL); - } catch (err) { - // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request - reject(err); - } - } - break; - case 'follow': - // HTTP-redirect fetch step 2 - if (locationURL === null) { - break; - } +module.exports = __nccwpck_require__(2011).extend({ + implicit: [ + __nccwpck_require__(9212), + __nccwpck_require__(6104) + ], + explicit: [ + __nccwpck_require__(7900), + __nccwpck_require__(9046), + __nccwpck_require__(6860), + __nccwpck_require__(9548) + ] +}); - // HTTP-redirect fetch step 5 - if (request.counter >= request.follow) { - reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); - finalize(); - return; - } - // HTTP-redirect fetch step 6 (counter increment) - // Create a new Request object. 
- const requestOpts = { - headers: new Headers(request.headers), - follow: request.follow, - counter: request.counter + 1, - agent: request.agent, - compress: request.compress, - method: request.method, - body: request.body, - signal: request.signal, - timeout: request.timeout, - size: request.size - }; +/***/ }), - // HTTP-redirect fetch step 9 - if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { - reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); - finalize(); - return; - } +/***/ 8562: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // HTTP-redirect fetch step 11 - if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { - requestOpts.method = 'GET'; - requestOpts.body = undefined; - requestOpts.headers.delete('content-length'); - } +"use strict"; +// Standard YAML's Failsafe schema. +// http://www.yaml.org/spec/1.2/spec.html#id2802346 - // HTTP-redirect fetch step 15 - resolve(fetch(new Request(locationURL, requestOpts))); - finalize(); - return; - } - } - // prepare response - res.once('end', function () { - if (signal) signal.removeEventListener('abort', abortAndFinalize); - }); - let body = res.pipe(new PassThrough$1()); - const response_options = { - url: request.url, - status: res.statusCode, - statusText: res.statusMessage, - headers: headers, - size: request.size, - timeout: request.timeout, - counter: request.counter - }; - // HTTP-network fetch step 12.1.1.3 - const codings = headers.get('Content-Encoding'); - // HTTP-network fetch step 12.1.1.4: handle content codings +var Schema = __nccwpck_require__(1082); - // in following scenarios we ignore compression support - // 1. compression support is disabled - // 2. HEAD request - // 3. no Content-Encoding header - // 4. no content response (204) - // 5. content not modified response (304) - if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { - response = new Response(body, response_options); - resolve(response); - return; - } - // For Node v6+ - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. 
- const zlibOptions = { - flush: zlib.Z_SYNC_FLUSH, - finishFlush: zlib.Z_SYNC_FLUSH - }; +module.exports = new Schema({ + explicit: [ + __nccwpck_require__(3619), + __nccwpck_require__(7283), + __nccwpck_require__(6150) + ] +}); - // for gzip - if (codings == 'gzip' || codings == 'x-gzip') { - body = body.pipe(zlib.createGunzip(zlibOptions)); - response = new Response(body, response_options); - resolve(response); - return; - } - // for deflate - if (codings == 'deflate' || codings == 'x-deflate') { - // handle the infamous raw deflate response from old servers - // a hack for old IIS and Apache servers - const raw = res.pipe(new PassThrough$1()); - raw.once('data', function (chunk) { - // see http://stackoverflow.com/questions/37519828 - if ((chunk[0] & 0x0F) === 0x08) { - body = body.pipe(zlib.createInflate()); - } else { - body = body.pipe(zlib.createInflateRaw()); - } - response = new Response(body, response_options); - resolve(response); - }); - return; - } +/***/ }), - // for br - if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { - body = body.pipe(zlib.createBrotliDecompress()); - response = new Response(body, response_options); - resolve(response); - return; - } +/***/ 1035: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // otherwise, use response as-is - response = new Response(body, response_options); - resolve(response); - }); +"use strict"; +// Standard YAML's JSON schema. +// http://www.yaml.org/spec/1.2/spec.html#id2803231 +// +// NOTE: JS-YAML does not support schema-specific tag resolution restrictions. +// So, this schema is not such strict as defined in the YAML specification. +// It allows numbers in binary notaion, use `Null` and `NULL` as `null`, etc. - writeToStream(req, request); - }); -} -/** - * Redirect code matching - * - * @param Number code Status code - * @return Boolean - */ -fetch.isRedirect = function (code) { - return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; -}; -// expose Promise -fetch.Promise = global.Promise; -module.exports = exports = fetch; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.default = exports; -exports.Headers = Headers; -exports.Request = Request; -exports.Response = Response; -exports.FetchError = FetchError; + + +module.exports = __nccwpck_require__(8562).extend({ + implicit: [ + __nccwpck_require__(721), + __nccwpck_require__(4993), + __nccwpck_require__(1615), + __nccwpck_require__(2705) + ] +}); /***/ }), -/***/ 2072: +/***/ 6975: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = paginationMethodsPlugin +"use strict"; -function paginationMethodsPlugin (octokit) { - octokit.getFirstPage = __nccwpck_require__(9555).bind(null, octokit) - octokit.getLastPage = __nccwpck_require__(2203).bind(null, octokit) - octokit.getNextPage = __nccwpck_require__(6655).bind(null, octokit) - octokit.getPreviousPage = __nccwpck_require__(3032).bind(null, octokit) - octokit.hasFirstPage = __nccwpck_require__(9631) - octokit.hasLastPage = __nccwpck_require__(4286) - octokit.hasNextPage = __nccwpck_require__(500) - octokit.hasPreviousPage = __nccwpck_require__(5996) -} -/***/ }), +var common = __nccwpck_require__(6829); -/***/ 191: -/***/ ((module) => { -module.exports = deprecate +// get snippet for a single line, respecting maxLength +function getLine(buffer, lineStart, lineEnd, position, maxLineLength) { + var head = ''; + var tail = ''; + var maxHalfLength = Math.floor(maxLineLength / 2) - 1; -const 
loggedMessages = {} + if (position - lineStart > maxHalfLength) { + head = ' ... '; + lineStart = position - maxHalfLength + head.length; + } -function deprecate (message) { - if (loggedMessages[message]) { - return + if (lineEnd - position > maxHalfLength) { + tail = ' ...'; + lineEnd = position + maxHalfLength - tail.length; } - console.warn(`DEPRECATED (@octokit/rest): ${message}`) - loggedMessages[message] = 1 + return { + str: head + buffer.slice(lineStart, lineEnd).replace(/\t/g, '→') + tail, + pos: position - lineStart + head.length // relative position + }; +} + + +function padStart(string, max) { + return common.repeat(' ', max - string.length) + string; } -/***/ }), +function makeSnippet(mark, options) { + options = Object.create(options || null); -/***/ 9555: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (!mark.buffer) return null; -module.exports = getFirstPage + if (!options.maxLength) options.maxLength = 79; + if (typeof options.indent !== 'number') options.indent = 1; + if (typeof options.linesBefore !== 'number') options.linesBefore = 3; + if (typeof options.linesAfter !== 'number') options.linesAfter = 2; -const getPage = __nccwpck_require__(8604) + var re = /\r?\n|\r|\0/g; + var lineStarts = [ 0 ]; + var lineEnds = []; + var match; + var foundLineNo = -1; -function getFirstPage (octokit, link, headers) { - return getPage(octokit, link, 'first', headers) -} + while ((match = re.exec(mark.buffer))) { + lineEnds.push(match.index); + lineStarts.push(match.index + match[0].length); + if (mark.position <= match.index && foundLineNo < 0) { + foundLineNo = lineStarts.length - 2; + } + } -/***/ }), + if (foundLineNo < 0) foundLineNo = lineStarts.length - 1; -/***/ 2203: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + var result = '', i, line; + var lineNoLength = Math.min(mark.line + options.linesAfter, lineEnds.length).toString().length; + var maxLineLength = options.maxLength - (options.indent + lineNoLength + 3); + + for (i = 1; i <= options.linesBefore; i++) { + if (foundLineNo - i < 0) break; + line = getLine( + mark.buffer, + lineStarts[foundLineNo - i], + lineEnds[foundLineNo - i], + mark.position - (lineStarts[foundLineNo] - lineStarts[foundLineNo - i]), + maxLineLength + ); + result = common.repeat(' ', options.indent) + padStart((mark.line - i + 1).toString(), lineNoLength) + + ' | ' + line.str + '\n' + result; + } + + line = getLine(mark.buffer, lineStarts[foundLineNo], lineEnds[foundLineNo], mark.position, maxLineLength); + result += common.repeat(' ', options.indent) + padStart((mark.line + 1).toString(), lineNoLength) + + ' | ' + line.str + '\n'; + result += common.repeat('-', options.indent + lineNoLength + 3 + line.pos) + '^' + '\n'; + + for (i = 1; i <= options.linesAfter; i++) { + if (foundLineNo + i >= lineEnds.length) break; + line = getLine( + mark.buffer, + lineStarts[foundLineNo + i], + lineEnds[foundLineNo + i], + mark.position - (lineStarts[foundLineNo] - lineStarts[foundLineNo + i]), + maxLineLength + ); + result += common.repeat(' ', options.indent) + padStart((mark.line + i + 1).toString(), lineNoLength) + + ' | ' + line.str + '\n'; + } -module.exports = getLastPage + return result.replace(/\n$/, ''); +} -const getPage = __nccwpck_require__(8604) -function getLastPage (octokit, link, headers) { - return getPage(octokit, link, 'last', headers) -} +module.exports = makeSnippet; /***/ }), -/***/ 6655: +/***/ 6073: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = 
getNextPage - -const getPage = __nccwpck_require__(8604) - -function getNextPage (octokit, link, headers) { - return getPage(octokit, link, 'next', headers) -} - +"use strict"; -/***/ }), -/***/ 7889: -/***/ ((module) => { +var YAMLException = __nccwpck_require__(8179); -module.exports = getPageLinks +var TYPE_CONSTRUCTOR_OPTIONS = [ + 'kind', + 'multi', + 'resolve', + 'construct', + 'instanceOf', + 'predicate', + 'represent', + 'representName', + 'defaultStyle', + 'styleAliases' +]; -function getPageLinks (link) { - link = link.link || link.headers.link || '' +var YAML_NODE_KINDS = [ + 'scalar', + 'sequence', + 'mapping' +]; - const links = {} +function compileStyleAliases(map) { + var result = {}; - // link format: - // '; rel="next", ; rel="last"' - link.replace(/<([^>]*)>;\s*rel="([\w]*)"/g, (m, uri, type) => { - links[type] = uri - }) + if (map !== null) { + Object.keys(map).forEach(function (style) { + map[style].forEach(function (alias) { + result[String(alias)] = style; + }); + }); + } - return links + return result; } +function Type(tag, options) { + options = options || {}; -/***/ }), - -/***/ 8604: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = getPage - -const deprecate = __nccwpck_require__(191) -const getPageLinks = __nccwpck_require__(7889) -const HttpError = __nccwpck_require__(6058) - -function getPage (octokit, link, which, headers) { - deprecate(`octokit.get${which.charAt(0).toUpperCase() + which.slice(1)}Page() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`) - const url = getPageLinks(link)[which] + Object.keys(options).forEach(function (name) { + if (TYPE_CONSTRUCTOR_OPTIONS.indexOf(name) === -1) { + throw new YAMLException('Unknown option "' + name + '" is met in definition of "' + tag + '" YAML type.'); + } + }); - if (!url) { - const urlError = new HttpError(`No ${which} page found`, 404) - return Promise.reject(urlError) - } + // TODO: Add tag format check. + this.options = options; // keep original options in case user wants to extend this type later + this.tag = tag; + this.kind = options['kind'] || null; + this.resolve = options['resolve'] || function () { return true; }; + this.construct = options['construct'] || function (data) { return data; }; + this.instanceOf = options['instanceOf'] || null; + this.predicate = options['predicate'] || null; + this.represent = options['represent'] || null; + this.representName = options['representName'] || null; + this.defaultStyle = options['defaultStyle'] || null; + this.multi = options['multi'] || false; + this.styleAliases = compileStyleAliases(options['styleAliases'] || null); - const requestOptions = { - url, - headers: applyAcceptHeader(link, headers) + if (YAML_NODE_KINDS.indexOf(this.kind) === -1) { + throw new YAMLException('Unknown kind "' + this.kind + '" is specified for "' + tag + '" YAML type.'); } - - const promise = octokit.request(requestOptions) - - return promise } -function applyAcceptHeader (res, headers) { - const previous = res.headers && res.headers['x-github-media-type'] - - if (!previous || (headers && headers.accept)) { - return headers - } - headers = headers || {} - headers.accept = 'application/vnd.' 
+ previous - .replace('; param=', '.') - .replace('; format=', '+') - - return headers -} +module.exports = Type; /***/ }), -/***/ 3032: +/***/ 7900: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = getPreviousPage +"use strict"; -const getPage = __nccwpck_require__(8604) -function getPreviousPage (octokit, link, headers) { - return getPage(octokit, link, 'prev', headers) -} +/*eslint-disable no-bitwise*/ -/***/ }), +var Type = __nccwpck_require__(6073); -/***/ 9631: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = hasFirstPage +// [ 64, 65, 66 ] -> [ padding, CR, LF ] +var BASE64_MAP = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\n\r'; -const deprecate = __nccwpck_require__(191) -const getPageLinks = __nccwpck_require__(7889) -function hasFirstPage (link) { - deprecate(`octokit.hasFirstPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`) - return getPageLinks(link).first -} +function resolveYamlBinary(data) { + if (data === null) return false; + var code, idx, bitlen = 0, max = data.length, map = BASE64_MAP; -/***/ }), + // Convert one by one. + for (idx = 0; idx < max; idx++) { + code = map.indexOf(data.charAt(idx)); -/***/ 4286: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // Skip CR/LF + if (code > 64) continue; -module.exports = hasLastPage + // Fail on illegal characters + if (code < 0) return false; -const deprecate = __nccwpck_require__(191) -const getPageLinks = __nccwpck_require__(7889) + bitlen += 6; + } -function hasLastPage (link) { - deprecate(`octokit.hasLastPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`) - return getPageLinks(link).last + // If there are any bits left, source was corrupted + return (bitlen % 8) === 0; } +function constructYamlBinary(data) { + var idx, tailbits, + input = data.replace(/[\r\n=]/g, ''), // remove CR/LF & padding to simplify scan + max = input.length, + map = BASE64_MAP, + bits = 0, + result = []; -/***/ }), - -/***/ 500: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // Collect by 6*4 bits (3 bytes) -module.exports = hasNextPage + for (idx = 0; idx < max; idx++) { + if ((idx % 4 === 0) && idx) { + result.push((bits >> 16) & 0xFF); + result.push((bits >> 8) & 0xFF); + result.push(bits & 0xFF); + } -const deprecate = __nccwpck_require__(191) -const getPageLinks = __nccwpck_require__(7889) + bits = (bits << 6) | map.indexOf(input.charAt(idx)); + } -function hasNextPage (link) { - deprecate(`octokit.hasNextPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`) - return getPageLinks(link).next -} + // Dump tail + tailbits = (max % 4) * 6; -/***/ }), + if (tailbits === 0) { + result.push((bits >> 16) & 0xFF); + result.push((bits >> 8) & 0xFF); + result.push(bits & 0xFF); + } else if (tailbits === 18) { + result.push((bits >> 10) & 0xFF); + result.push((bits >> 2) & 0xFF); + } else if (tailbits === 12) { + result.push((bits >> 4) & 0xFF); + } -/***/ 5996: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + return new Uint8Array(result); +} -module.exports = hasPreviousPage +function representYamlBinary(object /*, style*/) { + var result = '', bits = 0, idx, tail, + max = object.length, + map = BASE64_MAP; -const deprecate = __nccwpck_require__(191) -const getPageLinks = __nccwpck_require__(7889) + // 
Convert every three bytes to 4 ASCII characters. -function hasPreviousPage (link) { - deprecate(`octokit.hasPreviousPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`) - return getPageLinks(link).prev -} + for (idx = 0; idx < max; idx++) { + if ((idx % 3 === 0) && idx) { + result += map[(bits >> 18) & 0x3F]; + result += map[(bits >> 12) & 0x3F]; + result += map[(bits >> 6) & 0x3F]; + result += map[bits & 0x3F]; + } + bits = (bits << 8) + object[idx]; + } -/***/ }), + // Dump tail -/***/ 6058: -/***/ ((module) => { + tail = max % 3; -module.exports = class HttpError extends Error { - constructor (message, code, headers) { - super(message) + if (tail === 0) { + result += map[(bits >> 18) & 0x3F]; + result += map[(bits >> 12) & 0x3F]; + result += map[(bits >> 6) & 0x3F]; + result += map[bits & 0x3F]; + } else if (tail === 2) { + result += map[(bits >> 10) & 0x3F]; + result += map[(bits >> 4) & 0x3F]; + result += map[(bits << 2) & 0x3F]; + result += map[64]; + } else if (tail === 1) { + result += map[(bits >> 2) & 0x3F]; + result += map[(bits << 4) & 0x3F]; + result += map[64]; + result += map[64]; + } - // Maintains proper stack trace (only available on V8) - /* istanbul ignore next */ - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor) - } + return result; +} - this.name = 'HttpError' - this.code = code - this.headers = headers - } +function isBinary(obj) { + return Object.prototype.toString.call(obj) === '[object Uint8Array]'; } +module.exports = new Type('tag:yaml.org,2002:binary', { + kind: 'scalar', + resolve: resolveYamlBinary, + construct: constructYamlBinary, + predicate: isBinary, + represent: representYamlBinary +}); + /***/ }), -/***/ 1223: +/***/ 4993: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var wrappy = __nccwpck_require__(2940) -module.exports = wrappy(once) -module.exports.strict = wrappy(onceStrict) +"use strict"; -once.proto = once(function () { - Object.defineProperty(Function.prototype, 'once', { - value: function () { - return once(this) - }, - configurable: true - }) - Object.defineProperty(Function.prototype, 'onceStrict', { - value: function () { - return onceStrict(this) - }, - configurable: true - }) -}) +var Type = __nccwpck_require__(6073); -function once (fn) { - var f = function () { - if (f.called) return f.value - f.called = true - return f.value = fn.apply(this, arguments) - } - f.called = false - return f +function resolveYamlBoolean(data) { + if (data === null) return false; + + var max = data.length; + + return (max === 4 && (data === 'true' || data === 'True' || data === 'TRUE')) || + (max === 5 && (data === 'false' || data === 'False' || data === 'FALSE')); } -function onceStrict (fn) { - var f = function () { - if (f.called) - throw new Error(f.onceError) - f.called = true - return f.value = fn.apply(this, arguments) - } - var name = fn.name || 'Function wrapped with `once`' - f.onceError = name + " shouldn't be called more than once" - f.called = false - return f +function constructYamlBoolean(data) { + return data === 'true' || + data === 'True' || + data === 'TRUE'; +} + +function isBoolean(object) { + return Object.prototype.toString.call(object) === '[object Boolean]'; } +module.exports = new Type('tag:yaml.org,2002:bool', { + kind: 'scalar', + resolve: resolveYamlBoolean, + construct: constructYamlBoolean, + predicate: isBoolean, + represent: { + lowercase: function (object) { return object ? 
'true' : 'false'; }, + uppercase: function (object) { return object ? 'TRUE' : 'FALSE'; }, + camelcase: function (object) { return object ? 'True' : 'False'; } + }, + defaultStyle: 'lowercase' +}); + /***/ }), -/***/ 4824: +/***/ 2705: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const os = __nccwpck_require__(2087); -const macosRelease = __nccwpck_require__(7493); -const winRelease = __nccwpck_require__(3515); - -const osName = (platform, release) => { - if (!platform && release) { - throw new Error('You can\'t specify a `release` without specifying `platform`'); - } - - platform = platform || os.platform(); - - let id; - - if (platform === 'darwin') { - if (!release && os.platform() === 'darwin') { - release = os.release(); - } - - const prefix = release ? (Number(release.split('.')[0]) > 15 ? 'macOS' : 'OS X') : 'macOS'; - id = release ? macosRelease(release).name : ''; - return prefix + (id ? ' ' + id : ''); - } - - if (platform === 'linux') { - if (!release && os.platform() === 'linux') { - release = os.release(); - } - id = release ? release.replace(/^(\d+\.\d+).*/, '$1') : ''; - return 'Linux' + (id ? ' ' + id : ''); - } +var common = __nccwpck_require__(6829); +var Type = __nccwpck_require__(6073); - if (platform === 'win32') { - if (!release && os.platform() === 'win32') { - release = os.release(); - } +var YAML_FLOAT_PATTERN = new RegExp( + // 2.5e4, 2.5 and integers + '^(?:[-+]?(?:[0-9][0-9_]*)(?:\\.[0-9_]*)?(?:[eE][-+]?[0-9]+)?' + + // .2e4, .2 + // special case, seems not from spec + '|\\.[0-9_]+(?:[eE][-+]?[0-9]+)?' + + // .inf + '|[-+]?\\.(?:inf|Inf|INF)' + + // .nan + '|\\.(?:nan|NaN|NAN))$'); - id = release ? winRelease(release) : ''; - return 'Windows' + (id ? ' ' + id : ''); - } +function resolveYamlFloat(data) { + if (data === null) return false; - return platform; -}; + if (!YAML_FLOAT_PATTERN.test(data) || + // Quick hack to not allow integers end with `_` + // Probably should update regexp & check speed + data[data.length - 1] === '_') { + return false; + } -module.exports = osName; + return true; +} +function constructYamlFloat(data) { + var value, sign; -/***/ }), + value = data.replace(/_/g, '').toLowerCase(); + sign = value[0] === '-' ? -1 : 1; -/***/ 1330: -/***/ ((module) => { + if ('+-'.indexOf(value[0]) >= 0) { + value = value.slice(1); + } -"use strict"; + if (value === '.inf') { + return (sign === 1) ? 
Number.POSITIVE_INFINITY : Number.NEGATIVE_INFINITY; -module.exports = (promise, onFinally) => { - onFinally = onFinally || (() => {}); - - return promise.then( - val => new Promise(resolve => { - resolve(onFinally()); - }).then(() => val), - err => new Promise(resolve => { - resolve(onFinally()); - }).then(() => { - throw err; - }) - ); -}; + } else if (value === '.nan') { + return NaN; + } + return sign * parseFloat(value, 10); +} -/***/ }), +var SCIENTIFIC_WITHOUT_DOT = /^[-+]?[0-9]+e/; -/***/ 8341: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +function representYamlFloat(object, style) { + var res; -var once = __nccwpck_require__(1223) -var eos = __nccwpck_require__(1205) -var fs = __nccwpck_require__(5747) // we only need fs to get the ReadStream and WriteStream prototypes + if (isNaN(object)) { + switch (style) { + case 'lowercase': return '.nan'; + case 'uppercase': return '.NAN'; + case 'camelcase': return '.NaN'; + } + } else if (Number.POSITIVE_INFINITY === object) { + switch (style) { + case 'lowercase': return '.inf'; + case 'uppercase': return '.INF'; + case 'camelcase': return '.Inf'; + } + } else if (Number.NEGATIVE_INFINITY === object) { + switch (style) { + case 'lowercase': return '-.inf'; + case 'uppercase': return '-.INF'; + case 'camelcase': return '-.Inf'; + } + } else if (common.isNegativeZero(object)) { + return '-0.0'; + } -var noop = function () {} -var ancient = /^v?\.0/.test(process.version) + res = object.toString(10); -var isFn = function (fn) { - return typeof fn === 'function' -} + // JS stringifier can build scientific format without dots: 5e-100, + // while YAML requres dot: 5.e-100. Fix it with simple hack -var isFS = function (stream) { - if (!ancient) return false // newer node version do not need to care about fs is a special way - if (!fs) return false // browser - return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close) + return SCIENTIFIC_WITHOUT_DOT.test(res) ? 
res.replace('e', '.e') : res; } -var isRequest = function (stream) { - return stream.setHeader && isFn(stream.abort) +function isFloat(object) { + return (Object.prototype.toString.call(object) === '[object Number]') && + (object % 1 !== 0 || common.isNegativeZero(object)); } -var destroyer = function (stream, reading, writing, callback) { - callback = once(callback) +module.exports = new Type('tag:yaml.org,2002:float', { + kind: 'scalar', + resolve: resolveYamlFloat, + construct: constructYamlFloat, + predicate: isFloat, + represent: representYamlFloat, + defaultStyle: 'lowercase' +}); - var closed = false - stream.on('close', function () { - closed = true - }) - eos(stream, {readable: reading, writable: writing}, function (err) { - if (err) return callback(err) - closed = true - callback() - }) +/***/ }), - var destroyed = false - return function (err) { - if (closed) return - if (destroyed) return - destroyed = true +/***/ 1615: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks - if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want +"use strict"; - if (isFn(stream.destroy)) return stream.destroy() - callback(err || new Error('stream was destroyed')) - } -} +var common = __nccwpck_require__(6829); +var Type = __nccwpck_require__(6073); -var call = function (fn) { - fn() +function isHexCode(c) { + return ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) || + ((0x41/* A */ <= c) && (c <= 0x46/* F */)) || + ((0x61/* a */ <= c) && (c <= 0x66/* f */)); } -var pipe = function (from, to) { - return from.pipe(to) +function isOctCode(c) { + return ((0x30/* 0 */ <= c) && (c <= 0x37/* 7 */)); } -var pump = function () { - var streams = Array.prototype.slice.call(arguments) - var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop - - if (Array.isArray(streams[0])) streams = streams[0] - if (streams.length < 2) throw new Error('pump requires two streams per minimum') - - var error - var destroys = streams.map(function (stream, i) { - var reading = i < streams.length - 1 - var writing = i > 0 - return destroyer(stream, reading, writing, function (err) { - if (!error) error = err - if (err) destroys.forEach(call) - if (reading) return - destroys.forEach(call) - callback(error) - }) - }) - - return streams.reduce(pipe) +function isDecCode(c) { + return ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)); } -module.exports = pump +function resolveYamlInteger(data) { + if (data === null) return false; + var max = data.length, + index = 0, + hasDigits = false, + ch; -/***/ }), + if (!max) return false; -/***/ 4931: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + ch = data[index]; -// Note: since nyc uses this module to output coverage, any lines -// that are in the direct sync flow of nyc's outputCoverage are -// ignored, since we can never get coverage for them. 
-var assert = __nccwpck_require__(2357) -var signals = __nccwpck_require__(3710) -var isWin = /^win/i.test(process.platform) + // sign + if (ch === '-' || ch === '+') { + ch = data[++index]; + } -var EE = __nccwpck_require__(8614) -/* istanbul ignore if */ -if (typeof EE !== 'function') { - EE = EE.EventEmitter -} + if (ch === '0') { + // 0 + if (index + 1 === max) return true; + ch = data[++index]; -var emitter -if (process.__signal_exit_emitter__) { - emitter = process.__signal_exit_emitter__ -} else { - emitter = process.__signal_exit_emitter__ = new EE() - emitter.count = 0 - emitter.emitted = {} -} + // base 2, base 8, base 16 -// Because this emitter is a global, we have to check to see if a -// previous version of this library failed to enable infinite listeners. -// I know what you're about to say. But literally everything about -// signal-exit is a compromise with evil. Get used to it. -if (!emitter.infinite) { - emitter.setMaxListeners(Infinity) - emitter.infinite = true -} + if (ch === 'b') { + // base 2 + index++; -module.exports = function (cb, opts) { - assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler') + for (; index < max; index++) { + ch = data[index]; + if (ch === '_') continue; + if (ch !== '0' && ch !== '1') return false; + hasDigits = true; + } + return hasDigits && ch !== '_'; + } - if (loaded === false) { - load() - } - var ev = 'exit' - if (opts && opts.alwaysLast) { - ev = 'afterexit' - } + if (ch === 'x') { + // base 16 + index++; - var remove = function () { - emitter.removeListener(ev, cb) - if (emitter.listeners('exit').length === 0 && - emitter.listeners('afterexit').length === 0) { - unload() + for (; index < max; index++) { + ch = data[index]; + if (ch === '_') continue; + if (!isHexCode(data.charCodeAt(index))) return false; + hasDigits = true; + } + return hasDigits && ch !== '_'; } - } - emitter.on(ev, cb) - return remove -} -module.exports.unload = unload -function unload () { - if (!loaded) { - return - } - loaded = false + if (ch === 'o') { + // base 8 + index++; - signals.forEach(function (sig) { - try { - process.removeListener(sig, sigListeners[sig]) - } catch (er) {} - }) - process.emit = originalProcessEmit - process.reallyExit = originalProcessReallyExit - emitter.count -= 1 -} - -function emit (event, code, signal) { - if (emitter.emitted[event]) { - return - } - emitter.emitted[event] = true - emitter.emit(event, code, signal) -} - -// { : , ... } -var sigListeners = {} -signals.forEach(function (sig) { - sigListeners[sig] = function listener () { - // If there are no other listeners, an exit is coming! - // Simplest way: remove us and then re-send the signal. - // We know that this will kill the process, so we can - // safely emit now. 
- var listeners = process.listeners(sig) - if (listeners.length === emitter.count) { - unload() - emit('exit', null, sig) - /* istanbul ignore next */ - emit('afterexit', null, sig) - /* istanbul ignore next */ - if (isWin && sig === 'SIGHUP') { - // "SIGHUP" throws an `ENOSYS` error on Windows, - // so use a supported signal instead - sig = 'SIGINT' + for (; index < max; index++) { + ch = data[index]; + if (ch === '_') continue; + if (!isOctCode(data.charCodeAt(index))) return false; + hasDigits = true; } - process.kill(process.pid, sig) + return hasDigits && ch !== '_'; } } -}) -module.exports.signals = function () { - return signals -} - -module.exports.load = load + // base 10 (except 0) -var loaded = false + // value should not start with `_`; + if (ch === '_') return false; -function load () { - if (loaded) { - return + for (; index < max; index++) { + ch = data[index]; + if (ch === '_') continue; + if (!isDecCode(data.charCodeAt(index))) { + return false; + } + hasDigits = true; } - loaded = true - // This is the number of onSignalExit's that are in play. - // It's important so that we can count the correct number of - // listeners on signals, and don't wait for the other one to - // handle it instead of us. - emitter.count += 1 - - signals = signals.filter(function (sig) { - try { - process.on(sig, sigListeners[sig]) - return true - } catch (er) { - return false - } - }) + // Should have digits and should not end with `_` + if (!hasDigits || ch === '_') return false; - process.emit = processEmit - process.reallyExit = processReallyExit + return true; } -var originalProcessReallyExit = process.reallyExit -function processReallyExit (code) { - process.exitCode = code || 0 - emit('exit', process.exitCode, null) - /* istanbul ignore next */ - emit('afterexit', process.exitCode, null) - /* istanbul ignore next */ - originalProcessReallyExit.call(process, process.exitCode) -} +function constructYamlInteger(data) { + var value = data, sign = 1, ch; -var originalProcessEmit = process.emit -function processEmit (ev, arg) { - if (ev === 'exit') { - if (arg !== undefined) { - process.exitCode = arg - } - var ret = originalProcessEmit.apply(this, arguments) - emit('exit', process.exitCode, null) - /* istanbul ignore next */ - emit('afterexit', process.exitCode, null) - return ret - } else { - return originalProcessEmit.apply(this, arguments) + if (value.indexOf('_') !== -1) { + value = value.replace(/_/g, ''); } -} + ch = value[0]; -/***/ }), + if (ch === '-' || ch === '+') { + if (ch === '-') sign = -1; + value = value.slice(1); + ch = value[0]; + } -/***/ 3710: -/***/ ((module) => { + if (value === '0') return 0; -// This is not the set of all possible signals. -// -// It IS, however, the set of all signals that trigger -// an exit on either Linux or BSD systems. Linux is a -// superset of the signal names supported on BSD, and -// the unknown signals just fail to register, so we can -// catch that easily enough. -// -// Don't bother with SIGKILL. It's uncatchable, which -// means that we can't fire any callbacks anyway. -// -// If a user does happen to register a handler on a non- -// fatal signal like SIGWINCH or something, and then -// exit, it'll end up firing `process.emit('exit')`, so -// the handler will be fired anyway. -// -// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised -// artificially, inherently leave the process in a -// state from which it is not safe to try and enter JS -// listeners. 
-module.exports = [ - 'SIGABRT', - 'SIGALRM', - 'SIGHUP', - 'SIGINT', - 'SIGTERM' -] - -if (process.platform !== 'win32') { - module.exports.push( - 'SIGVTALRM', - 'SIGXCPU', - 'SIGXFSZ', - 'SIGUSR2', - 'SIGTRAP', - 'SIGSYS', - 'SIGQUIT', - 'SIGIOT' - // should detect profiler and enable/disable accordingly. - // see #21 - // 'SIGPROF' - ) + if (ch === '0') { + if (value[1] === 'b') return sign * parseInt(value.slice(2), 2); + if (value[1] === 'x') return sign * parseInt(value.slice(2), 16); + if (value[1] === 'o') return sign * parseInt(value.slice(2), 8); + } + + return sign * parseInt(value, 10); } -if (process.platform === 'linux') { - module.exports.push( - 'SIGIO', - 'SIGPOLL', - 'SIGPWR', - 'SIGSTKFLT', - 'SIGUNUSED' - ) +function isInteger(object) { + return (Object.prototype.toString.call(object)) === '[object Number]' && + (object % 1 === 0 && !common.isNegativeZero(object)); } +module.exports = new Type('tag:yaml.org,2002:int', { + kind: 'scalar', + resolve: resolveYamlInteger, + construct: constructYamlInteger, + predicate: isInteger, + represent: { + binary: function (obj) { return obj >= 0 ? '0b' + obj.toString(2) : '-0b' + obj.toString(2).slice(1); }, + octal: function (obj) { return obj >= 0 ? '0o' + obj.toString(8) : '-0o' + obj.toString(8).slice(1); }, + decimal: function (obj) { return obj.toString(10); }, + /* eslint-disable max-len */ + hexadecimal: function (obj) { return obj >= 0 ? '0x' + obj.toString(16).toUpperCase() : '-0x' + obj.toString(16).toUpperCase().slice(1); } + }, + defaultStyle: 'decimal', + styleAliases: { + binary: [ 2, 'bin' ], + octal: [ 8, 'oct' ], + decimal: [ 10, 'dec' ], + hexadecimal: [ 16, 'hex' ] + } +}); + /***/ }), -/***/ 5515: -/***/ ((module) => { +/***/ 6150: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -module.exports = function (x) { - var lf = typeof x === 'string' ? '\n' : '\n'.charCodeAt(); - var cr = typeof x === 'string' ? '\r' : '\r'.charCodeAt(); - - if (x[x.length - 1] === lf) { - x = x.slice(0, x.length - 1); - } - if (x[x.length - 1] === cr) { - x = x.slice(0, x.length - 1); - } +var Type = __nccwpck_require__(6073); - return x; -}; +module.exports = new Type('tag:yaml.org,2002:map', { + kind: 'mapping', + construct: function (data) { return data !== null ? 
data : {}; } +}); /***/ }), -/***/ 4294: +/***/ 6104: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = __nccwpck_require__(4219); - - -/***/ }), - -/***/ 4219: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - "use strict"; -var net = __nccwpck_require__(1631); -var tls = __nccwpck_require__(4016); -var http = __nccwpck_require__(8605); -var https = __nccwpck_require__(7211); -var events = __nccwpck_require__(8614); -var assert = __nccwpck_require__(2357); -var util = __nccwpck_require__(1669); - - -exports.httpOverHttp = httpOverHttp; -exports.httpsOverHttp = httpsOverHttp; -exports.httpOverHttps = httpOverHttps; -exports.httpsOverHttps = httpsOverHttps; - - -function httpOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - return agent; -} - -function httpsOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} - -function httpOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - return agent; -} +var Type = __nccwpck_require__(6073); -function httpsOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; +function resolveYamlMerge(data) { + return data === '<<' || data === null; } +module.exports = new Type('tag:yaml.org,2002:merge', { + kind: 'scalar', + resolve: resolveYamlMerge +}); -function TunnelingAgent(options) { - var self = this; - self.options = options || {}; - self.proxyOptions = self.options.proxy || {}; - self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; - self.requests = []; - self.sockets = []; - - self.on('free', function onFree(socket, host, port, localAddress) { - var options = toOptions(host, port, localAddress); - for (var i = 0, len = self.requests.length; i < len; ++i) { - var pending = self.requests[i]; - if (pending.host === options.host && pending.port === options.port) { - // Detect the request to connect same origin server, - // reuse the connection. - self.requests.splice(i, 1); - pending.request.onSocket(socket); - return; - } - } - socket.destroy(); - self.removeSocket(socket); - }); -} -util.inherits(TunnelingAgent, events.EventEmitter); - -TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { - var self = this; - var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); - if (self.sockets.length >= this.maxSockets) { - // We are over limit so we'll add it to the queue. - self.requests.push(options); - return; - } +/***/ }), - // If we are under maxSockets create a new one. 
- self.createSocket(options, function(socket) { - socket.on('free', onFree); - socket.on('close', onCloseOrRemove); - socket.on('agentRemove', onCloseOrRemove); - req.onSocket(socket); +/***/ 721: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - function onFree() { - self.emit('free', socket, options); - } +"use strict"; - function onCloseOrRemove(err) { - self.removeSocket(socket); - socket.removeListener('free', onFree); - socket.removeListener('close', onCloseOrRemove); - socket.removeListener('agentRemove', onCloseOrRemove); - } - }); -}; -TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { - var self = this; - var placeholder = {}; - self.sockets.push(placeholder); +var Type = __nccwpck_require__(6073); - var connectOptions = mergeOptions({}, self.proxyOptions, { - method: 'CONNECT', - path: options.host + ':' + options.port, - agent: false, - headers: { - host: options.host + ':' + options.port - } - }); - if (options.localAddress) { - connectOptions.localAddress = options.localAddress; - } - if (connectOptions.proxyAuth) { - connectOptions.headers = connectOptions.headers || {}; - connectOptions.headers['Proxy-Authorization'] = 'Basic ' + - new Buffer(connectOptions.proxyAuth).toString('base64'); - } +function resolveYamlNull(data) { + if (data === null) return true; - debug('making CONNECT request'); - var connectReq = self.request(connectOptions); - connectReq.useChunkedEncodingByDefault = false; // for v0.6 - connectReq.once('response', onResponse); // for v0.6 - connectReq.once('upgrade', onUpgrade); // for v0.6 - connectReq.once('connect', onConnect); // for v0.7 or later - connectReq.once('error', onError); - connectReq.end(); + var max = data.length; - function onResponse(res) { - // Very hacky. This is necessary to avoid http-parser leaks. - res.upgrade = true; - } + return (max === 1 && data === '~') || + (max === 4 && (data === 'null' || data === 'Null' || data === 'NULL')); +} - function onUpgrade(res, socket, head) { - // Hacky. 
- process.nextTick(function() { - onConnect(res, socket, head); - }); - } +function constructYamlNull() { + return null; +} - function onConnect(res, socket, head) { - connectReq.removeAllListeners(); - socket.removeAllListeners(); +function isNull(object) { + return object === null; +} - if (res.statusCode !== 200) { - debug('tunneling socket could not be established, statusCode=%d', - res.statusCode); - socket.destroy(); - var error = new Error('tunneling socket could not be established, ' + - 'statusCode=' + res.statusCode); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; - } - if (head.length > 0) { - debug('got illegal response body from proxy'); - socket.destroy(); - var error = new Error('got illegal response body from proxy'); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; - } - debug('tunneling connection has established'); - self.sockets[self.sockets.indexOf(placeholder)] = socket; - return cb(socket); - } +module.exports = new Type('tag:yaml.org,2002:null', { + kind: 'scalar', + resolve: resolveYamlNull, + construct: constructYamlNull, + predicate: isNull, + represent: { + canonical: function () { return '~'; }, + lowercase: function () { return 'null'; }, + uppercase: function () { return 'NULL'; }, + camelcase: function () { return 'Null'; }, + empty: function () { return ''; } + }, + defaultStyle: 'lowercase' +}); - function onError(cause) { - connectReq.removeAllListeners(); - debug('tunneling socket could not be established, cause=%s\n', - cause.message, cause.stack); - var error = new Error('tunneling socket could not be established, ' + - 'cause=' + cause.message); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - } -}; +/***/ }), -TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { - var pos = this.sockets.indexOf(socket) - if (pos === -1) { - return; - } - this.sockets.splice(pos, 1); +/***/ 9046: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - var pending = this.requests.shift(); - if (pending) { - // If we have pending requests and a socket gets closed a new one - // needs to be created to take over in the pool for the one that closed. - this.createSocket(pending, function(socket) { - pending.request.onSocket(socket); - }); - } -}; +"use strict"; -function createSecureSocket(options, cb) { - var self = this; - TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { - var hostHeader = options.request.getHeader('host'); - var tlsOptions = mergeOptions({}, self.options, { - socket: socket, - servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host - }); - // 0 is dummy port for v0.6 - var secureSocket = tls.connect(0, tlsOptions); - self.sockets[self.sockets.indexOf(socket)] = secureSocket; - cb(secureSocket); - }); -} +var Type = __nccwpck_require__(6073); +var _hasOwnProperty = Object.prototype.hasOwnProperty; +var _toString = Object.prototype.toString; -function toOptions(host, port, localAddress) { - if (typeof host === 'string') { // since v0.10 - return { - host: host, - port: port, - localAddress: localAddress - }; - } - return host; // for v0.11 or later -} +function resolveYamlOmap(data) { + if (data === null) return true; -function mergeOptions(target) { - for (var i = 1, len = arguments.length; i < len; ++i) { - var overrides = arguments[i]; - if (typeof overrides === 'object') { - var keys = Object.keys(overrides); - for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { - var k = keys[j]; - if (overrides[k] !== undefined) { - target[k] = overrides[k]; - } + var objectKeys = [], index, length, pair, pairKey, pairHasKey, + object = data; + + for (index = 0, length = object.length; index < length; index += 1) { + pair = object[index]; + pairHasKey = false; + + if (_toString.call(pair) !== '[object Object]') return false; + + for (pairKey in pair) { + if (_hasOwnProperty.call(pair, pairKey)) { + if (!pairHasKey) pairHasKey = true; + else return false; } } - } - return target; -} + if (!pairHasKey) return false; -var debug; -if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { - debug = function() { - var args = Array.prototype.slice.call(arguments); - if (typeof args[0] === 'string') { - args[0] = 'TUNNEL: ' + args[0]; - } else { - args.unshift('TUNNEL:'); - } - console.error.apply(console, args); + if (objectKeys.indexOf(pairKey) === -1) objectKeys.push(pairKey); + else return false; } -} else { - debug = function() {}; + + return true; } -exports.debug = debug; // for test + +function constructYamlOmap(data) { + return data !== null ? 
data : []; +} + +module.exports = new Type('tag:yaml.org,2002:omap', { + kind: 'sequence', + resolve: resolveYamlOmap, + construct: constructYamlOmap +}); /***/ }), -/***/ 5030: -/***/ ((__unused_webpack_module, exports) => { +/***/ 6860: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); +var Type = __nccwpck_require__(6073); -function getUserAgent() { - if (typeof navigator === "object" && "userAgent" in navigator) { - return navigator.userAgent; +var _toString = Object.prototype.toString; + +function resolveYamlPairs(data) { + if (data === null) return true; + + var index, length, pair, keys, result, + object = data; + + result = new Array(object.length); + + for (index = 0, length = object.length; index < length; index += 1) { + pair = object[index]; + + if (_toString.call(pair) !== '[object Object]') return false; + + keys = Object.keys(pair); + + if (keys.length !== 1) return false; + + result[index] = [ keys[0], pair[keys[0]] ]; } - if (typeof process === "object" && "version" in process) { - return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`; + return true; +} + +function constructYamlPairs(data) { + if (data === null) return []; + + var index, length, pair, keys, result, + object = data; + + result = new Array(object.length); + + for (index = 0, length = object.length; index < length; index += 1) { + pair = object[index]; + + keys = Object.keys(pair); + + result[index] = [ keys[0], pair[keys[0]] ]; } - return ""; + return result; } -exports.getUserAgent = getUserAgent; -//# sourceMappingURL=index.js.map +module.exports = new Type('tag:yaml.org,2002:pairs', { + kind: 'sequence', + resolve: resolveYamlPairs, + construct: constructYamlPairs +}); /***/ }), -/***/ 1463: -/***/ ((__unused_webpack_module, exports) => { +/***/ 7283: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -exports.fromCallback = function (fn) { - return Object.defineProperty(function (...args) { - if (typeof args[args.length - 1] === 'function') fn.apply(this, args) - else { - return new Promise((resolve, reject) => { - fn.call( - this, - ...args, - (err, res) => (err != null) ? reject(err) : resolve(res) - ) - }) - } - }, 'name', { value: fn.name }) -} +var Type = __nccwpck_require__(6073); -exports.fromPromise = function (fn) { - return Object.defineProperty(function (...args) { - const cb = args[args.length - 1] - if (typeof cb !== 'function') return fn.apply(this, args) - else fn.apply(this, args.slice(0, -1)).then(r => cb(null, r), cb) - }, 'name', { value: fn.name }) -} +module.exports = new Type('tag:yaml.org,2002:seq', { + kind: 'sequence', + construct: function (data) { return data !== null ? 
data : []; } +}); /***/ }), -/***/ 3515: +/***/ 9548: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const os = __nccwpck_require__(2087); -const execa = __nccwpck_require__(4780); - -// Reference: https://www.gaijin.at/en/lstwinver.php -const names = new Map([ - ['10.0', '10'], - ['6.3', '8.1'], - ['6.2', '8'], - ['6.1', '7'], - ['6.0', 'Vista'], - ['5.2', 'Server 2003'], - ['5.1', 'XP'], - ['5.0', '2000'], - ['4.9', 'ME'], - ['4.1', '98'], - ['4.0', '95'] -]); - -const windowsRelease = release => { - const version = /\d+\.\d/.exec(release || os.release()); - - if (release && !version) { - throw new Error('`release` argument doesn\'t match `n.n`'); - } - const ver = (version || [])[0]; +var Type = __nccwpck_require__(6073); - // Server 2008, 2012, 2016, and 2019 versions are ambiguous with desktop versions and must be detected at runtime. - // If `release` is omitted or we're on a Windows system, and the version number is an ambiguous version - // then use `wmic` to get the OS caption: https://msdn.microsoft.com/en-us/library/aa394531(v=vs.85).aspx - // If `wmic` is obsoloete (later versions of Windows 10), use PowerShell instead. - // If the resulting caption contains the year 2008, 2012, 2016 or 2019, it is a server version, so return a server OS name. - if ((!release || release === os.release()) && ['6.1', '6.2', '6.3', '10.0'].includes(ver)) { - let stdout; - try { - stdout = execa.sync('wmic', ['os', 'get', 'Caption']).stdout || ''; - } catch (_) { - stdout = execa.sync('powershell', ['(Get-CimInstance -ClassName Win32_OperatingSystem).caption']).stdout || ''; - } +var _hasOwnProperty = Object.prototype.hasOwnProperty; - const year = (stdout.match(/2008|2012|2016|2019/) || [])[0]; +function resolveYamlSet(data) { + if (data === null) return true; - if (year) { - return `Server ${year}`; - } - } + var key, object = data; - return names.get(ver); -}; + for (key in object) { + if (_hasOwnProperty.call(object, key)) { + if (object[key] !== null) return false; + } + } + + return true; +} + +function constructYamlSet(data) { + return data !== null ? data : {}; +} + +module.exports = new Type('tag:yaml.org,2002:set', { + kind: 'mapping', + resolve: resolveYamlSet, + construct: constructYamlSet +}); + + +/***/ }), + +/***/ 3619: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; -module.exports = windowsRelease; + +var Type = __nccwpck_require__(6073); + +module.exports = new Type('tag:yaml.org,2002:str', { + kind: 'scalar', + construct: function (data) { return data !== null ? data : ''; } +}); /***/ }), -/***/ 6868: +/***/ 9212: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const cp = __nccwpck_require__(3129); -const parse = __nccwpck_require__(6876); -const enoent = __nccwpck_require__(8625); +var Type = __nccwpck_require__(6073); + +var YAML_DATE_REGEXP = new RegExp( + '^([0-9][0-9][0-9][0-9])' + // [1] year + '-([0-9][0-9])' + // [2] month + '-([0-9][0-9])$'); // [3] day + +var YAML_TIMESTAMP_REGEXP = new RegExp( + '^([0-9][0-9][0-9][0-9])' + // [1] year + '-([0-9][0-9]?)' + // [2] month + '-([0-9][0-9]?)' + // [3] day + '(?:[Tt]|[ \\t]+)' + // ... + '([0-9][0-9]?)' + // [4] hour + ':([0-9][0-9])' + // [5] minute + ':([0-9][0-9])' + // [6] second + '(?:\\.([0-9]*))?' 
+ // [7] fraction + '(?:[ \\t]*(Z|([-+])([0-9][0-9]?)' + // [8] tz [9] tz_sign [10] tz_hour + '(?::([0-9][0-9]))?))?$'); // [11] tz_minute + +function resolveYamlTimestamp(data) { + if (data === null) return false; + if (YAML_DATE_REGEXP.exec(data) !== null) return true; + if (YAML_TIMESTAMP_REGEXP.exec(data) !== null) return true; + return false; +} + +function constructYamlTimestamp(data) { + var match, year, month, day, hour, minute, second, fraction = 0, + delta = null, tz_hour, tz_minute, date; + + match = YAML_DATE_REGEXP.exec(data); + if (match === null) match = YAML_TIMESTAMP_REGEXP.exec(data); + + if (match === null) throw new Error('Date resolve error'); + + // match: [1] year [2] month [3] day + + year = +(match[1]); + month = +(match[2]) - 1; // JS month starts with 0 + day = +(match[3]); + + if (!match[4]) { // no hour + return new Date(Date.UTC(year, month, day)); + } -function spawn(command, args, options) { - // Parse the arguments - const parsed = parse(command, args, options); + // match: [4] hour [5] minute [6] second [7] fraction - // Spawn the child process - const spawned = cp.spawn(parsed.command, parsed.args, parsed.options); + hour = +(match[4]); + minute = +(match[5]); + second = +(match[6]); - // Hook into child process "exit" event to emit an error if the command - // does not exists, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 - enoent.hookChildProcess(spawned, parsed); + if (match[7]) { + fraction = match[7].slice(0, 3); + while (fraction.length < 3) { // milli-seconds + fraction += '0'; + } + fraction = +fraction; + } - return spawned; -} + // match: [8] tz [9] tz_sign [10] tz_hour [11] tz_minute -function spawnSync(command, args, options) { - // Parse the arguments - const parsed = parse(command, args, options); + if (match[9]) { + tz_hour = +(match[10]); + tz_minute = +(match[11] || 0); + delta = (tz_hour * 60 + tz_minute) * 60000; // delta in mili-seconds + if (match[9] === '-') delta = -delta; + } - // Spawn the child process - const result = cp.spawnSync(parsed.command, parsed.args, parsed.options); + date = new Date(Date.UTC(year, month, day, hour, minute, second, fraction)); - // Analyze if the command does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 - result.error = result.error || enoent.verifyENOENTSync(result.status, parsed); + if (delta) date.setTime(date.getTime() - delta); - return result; + return date; } -module.exports = spawn; -module.exports.spawn = spawn; -module.exports.sync = spawnSync; +function representYamlTimestamp(object /*, style*/) { + return object.toISOString(); +} -module.exports._parse = parse; -module.exports._enoent = enoent; +module.exports = new Type('tag:yaml.org,2002:timestamp', { + kind: 'scalar', + resolve: resolveYamlTimestamp, + construct: constructYamlTimestamp, + instanceOf: Date, + represent: representYamlTimestamp +}); /***/ }), -/***/ 8625: -/***/ ((module) => { - -"use strict"; - - -const isWin = process.platform === 'win32'; +/***/ 6160: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -function notFoundError(original, syscall) { - return Object.assign(new Error(`${syscall} ${original.command} ENOENT`), { - code: 'ENOENT', - errno: 'ENOENT', - syscall: `${syscall} ${original.command}`, - path: original.command, - spawnargs: original.args, - }); +let _fs +try { + _fs = __nccwpck_require__(7758) +} catch (_) { + _fs = __nccwpck_require__(5747) } +const universalify = __nccwpck_require__(1463) +const { stringify, stripBom } = 
__nccwpck_require__(5902) -function hookChildProcess(cp, parsed) { - if (!isWin) { - return; - } - - const originalEmit = cp.emit; - - cp.emit = function (name, arg1) { - // If emitting "exit" event and exit code is 1, we need to check if - // the command exists and emit an "error" instead - // See https://github.com/IndigoUnited/node-cross-spawn/issues/16 - if (name === 'exit') { - const err = verifyENOENT(arg1, parsed, 'spawn'); +async function _readFile (file, options = {}) { + if (typeof options === 'string') { + options = { encoding: options } + } - if (err) { - return originalEmit.call(cp, 'error', err); - } - } + const fs = options.fs || _fs - return originalEmit.apply(cp, arguments); // eslint-disable-line prefer-rest-params - }; -} + const shouldThrow = 'throws' in options ? options.throws : true -function verifyENOENT(status, parsed) { - if (isWin && status === 1 && !parsed.file) { - return notFoundError(parsed.original, 'spawn'); - } + let data = await universalify.fromCallback(fs.readFile)(file, options) - return null; -} + data = stripBom(data) -function verifyENOENTSync(status, parsed) { - if (isWin && status === 1 && !parsed.file) { - return notFoundError(parsed.original, 'spawnSync'); + let obj + try { + obj = JSON.parse(data, options ? options.reviver : null) + } catch (err) { + if (shouldThrow) { + err.message = `${file}: ${err.message}` + throw err + } else { + return null } + } - return null; + return obj } -module.exports = { - hookChildProcess, - verifyENOENT, - verifyENOENTSync, - notFoundError, -}; - - -/***/ }), +const readFile = universalify.fromPromise(_readFile) -/***/ 6876: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +function readFileSync (file, options = {}) { + if (typeof options === 'string') { + options = { encoding: options } + } -"use strict"; + const fs = options.fs || _fs + const shouldThrow = 'throws' in options ? 
options.throws : true -const path = __nccwpck_require__(5622); -const niceTry = __nccwpck_require__(8560); -const resolveCommand = __nccwpck_require__(8741); -const escape = __nccwpck_require__(4300); -const readShebang = __nccwpck_require__(8536); -const semver = __nccwpck_require__(9317); + try { + let content = fs.readFileSync(file, options) + content = stripBom(content) + return JSON.parse(content, options.reviver) + } catch (err) { + if (shouldThrow) { + err.message = `${file}: ${err.message}` + throw err + } else { + return null + } + } +} -const isWin = process.platform === 'win32'; -const isExecutableRegExp = /\.(?:com|exe)$/i; -const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i; +async function _writeFile (file, obj, options = {}) { + const fs = options.fs || _fs -// `options.shell` is supported in Node ^4.8.0, ^5.7.0 and >= 6.0.0 -const supportsShellOption = niceTry(() => semver.satisfies(process.version, '^4.8.0 || ^5.7.0 || >= 6.0.0', true)) || false; + const str = stringify(obj, options) -function detectShebang(parsed) { - parsed.file = resolveCommand(parsed); + await universalify.fromCallback(fs.writeFile)(file, str, options) +} - const shebang = parsed.file && readShebang(parsed.file); +const writeFile = universalify.fromPromise(_writeFile) - if (shebang) { - parsed.args.unshift(parsed.file); - parsed.command = shebang; +function writeFileSync (file, obj, options = {}) { + const fs = options.fs || _fs - return resolveCommand(parsed); - } + const str = stringify(obj, options) + // not sure if fs.writeFileSync returns anything, but just in case + return fs.writeFileSync(file, str, options) +} - return parsed.file; +const jsonfile = { + readFile, + readFileSync, + writeFile, + writeFileSync } -function parseNonShell(parsed) { - if (!isWin) { - return parsed; - } +module.exports = jsonfile - // Detect & add support for shebangs - const commandFile = detectShebang(parsed); - // We don't need a shell if the command filename is an executable - const needsShell = !isExecutableRegExp.test(commandFile); +/***/ }), - // If a shell is required, use cmd.exe and take care of escaping everything correctly - // Note that `forceShell` is an hidden option used only in tests - if (parsed.options.forceShell || needsShell) { - // Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/` - // The cmd-shim simply calls execute the package bin file with NodeJS, proxying any argument - // Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called, - // we need to double escape them - const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile); +/***/ 5902: +/***/ ((module) => { - // Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar) - // This is necessary otherwise it will always fail with ENOENT in those cases - parsed.command = path.normalize(parsed.command); +function stringify (obj, { EOL = '\n', finalEOL = true, replacer = null, spaces } = {}) { + const EOF = finalEOL ? 
EOL : '' + const str = JSON.stringify(obj, replacer, spaces) - // Escape command & arguments - parsed.command = escape.command(parsed.command); - parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars)); + return str.replace(/\n/g, EOL) + EOF +} - const shellCommand = [parsed.command].concat(parsed.args).join(' '); +function stripBom (content) { + // we do this because JSON.parse would convert it to a utf8 string if encoding wasn't specified + if (Buffer.isBuffer(content)) content = content.toString('utf8') + return content.replace(/^\uFEFF/, '') +} - parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; - parsed.command = process.env.comspec || 'cmd.exe'; - parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped - } +module.exports = { stringify, stripBom } - return parsed; -} -function parseShell(parsed) { - // If node supports the shell option, there's no need to mimic its behavior - if (supportsShellOption) { - return parsed; - } +/***/ }), - // Mimic node shell option - // See https://github.com/nodejs/node/blob/b9f6a2dc059a1062776133f3d4fd848c4da7d150/lib/child_process.js#L335 - const shellCommand = [parsed.command].concat(parsed.args).join(' '); +/***/ 467: +/***/ ((module, exports, __nccwpck_require__) => { - if (isWin) { - parsed.command = typeof parsed.options.shell === 'string' ? parsed.options.shell : process.env.comspec || 'cmd.exe'; - parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; - parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped - } else { - if (typeof parsed.options.shell === 'string') { - parsed.command = parsed.options.shell; - } else if (process.platform === 'android') { - parsed.command = '/system/bin/sh'; - } else { - parsed.command = '/bin/sh'; - } +"use strict"; - parsed.args = ['-c', shellCommand]; - } - return parsed; -} +Object.defineProperty(exports, "__esModule", ({ value: true })); -function parse(command, args, options) { - // Normalize arguments, similar to nodejs - if (args && !Array.isArray(args)) { - options = args; - args = null; - } +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - args = args ? args.slice(0) : []; // Clone array to avoid changing the original - options = Object.assign({}, options); // Clone object to avoid changing the original +var Stream = _interopDefault(__nccwpck_require__(2413)); +var http = _interopDefault(__nccwpck_require__(8605)); +var Url = _interopDefault(__nccwpck_require__(8835)); +var https = _interopDefault(__nccwpck_require__(7211)); +var zlib = _interopDefault(__nccwpck_require__(8761)); - // Build our parsed object - const parsed = { - command, - args, - options, - file: undefined, - original: { - command, - args, - }, - }; +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js - // Delegate further parsing to shell or non-shell - return options.shell ? 
parseShell(parsed) : parseNonShell(parsed); -} +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; -module.exports = parse; +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); +class Blob { + constructor() { + this[TYPE] = ''; -/***/ }), + const blobParts = arguments[0]; + const options = arguments[1]; -/***/ 4300: -/***/ ((module) => { + const buffers = []; + let size = 0; -"use strict"; + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + this[BUFFER] = Buffer.concat(buffers); -// See http://www.robvanderwoude.com/escapechars.php -const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g; + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; -function escapeCommand(arg) { - // Escape meta chars - arg = arg.replace(metaCharsRegExp, '^$1'); + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); - return arg; + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } } -function escapeArgument(arg, doubleEscapeMetaChars) { - // Convert to string - arg = `${arg}`; - - // Algorithm below is based on https://qntm.org/cmd - - // Sequence of backslashes followed by a double quote: - // double up all the backslashes and escape the double quote - arg = arg.replace(/(\\*)"/g, '$1$1\\"'); +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); - // Sequence of backslashes followed by the end of the string - // (which will become a double quote later): - // double up all the backslashes - arg = arg.replace(/(\\*)$/, '$1$1'); +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); - // All other 
backslashes occur literally +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ - // Quote the whole thing: - arg = `"${arg}"`; +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); - // Escape meta chars - arg = arg.replace(metaCharsRegExp, '^$1'); + this.message = message; + this.type = type; - // Double escape meta chars if necessary - if (doubleEscapeMetaChars) { - arg = arg.replace(metaCharsRegExp, '^$1'); - } + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; + } - return arg; + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); } -module.exports.command = escapeCommand; -module.exports.argument = escapeArgument; - - -/***/ }), - -/***/ 8536: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; -"use strict"; +let convert; +try { + convert = __nccwpck_require__(2877).convert; +} catch (e) {} +const INTERNALS = Symbol('Body internals'); -const fs = __nccwpck_require__(5747); -const shebangCommand = __nccwpck_require__(2116); +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; -function readShebang(command) { - // Read the first 150 bytes from the file - const size = 150; - let buffer; +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; - if (Buffer.alloc) { - // Node.js v4.5+ / v5.10+ - buffer = Buffer.alloc(size); - } else { - // Old Node.js API - buffer = new Buffer(size); - buffer.fill(0); // zero-fill - } + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; - let fd; + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; - try { - fd = fs.openSync(command, 'r'); - fs.readSync(fd, buffer, 0, size, 0); - fs.closeSync(fd); - } catch (e) { /* Empty */ } + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; - // Attempt to extract shebang (null is returned if not a shebang) - return shebangCommand(buffer.toString()); + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } } -module.exports = readShebang; - - -/***/ }), - -/***/ 8741: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const path = __nccwpck_require__(5622); -const which = __nccwpck_require__(3411); -const pathKey = __nccwpck_require__(7299)(); +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, -function resolveCommandAttempt(parsed, withoutPathExt) { - const cwd = process.cwd(); - const hasCustomCwd = parsed.options.cwd != null; + get bodyUsed() { + return this[INTERNALS].disturbed; + }, - // If a custom `cwd` was specified, we need to change the process cwd - // because `which` will do stat calls but does not support a custom cwd - if (hasCustomCwd) { - try { - process.chdir(parsed.options.cwd); - } catch (err) { - /* Empty */ - } - } + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, - let resolved; + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, - try { - resolved = which.sync(parsed.command, { - path: (parsed.options.env || process.env)[pathKey], - pathExt: withoutPathExt ? path.delimiter : undefined, - }); - } catch (e) { - /* Empty */ - } finally { - process.chdir(cwd); - } + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; - // If we successfully resolved, ensure that an absolute path is returned - // Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it - if (resolved) { - resolved = path.resolve(hasCustomCwd ? parsed.options.cwd : '', resolved); - } + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, - return resolved; -} + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, -function resolveCommand(parsed) { - return resolveCommandAttempt(parsed) || resolveCommandAttempt(parsed, true); -} + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, -module.exports = resolveCommand; + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; -/***/ }), +// In browsers, all properties are enumerable. 
+Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); -/***/ 4780: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; -"use strict"; +/** + * Consume and convert an entire Body to a Buffer. + * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; -const path = __nccwpck_require__(5622); -const childProcess = __nccwpck_require__(3129); -const crossSpawn = __nccwpck_require__(6868); -const stripEof = __nccwpck_require__(5515); -const npmRunPath = __nccwpck_require__(2509); -const isStream = __nccwpck_require__(2597); -const _getStream = __nccwpck_require__(2560); -const pFinally = __nccwpck_require__(1330); -const onExit = __nccwpck_require__(4931); -const errname = __nccwpck_require__(2160); -const stdio = __nccwpck_require__(7023); - -const TEN_MEGABYTES = 1000 * 1000 * 10; - -function handleArgs(cmd, args, opts) { - let parsed; - - opts = Object.assign({ - extendEnv: true, - env: {} - }, opts); - - if (opts.extendEnv) { - opts.env = Object.assign({}, process.env, opts.env); + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); } - if (opts.__winShell === true) { - delete opts.__winShell; - parsed = { - command: cmd, - args, - options: opts, - file: cmd, - original: { - cmd, - args - } - }; - } else { - parsed = crossSpawn._parse(cmd, args, opts); - } + this[INTERNALS].disturbed = true; - opts = Object.assign({ - maxBuffer: TEN_MEGABYTES, - buffer: true, - stripEof: true, - preferLocal: true, - localDir: parsed.options.cwd || process.cwd(), - encoding: 'utf8', - reject: true, - cleanup: true - }, parsed.options); - - opts.stdio = stdio(opts); - - if (opts.preferLocal) { - opts.env = npmRunPath.env(Object.assign({}, opts, {cwd: opts.localDir})); + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); } - if (opts.detached) { - // #115 - opts.cleanup = false; - } + let body = this.body; - if (process.platform === 'win32' && path.basename(parsed.command) === 'cmd.exe') { - // #116 - parsed.args.unshift('/q'); + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); } - return { - cmd: parsed.command, - args: parsed.args, - opts, - parsed - }; -} + // body is blob + if (isBlob(body)) { + body = body.stream(); + } -function handleInput(spawned, input) { - if (input === null || input === undefined) { - return; + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); } - if (isStream(input)) { - input.pipe(spawned.stdin); - } else { - spawned.stdin.end(input); + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); } -} -function handleOutput(opts, val) { - if (val && opts.stripEof) { - val = stripEof(val); - } + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; - return val; -} + return new Body.Promise(function (resolve, reject) 
{ + let resTimeout; -function handleShell(fn, cmd, opts) { - let file = '/bin/sh'; - let args = ['-c', cmd]; + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } - opts = Object.assign({}, opts); + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); - if (process.platform === 'win32') { - opts.__winShell = true; - file = process.env.comspec || 'cmd.exe'; - args = ['/s', '/c', `"${cmd}"`]; - opts.windowsVerbatimArguments = true; - } + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } - if (opts.shell) { - file = opts.shell; - delete opts.shell; - } + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } - return fn(file, args, opts); -} + accumBytes += chunk.length; + accum.push(chunk); + }); -function getStream(process, stream, {encoding, buffer, maxBuffer}) { - if (!process[stream]) { - return null; - } + body.on('end', function () { + if (abort) { + return; + } - let ret; + clearTimeout(resTimeout); - if (!buffer) { - // TODO: Use `ret = util.promisify(stream.finished)(process[stream]);` when targeting Node.js 10 - ret = new Promise((resolve, reject) => { - process[stream] - .once('end', resolve) - .once('error', reject); - }); - } else if (encoding) { - ret = _getStream(process[stream], { - encoding, - maxBuffer + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } }); - } else { - ret = _getStream.buffer(process[stream], {maxBuffer}); - } - - return ret.catch(err => { - err.stream = stream; - err.message = `${stream} ${err.message}`; - throw err; }); } -function makeError(result, options) { - const {stdout, stderr} = result; +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } - let err = result.error; - const {code, signal} = result; + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; - const {parsed, joinedCmd} = options; - const timedOut = options.timedOut || false; + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } - if (!err) { - let output = ''; + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); - if (Array.isArray(parsed.opts.stdio)) { - if (parsed.opts.stdio[2] !== 'inherit') { - output += output.length > 0 ? 
stderr : `\n${stderr}`; - } + // html5 + if (!res && str) { + res = / 0) { - joinedCmd += ' ' + args.join(' '); + // prevent decode issues when sites use incorrect encoding + // ref: https://hsivonen.fi/encoding-menu/ + if (charset === 'gb2312' || charset === 'gbk') { + charset = 'gb18030'; + } } - return joinedCmd; + // turn raw buffers into a single utf-8 buffer + return convert(buffer, 'UTF-8', charset).toString(); } -module.exports = (cmd, args, opts) => { - const parsed = handleArgs(cmd, args, opts); - const {encoding, buffer, maxBuffer} = parsed.opts; - const joinedCmd = joinCmd(cmd, args); - - let spawned; - try { - spawned = childProcess.spawn(parsed.cmd, parsed.args, parsed.opts); - } catch (err) { - return Promise.reject(err); - } - - let removeExitHandler; - if (parsed.opts.cleanup) { - removeExitHandler = onExit(() => { - spawned.kill(); - }); +/** + * Detect a URLSearchParams object + * ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143 + * + * @param Object obj Object to detect by type or brand + * @return String + */ +function isURLSearchParams(obj) { + // Duck-typing as a necessary condition. + if (typeof obj !== 'object' || typeof obj.append !== 'function' || typeof obj.delete !== 'function' || typeof obj.get !== 'function' || typeof obj.getAll !== 'function' || typeof obj.has !== 'function' || typeof obj.set !== 'function') { + return false; } - let timeoutId = null; - let timedOut = false; + // Brand-checking and more duck-typing as optional condition. + return obj.constructor.name === 'URLSearchParams' || Object.prototype.toString.call(obj) === '[object URLSearchParams]' || typeof obj.sort === 'function'; +} - const cleanup = () => { - if (timeoutId) { - clearTimeout(timeoutId); - timeoutId = null; - } +/** + * Check if `obj` is a W3C `Blob` object (which `File` inherits from) + * @param {*} obj + * @return {boolean} + */ +function isBlob(obj) { + return typeof obj === 'object' && typeof obj.arrayBuffer === 'function' && typeof obj.type === 'string' && typeof obj.stream === 'function' && typeof obj.constructor === 'function' && typeof obj.constructor.name === 'string' && /^(Blob|File)$/.test(obj.constructor.name) && /^(Blob|File)$/.test(obj[Symbol.toStringTag]); +} - if (removeExitHandler) { - removeExitHandler(); - } - }; +/** + * Clone body given Res/Req instance + * + * @param Mixed instance Response or Request instance + * @return Mixed + */ +function clone(instance) { + let p1, p2; + let body = instance.body; - if (parsed.opts.timeout > 0) { - timeoutId = setTimeout(() => { - timeoutId = null; - timedOut = true; - spawned.kill(parsed.opts.killSignal); - }, parsed.opts.timeout); + // don't allow cloning a used body + if (instance.bodyUsed) { + throw new Error('cannot clone body after it is used'); } - const processDone = new Promise(resolve => { - spawned.on('exit', (code, signal) => { - cleanup(); - resolve({code, signal}); - }); - - spawned.on('error', err => { - cleanup(); - resolve({error: err}); - }); - - if (spawned.stdin) { - spawned.stdin.on('error', err => { - cleanup(); - resolve({error: err}); - }); - } - }); + // check that body is a stream and not form-data object + // note: we can't clone the form-data object without having it as a dependency + if (body instanceof Stream && typeof body.getBoundary !== 'function') { + // tee instance body + p1 = new PassThrough(); + p2 = new PassThrough(); + body.pipe(p1); + body.pipe(p2); + // set instance body to teed body and return the other teed body + instance[INTERNALS].body = p1; + body 
= p2; + } - function destroy() { - if (spawned.stdout) { - spawned.stdout.destroy(); - } + return body; +} - if (spawned.stderr) { - spawned.stderr.destroy(); - } +/** + * Performs the operation "extract a `Content-Type` value from |object|" as + * specified in the specification: + * https://fetch.spec.whatwg.org/#concept-bodyinit-extract + * + * This function assumes that instance.body is present. + * + * @param Mixed instance Any options.body input + */ +function extractContentType(body) { + if (body === null) { + // body is null + return null; + } else if (typeof body === 'string') { + // body is string + return 'text/plain;charset=UTF-8'; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + return 'application/x-www-form-urlencoded;charset=UTF-8'; + } else if (isBlob(body)) { + // body is blob + return body.type || null; + } else if (Buffer.isBuffer(body)) { + // body is buffer + return null; + } else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + return null; + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + return null; + } else if (typeof body.getBoundary === 'function') { + // detect form data input from form-data module + return `multipart/form-data;boundary=${body.getBoundary()}`; + } else if (body instanceof Stream) { + // body is stream + // can't really do much about this + return null; + } else { + // Body constructor defaults other things to string + return 'text/plain;charset=UTF-8'; } +} - const handlePromise = () => pFinally(Promise.all([ - processDone, - getStream(spawned, 'stdout', {encoding, buffer, maxBuffer}), - getStream(spawned, 'stderr', {encoding, buffer, maxBuffer}) - ]).then(arr => { - const result = arr[0]; - result.stdout = arr[1]; - result.stderr = arr[2]; - - if (result.error || result.code !== 0 || result.signal !== null) { - const err = makeError(result, { - joinedCmd, - parsed, - timedOut - }); - - // TODO: missing some timeout logic for killed - // https://github.com/nodejs/node/blob/master/lib/child_process.js#L203 - // err.killed = spawned.killed || killed; - err.killed = err.killed || spawned.killed; +/** + * The Fetch Standard treats this as if "total bytes" is a property on the body. + * For us, we have to explicitly get it with a function. + * + * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes + * + * @param Body instance Instance of Body + * @return Number? 
Number of bytes, or null if not possible + */ +function getTotalBytes(instance) { + const body = instance.body; - if (!parsed.opts.reject) { - return err; - } - throw err; + if (body === null) { + // body is null + return 0; + } else if (isBlob(body)) { + return body.size; + } else if (Buffer.isBuffer(body)) { + // body is buffer + return body.length; + } else if (body && typeof body.getLengthSync === 'function') { + // detect form data input from form-data module + if (body._lengthRetrievers && body._lengthRetrievers.length == 0 || // 1.x + body.hasKnownLength && body.hasKnownLength()) { + // 2.x + return body.getLengthSync(); } + return null; + } else { + // body is stream + return null; + } +} - return { - stdout: handleOutput(parsed.opts, result.stdout), - stderr: handleOutput(parsed.opts, result.stderr), - code: 0, - failed: false, - killed: false, - signal: null, - cmd: joinedCmd, - timedOut: false - }; - }), destroy); - - crossSpawn._enoent.hookChildProcess(spawned, parsed.parsed); - - handleInput(spawned, parsed.opts.input); - - spawned.then = (onfulfilled, onrejected) => handlePromise().then(onfulfilled, onrejected); - spawned.catch = onrejected => handlePromise().catch(onrejected); +/** + * Write a Body to a Node.js WritableStream (e.g. http.Request) object. + * + * @param Body instance Instance of Body + * @return Void + */ +function writeToStream(dest, instance) { + const body = instance.body; - return spawned; -}; -// TODO: set `stderr: 'ignore'` when that option is implemented -module.exports.stdout = (...args) => module.exports(...args).then(x => x.stdout); + if (body === null) { + // body is null + dest.end(); + } else if (isBlob(body)) { + body.stream().pipe(dest); + } else if (Buffer.isBuffer(body)) { + // body is buffer + dest.write(body); + dest.end(); + } else { + // body is stream + body.pipe(dest); + } +} -// TODO: set `stdout: 'ignore'` when that option is implemented -module.exports.stderr = (...args) => module.exports(...args).then(x => x.stderr); +// expose Promise +Body.Promise = global.Promise; -module.exports.shell = (cmd, opts) => handleShell(module.exports, cmd, opts); +/** + * headers.js + * + * Headers class offers convenient helpers + */ -module.exports.sync = (cmd, args, opts) => { - const parsed = handleArgs(cmd, args, opts); - const joinedCmd = joinCmd(cmd, args); +const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/; +const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/; - if (isStream(parsed.opts.input)) { - throw new TypeError('The `input` option cannot be a stream in sync mode'); +function validateName(name) { + name = `${name}`; + if (invalidTokenRegex.test(name) || name === '') { + throw new TypeError(`${name} is not a legal HTTP header name`); } +} - const result = childProcess.spawnSync(parsed.cmd, parsed.args, parsed.opts); - result.code = result.status; - - if (result.error || result.status !== 0 || result.signal !== null) { - const err = makeError(result, { - joinedCmd, - parsed - }); +function validateValue(value) { + value = `${value}`; + if (invalidHeaderCharRegex.test(value)) { + throw new TypeError(`${value} is not a legal HTTP header value`); + } +} - if (!parsed.opts.reject) { - return err; +/** + * Find the key in the map object given a header name. + * + * Returns undefined if not found. 
+ * + * @param String name Header name + * @return String|Undefined + */ +function find(map, name) { + name = name.toLowerCase(); + for (const key in map) { + if (key.toLowerCase() === name) { + return key; } - - throw err; } + return undefined; +} - return { - stdout: handleOutput(parsed.opts, result.stdout), - stderr: handleOutput(parsed.opts, result.stderr), - code: 0, - failed: false, - signal: null, - cmd: joinedCmd, - timedOut: false - }; -}; - -module.exports.shellSync = (cmd, opts) => handleShell(module.exports.sync, cmd, opts); - +const MAP = Symbol('map'); +class Headers { + /** + * Headers class + * + * @param Object headers Response headers + * @return Void + */ + constructor() { + let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined; -/***/ }), + this[MAP] = Object.create(null); -/***/ 2160: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); -"use strict"; + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } -// Older verions of Node.js might not have `util.getSystemErrorName()`. -// In that case, fall back to a deprecated internal. -const util = __nccwpck_require__(1669); + return; + } -let uv; + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } -if (typeof util.getSystemErrorName === 'function') { - module.exports = util.getSystemErrorName; -} else { - try { - uv = process.binding('uv'); + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } - if (typeof uv.errname !== 'function') { - throw new TypeError('uv.errname is not a function'); + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); } - } catch (err) { - console.error('execa/lib/errname: unable to establish process.binding(\'uv\')', err); - uv = null; } - module.exports = code => errname(uv, code); -} - -// Used for testing the fallback behavior -module.exports.__test__ = errname; - -function errname(uv, code) { - if (uv) { - return uv.errname(code); - } + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } - if (!(code < 0)) { - throw new Error('err >= 0'); + return this[MAP][key].join(', '); } - return `Unknown system error ${code}`; -} - - - -/***/ }), - -/***/ 7023: -/***/ ((module) => { - -"use strict"; - -const alias = ['stdin', 'stdout', 'stderr']; + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with 
parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; -const hasAlias = opts => alias.some(x => Boolean(opts[x])); + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; -module.exports = opts => { - if (!opts) { - return null; + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } } - if (opts.stdio && hasAlias(opts)) { - throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${alias.map(x => `\`${x}\``).join(', ')}`); + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? key : name] = [value]; } - if (typeof opts.stdio === 'string') { - return opts.stdio; + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } } - const stdio = opts.stdio || []; - - if (!Array.isArray(stdio)) { - throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``); + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; } - const result = []; - const len = Math.max(stdio.length, alias.length); - - for (let i = 0; i < len; i++) { - let value = null; - - if (stdio[i] !== undefined) { - value = stdio[i]; - } else if (opts[alias[i]] !== undefined) { - value = opts[alias[i]]; + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; } - - result[i] = value; } - return result; -}; - - -/***/ }), - -/***/ 9286: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const {PassThrough} = __nccwpck_require__(2413); - -module.exports = options => { - options = Object.assign({}, options); + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } - const {array} = options; - let {encoding} = options; - const buffer = encoding === 'buffer'; - let objectMode = false; + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } - if (array) { - objectMode = !(encoding || buffer); - } else { - encoding = encoding || 'utf8'; + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); } - if (buffer) { - encoding = null; + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. 
+ * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; - let len = 0; - const ret = []; - const stream = new PassThrough({objectMode}); +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); - if (encoding) { - stream.setEncoding(encoding); - } +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); - stream.on('data', chunk => { - ret.push(chunk); +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; - if (objectMode) { - len = ret.length; - } else { - len += chunk.length; - } + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; }); +} - stream.getBufferedValue = () => { - if (array) { - return ret; - } +const INTERNAL = Symbol('internal'); - return buffer ? Buffer.concat(ret, len) : ret.join(''); +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 }; + return iterator; +} - stream.getBufferedLength = () => len; - - return stream; -}; - - -/***/ }), - -/***/ 2560: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } -"use strict"; + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; -const pump = __nccwpck_require__(8341); -const bufferStream = __nccwpck_require__(9286); + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } -class MaxBufferError extends Error { - constructor() { - super('maxBuffer exceeded'); - this.name = 'MaxBufferError'; - } -} + this[INTERNAL].index = index + 1; -function getStream(inputStream, options) { - if (!inputStream) { - return Promise.reject(new Error('Expected a stream')); + return { + value: values[index], + done: false + }; } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); - options = Object.assign({maxBuffer: Infinity}, options); - - const {maxBuffer} = options; +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); - let stream; - return new Promise((resolve, reject) => { - const rejectPromise = error => { - if (error) { // A null check - error.bufferedData = stream.getBufferedValue(); - } - reject(error); - }; +/** + * Export the Headers object in a form that Node.js can consume. 
+ * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); - stream = pump(inputStream, bufferStream(options), error => { - if (error) { - rejectPromise(error); - return; - } + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } - resolve(); - }); + return obj; +} - stream.on('data', () => { - if (stream.getBufferedLength() > maxBuffer) { - rejectPromise(new MaxBufferError()); +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } } - }); - }).then(() => stream.getBufferedValue()); + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; } -module.exports = getStream; -module.exports.buffer = (stream, options) => getStream(stream, Object.assign({}, options, {encoding: 'buffer'})); -module.exports.array = (stream, options) => getStream(stream, Object.assign({}, options, {array: true})); -module.exports.MaxBufferError = MaxBufferError; - - -/***/ }), - -/***/ 2597: -/***/ ((module) => { - -"use strict"; - - -var isStream = module.exports = function (stream) { - return stream !== null && typeof stream === 'object' && typeof stream.pipe === 'function'; -}; - -isStream.writable = function (stream) { - return isStream(stream) && stream.writable !== false && typeof stream._write === 'function' && typeof stream._writableState === 'object'; -}; - -isStream.readable = function (stream) { - return isStream(stream) && stream.readable !== false && typeof stream._read === 'function' && typeof stream._readableState === 'object'; -}; - -isStream.duplex = function (stream) { - return isStream.writable(stream) && isStream.readable(stream); -}; - -isStream.transform = function (stream) { - return isStream.duplex(stream) && typeof stream._transform === 'function' && typeof stream._transformState === 'object'; -}; - - -/***/ }), +const INTERNALS$1 = Symbol('Response internals'); -/***/ 2509: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; -"use strict"; +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; -const path = __nccwpck_require__(5622); -const pathKey = __nccwpck_require__(7299); + Body.call(this, body, opts); -module.exports = opts => { - opts = Object.assign({ - cwd: process.cwd(), - path: process.env[pathKey()] - }, opts); + const status = opts.status || 200; + const headers = new Headers(opts.headers); - let prev; - let pth = path.resolve(opts.cwd); - const ret = []; + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } - while (prev !== pth) { - ret.push(path.join(pth, 'node_modules/.bin')); - prev = pth; - pth = path.resolve(pth, '..'); + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; } - // ensure the running `node` binary is used - ret.push(path.dirname(process.execPath)); - - return ret.concat(opts.path).join(path.delimiter); -}; - -module.exports.env = opts => { - opts = Object.assign({ - env: process.env - }, opts); - - const env = Object.assign({}, opts.env); - const path = pathKey({env}); - - opts.path = env[path]; - env[path] = module.exports(opts); - - return env; -}; - + get url() { + return this[INTERNALS$1].url || ''; + } -/***/ }), + get status() { + return this[INTERNALS$1].status; + } -/***/ 7299: -/***/ ((module) => { + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } -"use strict"; + get redirected() { + return this[INTERNALS$1].counter > 0; + } -module.exports = opts => { - opts = opts || {}; + get statusText() { + return this[INTERNALS$1].statusText; + } - const env = opts.env || process.env; - const platform = opts.platform || process.platform; + get headers() { + return this[INTERNALS$1].headers; + } - if (platform !== 'win32') { - return 'PATH'; + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); } +} - return Object.keys(env).find(x => x.toUpperCase() === 'PATH') || 'Path'; -}; +Body.mixIn(Response.prototype); +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); -/***/ }), +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); -/***/ 9317: -/***/ ((module, exports) => { +const INTERNALS$2 = Symbol('Request internals'); -exports = module.exports = SemVer +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; -var debug -/* istanbul ignore next */ -if (typeof process === 'object' && - process.env && - process.env.NODE_DEBUG && - /\bsemver\b/i.test(process.env.NODE_DEBUG)) { - debug = function () { - var args = Array.prototype.slice.call(arguments, 0) - args.unshift('SEMVER') - console.log.apply(console, args) - } -} else { - debug = function () {} -} +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; -// Note: this is the semver.org version of the spec that it implements -// Not 
necessarily the package version of this code. -exports.SEMVER_SPEC_VERSION = '2.0.0' +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} -var MAX_LENGTH = 256 -var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || - /* istanbul ignore next */ 9007199254740991 +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} -// Max safe segment length for coercion. -var MAX_SAFE_COMPONENT_LENGTH = 16 +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; -// The actual regexps go on exports.re -var re = exports.re = [] -var src = exports.src = [] -var R = 0 + let parsedURL; -// The following Regular Expressions can be used for tokenizing, -// validating, and parsing SemVer version strings. + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parse_url(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parse_url(`${input}`); + } + input = {}; + } else { + parsedURL = parse_url(input.url); + } -// ## Numeric Identifier -// A single `0`, or a non-zero digit followed by zero or more digits. + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); -var NUMERICIDENTIFIER = R++ -src[NUMERICIDENTIFIER] = '0|[1-9]\\d*' -var NUMERICIDENTIFIERLOOSE = R++ -src[NUMERICIDENTIFIERLOOSE] = '[0-9]+' + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } -// ## Non-numeric Identifier -// Zero or more digits, followed by a letter or hyphen, and then zero or -// more letters, digits, or hyphens. + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; -var NONNUMERICIDENTIFIER = R++ -src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' - -// ## Main Version -// Three dot-separated numeric identifiers. - -var MAINVERSION = R++ -src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + - '(' + src[NUMERICIDENTIFIER] + ')\\.' + - '(' + src[NUMERICIDENTIFIER] + ')' - -var MAINVERSIONLOOSE = R++ -src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[NUMERICIDENTIFIERLOOSE] + ')' - -// ## Pre-release Version Identifier -// A numeric identifier, or a non-numeric identifier. - -var PRERELEASEIDENTIFIER = R++ -src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + - '|' + src[NONNUMERICIDENTIFIER] + ')' - -var PRERELEASEIDENTIFIERLOOSE = R++ -src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + - '|' + src[NONNUMERICIDENTIFIER] + ')' - -// ## Pre-release Version -// Hyphen, followed by one or more dot-separated pre-release version -// identifiers. - -var PRERELEASE = R++ -src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + - '(?:\\.' 
+ src[PRERELEASEIDENTIFIER] + ')*))' - -var PRERELEASELOOSE = R++ -src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + - '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))' - -// ## Build Metadata Identifier -// Any combination of digits, letters, or hyphens. - -var BUILDIDENTIFIER = R++ -src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+' - -// ## Build Metadata -// Plus sign, followed by one or more period-separated build metadata -// identifiers. - -var BUILD = R++ -src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + - '(?:\\.' + src[BUILDIDENTIFIER] + ')*))' - -// ## Full Version String -// A main version, followed optionally by a pre-release version and -// build metadata. - -// Note that the only major, minor, patch, and pre-release sections of -// the version string are capturing groups. The build metadata is not a -// capturing group, because it should not ever be used in version -// comparison. - -var FULL = R++ -var FULLPLAIN = 'v?' + src[MAINVERSION] + - src[PRERELEASE] + '?' + - src[BUILD] + '?' - -src[FULL] = '^' + FULLPLAIN + '$' - -// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. -// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty -// common in the npm registry. -var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + - src[PRERELEASELOOSE] + '?' + - src[BUILD] + '?' - -var LOOSE = R++ -src[LOOSE] = '^' + LOOSEPLAIN + '$' - -var GTLT = R++ -src[GTLT] = '((?:<|>)?=?)' - -// Something like "2.*" or "1.2.x". -// Note that "x.x" is a valid xRange identifer, meaning "any version" -// Only the first item is strictly required. -var XRANGEIDENTIFIERLOOSE = R++ -src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' -var XRANGEIDENTIFIER = R++ -src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*' - -var XRANGEPLAIN = R++ -src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + - '(?:' + src[PRERELEASE] + ')?' + - src[BUILD] + '?' + - ')?)?' - -var XRANGEPLAINLOOSE = R++ -src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:' + src[PRERELEASELOOSE] + ')?' + - src[BUILD] + '?' + - ')?)?' - -var XRANGE = R++ -src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$' -var XRANGELOOSE = R++ -src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$' - -// Coercion. -// Extract anything that could conceivably be a part of a valid semver -var COERCE = R++ -src[COERCE] = '(?:^|[^\\d])' + - '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + - '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + - '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + - '(?:$|[^\\d])' - -// Tilde ranges. -// Meaning is "reasonably at or greater than" -var LONETILDE = R++ -src[LONETILDE] = '(?:~>?)' - -var TILDETRIM = R++ -src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+' -re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g') -var tildeTrimReplace = '$1~' - -var TILDE = R++ -src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$' -var TILDELOOSE = R++ -src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$' - -// Caret ranges. 
-// Meaning is "at least and backwards compatible with" -var LONECARET = R++ -src[LONECARET] = '(?:\\^)' - -var CARETTRIM = R++ -src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+' -re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g') -var caretTrimReplace = '$1^' - -var CARET = R++ -src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$' -var CARETLOOSE = R++ -src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$' - -// A simple gt/lt/eq thing, or just "" to indicate "any version" -var COMPARATORLOOSE = R++ -src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$' -var COMPARATOR = R++ -src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$' - -// An expression to strip any whitespace between the gtlt and the thing -// it modifies, so that `> 1.2.3` ==> `>1.2.3` -var COMPARATORTRIM = R++ -src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + - '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')' - -// this one has to use the /g flag -re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g') -var comparatorTrimReplace = '$1$2$3' - -// Something like `1.2.3 - 1.2.4` -// Note that these all use the loose form, because they'll be -// checked against either the strict or loose comparator form -// later. -var HYPHENRANGE = R++ -src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + - '\\s+-\\s+' + - '(' + src[XRANGEPLAIN] + ')' + - '\\s*$' - -var HYPHENRANGELOOSE = R++ -src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + - '\\s+-\\s+' + - '(' + src[XRANGEPLAINLOOSE] + ')' + - '\\s*$' - -// Star ranges basically just allow anything at all. -var STAR = R++ -src[STAR] = '(<|>)?=?\\s*\\*' - -// Compile to actual regexp objects. -// All are flag-free, unless they were created above with a flag. -for (var i = 0; i < R; i++) { - debug(i, src[i]) - if (!re[i]) { - re[i] = new RegExp(src[i]) - } -} - -exports.parse = parse -function parse (version, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); - if (version instanceof SemVer) { - return version - } + const headers = new Headers(init.headers || input.headers || {}); - if (typeof version !== 'string') { - return null - } + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } - if (version.length > MAX_LENGTH) { - return null - } + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; - var r = options.loose ? re[LOOSE] : re[FULL] - if (!r.test(version)) { - return null - } + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } - try { - return new SemVer(version, options) - } catch (er) { - return null - } -} + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; -exports.valid = valid -function valid (version, options) { - var v = parse(version, options) - return v ? v.version : null -} + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } -exports.clean = clean -function clean (version, options) { - var s = parse(version.trim().replace(/^[=v]+/, ''), options) - return s ? s.version : null -} + get method() { + return this[INTERNALS$2].method; + } -exports.SemVer = SemVer + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } -function SemVer (version, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } - if (version instanceof SemVer) { - if (version.loose === options.loose) { - return version - } else { - version = version.version - } - } else if (typeof version !== 'string') { - throw new TypeError('Invalid Version: ' + version) - } + get headers() { + return this[INTERNALS$2].headers; + } - if (version.length > MAX_LENGTH) { - throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') - } + get redirect() { + return this[INTERNALS$2].redirect; + } - if (!(this instanceof SemVer)) { - return new SemVer(version, options) - } + get signal() { + return this[INTERNALS$2].signal; + } - debug('SemVer', version, options) - this.options = options - this.loose = !!options.loose + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} - var m = version.trim().match(options.loose ? re[LOOSE] : re[FULL]) +Body.mixIn(Request.prototype); - if (!m) { - throw new TypeError('Invalid Version: ' + version) - } +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); - this.raw = version +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); - // these are actually numbers - this.major = +m[1] - this.minor = +m[2] - this.patch = +m[3] +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); - if (this.major > MAX_SAFE_INTEGER || this.major < 0) { - throw new TypeError('Invalid major version') - } + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } - if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { - throw new TypeError('Invalid minor version') - } + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } - if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { - throw new TypeError('Invalid patch version') - } + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } - // numberify any prerelease numeric ids - if (!m[4]) { - this.prerelease = [] - } else { - this.prerelease = m[4].split('.').map(function (id) { - if (/^[0-9]+$/.test(id)) { - var num = +id - if (num >= 0 && num < MAX_SAFE_INTEGER) { - return num - } - } - return id - }) - } + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } - this.build = m[5] ? m[5].split('.') : [] - this.format() -} + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } -SemVer.prototype.format = function () { - this.version = this.major + '.' + this.minor + '.' 
+ this.patch - if (this.prerelease.length) { - this.version += '-' + this.prerelease.join('.') - } - return this.version -} + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } -SemVer.prototype.toString = function () { - return this.version -} + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } -SemVer.prototype.compare = function (other) { - debug('SemVer.compare', this.version, this.options, other) - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } - return this.compareMain(other) || this.comparePre(other) -} + if (!headers.has('Connection') && !agent) { + headers.set('Connection', 'close'); + } -SemVer.prototype.compareMain = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js - return compareIdentifiers(this.major, other.major) || - compareIdentifiers(this.minor, other.minor) || - compareIdentifiers(this.patch, other.patch) + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); } -SemVer.prototype.comparePre = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - // NOT having a prerelease is > having one - if (this.prerelease.length && !other.prerelease.length) { - return -1 - } else if (!this.prerelease.length && other.prerelease.length) { - return 1 - } else if (!this.prerelease.length && !other.prerelease.length) { - return 0 - } - - var i = 0 - do { - var a = this.prerelease[i] - var b = other.prerelease[i] - debug('prerelease compare', i, a, b) - if (a === undefined && b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue - } else { - return compareIdentifiers(a, b) - } - } while (++i) -} - -// preminor will bump the version up to the next minor release, and immediately -// down to pre-release. premajor and prepatch work the same way. -SemVer.prototype.inc = function (release, identifier) { - switch (release) { - case 'premajor': - this.prerelease.length = 0 - this.patch = 0 - this.minor = 0 - this.major++ - this.inc('pre', identifier) - break - case 'preminor': - this.prerelease.length = 0 - this.patch = 0 - this.minor++ - this.inc('pre', identifier) - break - case 'prepatch': - // If this is already a prerelease, it will bump to the next version - // drop any prereleases that might already exist, since they are not - // relevant at this point. - this.prerelease.length = 0 - this.inc('patch', identifier) - this.inc('pre', identifier) - break - // If the input is a non-prerelease version, this acts the same as - // prepatch. - case 'prerelease': - if (this.prerelease.length === 0) { - this.inc('patch', identifier) - } - this.inc('pre', identifier) - break - - case 'major': - // If this is a pre-major version, bump up to the same major version. - // Otherwise increment major. 
- // 1.0.0-5 bumps to 1.0.0 - // 1.1.0 bumps to 2.0.0 - if (this.minor !== 0 || - this.patch !== 0 || - this.prerelease.length === 0) { - this.major++ - } - this.minor = 0 - this.patch = 0 - this.prerelease = [] - break - case 'minor': - // If this is a pre-minor version, bump up to the same minor version. - // Otherwise increment minor. - // 1.2.0-5 bumps to 1.2.0 - // 1.2.1 bumps to 1.3.0 - if (this.patch !== 0 || this.prerelease.length === 0) { - this.minor++ - } - this.patch = 0 - this.prerelease = [] - break - case 'patch': - // If this is not a pre-release version, it will increment the patch. - // If it is a pre-release it will bump up to the same patch version. - // 1.2.0-5 patches to 1.2.0 - // 1.2.0 patches to 1.2.1 - if (this.prerelease.length === 0) { - this.patch++ - } - this.prerelease = [] - break - // This probably shouldn't be used publicly. - // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. - case 'pre': - if (this.prerelease.length === 0) { - this.prerelease = [0] - } else { - var i = this.prerelease.length - while (--i >= 0) { - if (typeof this.prerelease[i] === 'number') { - this.prerelease[i]++ - i = -2 - } - } - if (i === -1) { - // didn't increment anything - this.prerelease.push(0) - } - } - if (identifier) { - // 1.2.0-beta.1 bumps to 1.2.0-beta.2, - // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 - if (this.prerelease[0] === identifier) { - if (isNaN(this.prerelease[1])) { - this.prerelease = [identifier, 0] - } - } else { - this.prerelease = [identifier, 0] - } - } - break - - default: - throw new Error('invalid increment argument: ' + release) - } - this.format() - this.raw = this.version - return this -} +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ -exports.inc = inc -function inc (version, release, loose, identifier) { - if (typeof (loose) === 'string') { - identifier = loose - loose = undefined - } +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); - try { - return new SemVer(version, loose).inc(release, identifier).version - } catch (er) { - return null - } -} + this.type = 'aborted'; + this.message = message; -exports.diff = diff -function diff (version1, version2) { - if (eq(version1, version2)) { - return null - } else { - var v1 = parse(version1) - var v2 = parse(version2) - var prefix = '' - if (v1.prerelease.length || v2.prerelease.length) { - prefix = 'pre' - var defaultResult = 'prerelease' - } - for (var key in v1) { - if (key === 'major' || key === 'minor' || key === 'patch') { - if (v1[key] !== v2[key]) { - return prefix + key - } - } - } - return defaultResult // may be undefined - } + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); } -exports.compareIdentifiers = compareIdentifiers - -var numeric = /^[0-9]+$/ -function compareIdentifiers (a, b) { - var anum = numeric.test(a) - var bnum = numeric.test(b) +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; - if (anum && bnum) { - a = +a - b = +b - } +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; +const resolve_url = Url.resolve; - return a === b ? 0 - : (anum && !bnum) ? -1 - : (bnum && !anum) ? 1 - : a < b ? 
-1 - : 1 -} +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { -exports.rcompareIdentifiers = rcompareIdentifiers -function rcompareIdentifiers (a, b) { - return compareIdentifiers(b, a) -} + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } -exports.major = major -function major (a, loose) { - return new SemVer(a, loose).major -} + Body.Promise = fetch.Promise; -exports.minor = minor -function minor (a, loose) { - return new SemVer(a, loose).minor -} + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); -exports.patch = patch -function patch (a, loose) { - return new SemVer(a, loose).patch -} + const send = (options.protocol === 'https:' ? https : http).request; + const signal = request.signal; -exports.compare = compare -function compare (a, b, loose) { - return new SemVer(a, loose).compare(new SemVer(b, loose)) -} + let response = null; -exports.compareLoose = compareLoose -function compareLoose (a, b) { - return compare(a, b, true) -} + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + request.body.destroy(error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; -exports.rcompare = rcompare -function rcompare (a, b, loose) { - return compare(b, a, loose) -} + if (signal && signal.aborted) { + abort(); + return; + } -exports.sort = sort -function sort (list, loose) { - return list.sort(function (a, b) { - return exports.compare(a, b, loose) - }) -} + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; -exports.rsort = rsort -function rsort (list, loose) { - return list.sort(function (a, b) { - return exports.rcompare(a, b, loose) - }) -} + // send request + const req = send(options); + let reqTimeout; -exports.gt = gt -function gt (a, b, loose) { - return compare(a, b, loose) > 0 -} + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } -exports.lt = lt -function lt (a, b, loose) { - return compare(a, b, loose) < 0 -} + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } -exports.eq = eq -function eq (a, b, loose) { - return compare(a, b, loose) === 0 -} + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } -exports.neq = neq -function neq (a, b, loose) { - return compare(a, b, loose) !== 0 -} + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + finalize(); + }); -exports.gte = gte -function gte (a, b, loose) { - return compare(a, b, loose) >= 0 -} + req.on('response', function (res) { + clearTimeout(reqTimeout); -exports.lte = lte -function lte (a, b, loose) { - return compare(a, b, loose) <= 0 -} + const headers = createHeadersLenient(res.headers); -exports.cmp = cmp -function cmp (a, op, b, loose) { - switch (op) { - case '===': - if (typeof a === 'object') 
- a = a.version - if (typeof b === 'object') - b = b.version - return a === b + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); - case '!==': - if (typeof a === 'object') - a = a.version - if (typeof b === 'object') - b = b.version - return a !== b + // HTTP fetch step 5.3 + const locationURL = location === null ? null : resolve_url(request.url, location); - case '': - case '=': - case '==': - return eq(a, b, loose) + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. + if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } - case '!=': - return neq(a, b, loose) + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } - case '>': - return gt(a, b, loose) + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; - case '>=': - return gte(a, b, loose) + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } - case '<': - return lt(a, b, loose) + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } - case '<=': - return lte(a, b, loose) + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } - default: - throw new TypeError('Invalid operator: ' + op) - } -} + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); -exports.Comparator = Comparator -function Comparator (comp, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; - if (comp instanceof Comparator) { - if (comp.loose === !!options.loose) { - return comp - } else { - comp = comp.value - } - } + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); - if (!(this instanceof Comparator)) { - 
return new Comparator(comp, options) - } + // HTTP-network fetch step 12.1.1.4: handle content codings - debug('comparator', comp, options) - this.options = options - this.loose = !!options.loose - this.parse(comp) + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } - if (this.semver === ANY) { - this.value = '' - } else { - this.value = this.operator + this.semver.version - } + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; - debug('comp', this) -} + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } -var ANY = {} -Comparator.prototype.parse = function (comp) { - var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR] - var m = comp.match(r) + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + return; + } - if (!m) { - throw new TypeError('Invalid comparator: ' + comp) - } + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } - this.operator = m[1] - if (this.operator === '=') { - this.operator = '' - } + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); - // if it literally is just '>' or '' then allow anything. 
- if (!m[2]) { - this.semver = ANY - } else { - this.semver = new SemVer(m[2], this.options.loose) - } + writeToStream(req, request); + }); } +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; -Comparator.prototype.toString = function () { - return this.value -} +// expose Promise +fetch.Promise = global.Promise; -Comparator.prototype.test = function (version) { - debug('Comparator.test', version, this.options.loose) +module.exports = exports = fetch; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.default = exports; +exports.Headers = Headers; +exports.Request = Request; +exports.Response = Response; +exports.FetchError = FetchError; - if (this.semver === ANY) { - return true - } - if (typeof version === 'string') { - version = new SemVer(version, this.options) - } +/***/ }), - return cmp(version, this.operator, this.semver, this.options) -} +/***/ 1223: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -Comparator.prototype.intersects = function (comp, options) { - if (!(comp instanceof Comparator)) { - throw new TypeError('a Comparator is required') - } +var wrappy = __nccwpck_require__(2940) +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) - var rangeTmp + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) - if (this.operator === '') { - rangeTmp = new Range(comp.value, options) - return satisfies(this.value, rangeTmp, options) - } else if (comp.operator === '') { - rangeTmp = new Range(this.value, options) - return satisfies(comp.semver, rangeTmp, options) +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) } - - var sameDirectionIncreasing = - (this.operator === '>=' || this.operator === '>') && - (comp.operator === '>=' || comp.operator === '>') - var sameDirectionDecreasing = - (this.operator === '<=' || this.operator === '<') && - (comp.operator === '<=' || comp.operator === '<') - var sameSemVer = this.semver.version === comp.semver.version - var differentDirectionsInclusive = - (this.operator === '>=' || this.operator === '<=') && - (comp.operator === '>=' || comp.operator === '<=') - var oppositeDirectionsLessThan = - cmp(this.semver, '<', comp.semver, options) && - ((this.operator === '>=' || this.operator === '>') && - (comp.operator === '<=' || comp.operator === '<')) - var oppositeDirectionsGreaterThan = - cmp(this.semver, '>', comp.semver, options) && - ((this.operator === '<=' || this.operator === '<') && - (comp.operator === '>=' || comp.operator === '>')) - - return sameDirectionIncreasing || sameDirectionDecreasing || - (sameSemVer && differentDirectionsInclusive) || - oppositeDirectionsLessThan || oppositeDirectionsGreaterThan + f.called = false + return f } -exports.Range = Range -function Range (range, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } - - if (range instanceof Range) { - if 
(range.loose === !!options.loose && - range.includePrerelease === !!options.includePrerelease) { - return range - } else { - return new Range(range.raw, options) - } - } - - if (range instanceof Comparator) { - return new Range(range.value, options) +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} - if (!(this instanceof Range)) { - return new Range(range, options) - } - this.options = options - this.loose = !!options.loose - this.includePrerelease = !!options.includePrerelease +/***/ }), - // First, split based on boolean or || - this.raw = range - this.set = range.split(/\s*\|\|\s*/).map(function (range) { - return this.parseRange(range.trim()) - }, this).filter(function (c) { - // throw out any that are not relevant for whatever reason - return c.length - }) +/***/ 4294: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (!this.set.length) { - throw new TypeError('Invalid SemVer Range: ' + range) - } +module.exports = __nccwpck_require__(4219); - this.format() -} -Range.prototype.format = function () { - this.range = this.set.map(function (comps) { - return comps.join(' ').trim() - }).join('||').trim() - return this.range -} +/***/ }), -Range.prototype.toString = function () { - return this.range -} +/***/ 4219: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -Range.prototype.parseRange = function (range) { - var loose = this.options.loose - range = range.trim() - // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` - var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE] - range = range.replace(hr, hyphenReplace) - debug('hyphen replace', range) - // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` - range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace) - debug('comparator trim', range, re[COMPARATORTRIM]) +"use strict"; - // `~ 1.2.3` => `~1.2.3` - range = range.replace(re[TILDETRIM], tildeTrimReplace) - // `^ 1.2.3` => `^1.2.3` - range = range.replace(re[CARETTRIM], caretTrimReplace) +var net = __nccwpck_require__(1631); +var tls = __nccwpck_require__(4016); +var http = __nccwpck_require__(8605); +var https = __nccwpck_require__(7211); +var events = __nccwpck_require__(8614); +var assert = __nccwpck_require__(2357); +var util = __nccwpck_require__(1669); - // normalize spaces - range = range.split(/\s+/).join(' ') - // At this point, the range is completely trimmed and - // ready to be split into comparators. +exports.httpOverHttp = httpOverHttp; +exports.httpsOverHttp = httpsOverHttp; +exports.httpOverHttps = httpOverHttps; +exports.httpsOverHttps = httpsOverHttps; - var compRe = loose ? 
re[COMPARATORLOOSE] : re[COMPARATOR] - var set = range.split(' ').map(function (comp) { - return parseComparator(comp, this.options) - }, this).join(' ').split(/\s+/) - if (this.options.loose) { - // in loose mode, throw out any that are not valid comparators - set = set.filter(function (comp) { - return !!comp.match(compRe) - }) - } - set = set.map(function (comp) { - return new Comparator(comp, this.options) - }, this) - return set +function httpOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + return agent; } -Range.prototype.intersects = function (range, options) { - if (!(range instanceof Range)) { - throw new TypeError('a Range is required') - } +function httpsOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; +} - return this.set.some(function (thisComparators) { - return thisComparators.every(function (thisComparator) { - return range.set.some(function (rangeComparators) { - return rangeComparators.every(function (rangeComparator) { - return thisComparator.intersects(rangeComparator, options) - }) - }) - }) - }) +function httpOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + return agent; } -// Mostly just for testing and legacy API reasons -exports.toComparators = toComparators -function toComparators (range, options) { - return new Range(range, options).set.map(function (comp) { - return comp.map(function (c) { - return c.value - }).join(' ').trim().split(' ') - }) +function httpsOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; } -// comprised of xranges, tildes, stars, and gtlt's at this point. -// already replaced the hyphen ranges -// turn into a set of JUST comparators. -function parseComparator (comp, options) { - debug('comp', comp, options) - comp = replaceCarets(comp, options) - debug('caret', comp) - comp = replaceTildes(comp, options) - debug('tildes', comp) - comp = replaceXRanges(comp, options) - debug('xrange', comp) - comp = replaceStars(comp, options) - debug('stars', comp) - return comp -} - -function isX (id) { - return !id || id.toLowerCase() === 'x' || id === '*' -} - -// ~, ~> --> * (any, kinda silly) -// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 -// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 -// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 -// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 -// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 -function replaceTildes (comp, options) { - return comp.trim().split(/\s+/).map(function (comp) { - return replaceTilde(comp, options) - }).join(' ') -} - -function replaceTilde (comp, options) { - var r = options.loose ? re[TILDELOOSE] : re[TILDE] - return comp.replace(r, function (_, M, m, p, pr) { - debug('tilde', comp, _, M, m, p, pr) - var ret - - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (isX(p)) { - // ~1.2 == >=1.2.0 <1.3.0 - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } else if (pr) { - debug('replaceTilde pr', pr) - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + (+m + 1) + '.0' - } else { - // ~1.2.3 == >=1.2.3 <1.3.0 - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' 
+ (+m + 1) + '.0' - } - debug('tilde return', ret) - return ret - }) -} +function TunnelingAgent(options) { + var self = this; + self.options = options || {}; + self.proxyOptions = self.options.proxy || {}; + self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; + self.requests = []; + self.sockets = []; -// ^ --> * (any, kinda silly) -// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 -// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 -// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 -// ^1.2.3 --> >=1.2.3 <2.0.0 -// ^1.2.0 --> >=1.2.0 <2.0.0 -function replaceCarets (comp, options) { - return comp.trim().split(/\s+/).map(function (comp) { - return replaceCaret(comp, options) - }).join(' ') -} - -function replaceCaret (comp, options) { - debug('caret', comp, options) - var r = options.loose ? re[CARETLOOSE] : re[CARET] - return comp.replace(r, function (_, M, m, p, pr) { - debug('caret', comp, _, M, m, p, pr) - var ret - - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (isX(p)) { - if (M === '0') { - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } else { - ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' - } - } else if (pr) { - debug('replaceCaret pr', pr) - if (M === '0') { - if (m === '0') { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + m + '.' + (+p + 1) - } else { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + (+m + 1) + '.0' - } - } else { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + (+M + 1) + '.0.0' - } - } else { - debug('no pr') - if (M === '0') { - if (m === '0') { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + m + '.' + (+p + 1) - } else { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + (+m + 1) + '.0' - } - } else { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + (+M + 1) + '.0.0' + self.on('free', function onFree(socket, host, port, localAddress) { + var options = toOptions(host, port, localAddress); + for (var i = 0, len = self.requests.length; i < len; ++i) { + var pending = self.requests[i]; + if (pending.host === options.host && pending.port === options.port) { + // Detect the request to connect same origin server, + // reuse the connection. + self.requests.splice(i, 1); + pending.request.onSocket(socket); + return; } } - - debug('caret return', ret) - return ret - }) -} - -function replaceXRanges (comp, options) { - debug('replaceXRanges', comp, options) - return comp.split(/\s+/).map(function (comp) { - return replaceXRange(comp, options) - }).join(' ') + socket.destroy(); + self.removeSocket(socket); + }); } +util.inherits(TunnelingAgent, events.EventEmitter); -function replaceXRange (comp, options) { - comp = comp.trim() - var r = options.loose ? re[XRANGELOOSE] : re[XRANGE] - return comp.replace(r, function (ret, gtlt, M, m, p, pr) { - debug('xRange', comp, ret, gtlt, M, m, p, pr) - var xM = isX(M) - var xm = xM || isX(m) - var xp = xm || isX(p) - var anyX = xp +TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { + var self = this; + var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); - if (gtlt === '=' && anyX) { - gtlt = '' - } + if (self.sockets.length >= this.maxSockets) { + // We are over limit so we'll add it to the queue. 
+ self.requests.push(options); + return; + } - if (xM) { - if (gtlt === '>' || gtlt === '<') { - // nothing is allowed - ret = '<0.0.0' - } else { - // nothing is forbidden - ret = '*' - } - } else if (gtlt && anyX) { - // we know patch is an x, because we have any x at all. - // replace X with 0 - if (xm) { - m = 0 - } - p = 0 - - if (gtlt === '>') { - // >1 => >=2.0.0 - // >1.2 => >=1.3.0 - // >1.2.3 => >= 1.2.4 - gtlt = '>=' - if (xm) { - M = +M + 1 - m = 0 - p = 0 - } else { - m = +m + 1 - p = 0 - } - } else if (gtlt === '<=') { - // <=0.7.x is actually <0.8.0, since any 0.7.x should - // pass. Similarly, <=7.x is actually <8.0.0, etc. - gtlt = '<' - if (xm) { - M = +M + 1 - } else { - m = +m + 1 - } - } + // If we are under maxSockets create a new one. + self.createSocket(options, function(socket) { + socket.on('free', onFree); + socket.on('close', onCloseOrRemove); + socket.on('agentRemove', onCloseOrRemove); + req.onSocket(socket); - ret = gtlt + M + '.' + m + '.' + p - } else if (xm) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (xp) { - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + function onFree() { + self.emit('free', socket, options); } - debug('xRange return', ret) - - return ret - }) -} - -// Because * is AND-ed with everything else in the comparator, -// and '' means "any version", just remove the *s entirely. -function replaceStars (comp, options) { - debug('replaceStars', comp, options) - // Looseness is ignored here. star is always as loose as it gets! - return comp.trim().replace(re[STAR], '') -} - -// This function is passed to string.replace(re[HYPHENRANGE]) -// M, m, patch, prerelease, build -// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 -// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do -// 1.2 - 3.4 => >=1.2.0 <3.5.0 -function hyphenReplace ($0, - from, fM, fm, fp, fpr, fb, - to, tM, tm, tp, tpr, tb) { - if (isX(fM)) { - from = '' - } else if (isX(fm)) { - from = '>=' + fM + '.0.0' - } else if (isX(fp)) { - from = '>=' + fM + '.' + fm + '.0' - } else { - from = '>=' + from - } - - if (isX(tM)) { - to = '' - } else if (isX(tm)) { - to = '<' + (+tM + 1) + '.0.0' - } else if (isX(tp)) { - to = '<' + tM + '.' + (+tm + 1) + '.0' - } else if (tpr) { - to = '<=' + tM + '.' + tm + '.' 
+ tp + '-' + tpr - } else { - to = '<=' + to - } + function onCloseOrRemove(err) { + self.removeSocket(socket); + socket.removeListener('free', onFree); + socket.removeListener('close', onCloseOrRemove); + socket.removeListener('agentRemove', onCloseOrRemove); + } + }); +}; - return (from + ' ' + to).trim() -} +TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { + var self = this; + var placeholder = {}; + self.sockets.push(placeholder); -// if ANY of the sets match ALL of its comparators, then pass -Range.prototype.test = function (version) { - if (!version) { - return false + var connectOptions = mergeOptions({}, self.proxyOptions, { + method: 'CONNECT', + path: options.host + ':' + options.port, + agent: false, + headers: { + host: options.host + ':' + options.port + } + }); + if (options.localAddress) { + connectOptions.localAddress = options.localAddress; } - - if (typeof version === 'string') { - version = new SemVer(version, this.options) + if (connectOptions.proxyAuth) { + connectOptions.headers = connectOptions.headers || {}; + connectOptions.headers['Proxy-Authorization'] = 'Basic ' + + new Buffer(connectOptions.proxyAuth).toString('base64'); } - for (var i = 0; i < this.set.length; i++) { - if (testSet(this.set[i], version, this.options)) { - return true - } - } - return false -} + debug('making CONNECT request'); + var connectReq = self.request(connectOptions); + connectReq.useChunkedEncodingByDefault = false; // for v0.6 + connectReq.once('response', onResponse); // for v0.6 + connectReq.once('upgrade', onUpgrade); // for v0.6 + connectReq.once('connect', onConnect); // for v0.7 or later + connectReq.once('error', onError); + connectReq.end(); -function testSet (set, version, options) { - for (var i = 0; i < set.length; i++) { - if (!set[i].test(version)) { - return false - } + function onResponse(res) { + // Very hacky. This is necessary to avoid http-parser leaks. + res.upgrade = true; } - if (version.prerelease.length && !options.includePrerelease) { - // Find the set of versions that are allowed to have prereleases - // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 - // That should allow `1.2.3-pr.2` to pass. - // However, `1.2.4-alpha.notready` should NOT be allowed, - // even though it's within the range set by the comparators. - for (i = 0; i < set.length; i++) { - debug(set[i].semver) - if (set[i].semver === ANY) { - continue - } - - if (set[i].semver.prerelease.length > 0) { - var allowed = set[i].semver - if (allowed.major === version.major && - allowed.minor === version.minor && - allowed.patch === version.patch) { - return true - } - } - } - - // Version has a -pre, but it's not one of the ones we like. - return false + function onUpgrade(res, socket, head) { + // Hacky. 
+ process.nextTick(function() { + onConnect(res, socket, head); + }); } - return true -} - -exports.satisfies = satisfies -function satisfies (version, range, options) { - try { - range = new Range(range, options) - } catch (er) { - return false - } - return range.test(version) -} + function onConnect(res, socket, head) { + connectReq.removeAllListeners(); + socket.removeAllListeners(); -exports.maxSatisfying = maxSatisfying -function maxSatisfying (versions, range, options) { - var max = null - var maxSV = null - try { - var rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach(function (v) { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!max || maxSV.compare(v) === -1) { - // compare(max, v, true) - max = v - maxSV = new SemVer(max, options) - } + if (res.statusCode !== 200) { + debug('tunneling socket could not be established, statusCode=%d', + res.statusCode); + socket.destroy(); + var error = new Error('tunneling socket could not be established, ' + + 'statusCode=' + res.statusCode); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; } - }) - return max -} - -exports.minSatisfying = minSatisfying -function minSatisfying (versions, range, options) { - var min = null - var minSV = null - try { - var rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach(function (v) { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!min || minSV.compare(v) === 1) { - // compare(min, v, true) - min = v - minSV = new SemVer(min, options) - } + if (head.length > 0) { + debug('got illegal response body from proxy'); + socket.destroy(); + var error = new Error('got illegal response body from proxy'); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; } - }) - return min -} - -exports.minVersion = minVersion -function minVersion (range, loose) { - range = new Range(range, loose) - - var minver = new SemVer('0.0.0') - if (range.test(minver)) { - return minver - } - - minver = new SemVer('0.0.0-0') - if (range.test(minver)) { - return minver + debug('tunneling connection has established'); + self.sockets[self.sockets.indexOf(placeholder)] = socket; + return cb(socket); } - minver = null - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i] + function onError(cause) { + connectReq.removeAllListeners(); - comparators.forEach(function (comparator) { - // Clone to avoid manipulating the comparator's semver object. 
- var compver = new SemVer(comparator.semver.version) - switch (comparator.operator) { - case '>': - if (compver.prerelease.length === 0) { - compver.patch++ - } else { - compver.prerelease.push(0) - } - compver.raw = compver.format() - /* fallthrough */ - case '': - case '>=': - if (!minver || gt(minver, compver)) { - minver = compver - } - break - case '<': - case '<=': - /* Ignore maximum versions */ - break - /* istanbul ignore next */ - default: - throw new Error('Unexpected operation: ' + comparator.operator) - } - }) + debug('tunneling socket could not be established, cause=%s\n', + cause.message, cause.stack); + var error = new Error('tunneling socket could not be established, ' + + 'cause=' + cause.message); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); } +}; - if (minver && range.test(minver)) { - return minver +TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { + var pos = this.sockets.indexOf(socket) + if (pos === -1) { + return; } + this.sockets.splice(pos, 1); - return null -} - -exports.validRange = validRange -function validRange (range, options) { - try { - // Return '*' instead of '' so that truthiness works. - // This will throw if it's invalid anyway - return new Range(range, options).range || '*' - } catch (er) { - return null + var pending = this.requests.shift(); + if (pending) { + // If we have pending requests and a socket gets closed a new one + // needs to be created to take over in the pool for the one that closed. + this.createSocket(pending, function(socket) { + pending.request.onSocket(socket); + }); } -} +}; -// Determine if version is less than all the versions possible in the range -exports.ltr = ltr -function ltr (version, range, options) { - return outside(version, range, '<', options) -} +function createSecureSocket(options, cb) { + var self = this; + TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { + var hostHeader = options.request.getHeader('host'); + var tlsOptions = mergeOptions({}, self.options, { + socket: socket, + servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host + }); -// Determine if version is greater than all the versions possible in the range. -exports.gtr = gtr -function gtr (version, range, options) { - return outside(version, range, '>', options) + // 0 is dummy port for v0.6 + var secureSocket = tls.connect(0, tlsOptions); + self.sockets[self.sockets.indexOf(socket)] = secureSocket; + cb(secureSocket); + }); } -exports.outside = outside -function outside (version, range, hilo, options) { - version = new SemVer(version, options) - range = new Range(range, options) - - var gtfn, ltefn, ltfn, comp, ecomp - switch (hilo) { - case '>': - gtfn = gt - ltefn = lte - ltfn = lt - comp = '>' - ecomp = '>=' - break - case '<': - gtfn = lt - ltefn = gte - ltfn = gt - comp = '<' - ecomp = '<=' - break - default: - throw new TypeError('Must provide a hilo val of "<" or ">"') - } - // If it satisifes the range it is not outside - if (satisfies(version, range, options)) { - return false +function toOptions(host, port, localAddress) { + if (typeof host === 'string') { // since v0.10 + return { + host: host, + port: port, + localAddress: localAddress + }; } + return host; // for v0.11 or later +} - // From now on, variable terms are as if we're in "gtr" mode. - // but note that everything is flipped for the "ltr" function. 
- - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i] - - var high = null - var low = null - - comparators.forEach(function (comparator) { - if (comparator.semver === ANY) { - comparator = new Comparator('>=0.0.0') - } - high = high || comparator - low = low || comparator - if (gtfn(comparator.semver, high.semver, options)) { - high = comparator - } else if (ltfn(comparator.semver, low.semver, options)) { - low = comparator +function mergeOptions(target) { + for (var i = 1, len = arguments.length; i < len; ++i) { + var overrides = arguments[i]; + if (typeof overrides === 'object') { + var keys = Object.keys(overrides); + for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { + var k = keys[j]; + if (overrides[k] !== undefined) { + target[k] = overrides[k]; + } } - }) - - // If the edge version comparator has a operator then our version - // isn't outside it - if (high.operator === comp || high.operator === ecomp) { - return false - } - - // If the lowest version comparator has an operator and our version - // is less than it then it isn't higher than the range - if ((!low.operator || low.operator === comp) && - ltefn(version, low.semver)) { - return false - } else if (low.operator === ecomp && ltfn(version, low.semver)) { - return false } } - return true -} - -exports.prerelease = prerelease -function prerelease (version, options) { - var parsed = parse(version, options) - return (parsed && parsed.prerelease.length) ? parsed.prerelease : null -} - -exports.intersects = intersects -function intersects (r1, r2, options) { - r1 = new Range(r1, options) - r2 = new Range(r2, options) - return r1.intersects(r2) + return target; } -exports.coerce = coerce -function coerce (version) { - if (version instanceof SemVer) { - return version - } - - if (typeof version !== 'string') { - return null - } - - var match = version.match(re[COERCE]) - if (match == null) { - return null +var debug; +if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { + debug = function() { + var args = Array.prototype.slice.call(arguments); + if (typeof args[0] === 'string') { + args[0] = 'TUNNEL: ' + args[0]; + } else { + args.unshift('TUNNEL:'); + } + console.error.apply(console, args); } - - return parse(match[1] + - '.' + (match[2] || '0') + - '.' + (match[3] || '0')) +} else { + debug = function() {}; } +exports.debug = debug; // for test /***/ }), -/***/ 2116: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -var shebangRegex = __nccwpck_require__(2998); - -module.exports = function (str) { - var match = str.match(shebangRegex); - - if (!match) { - return null; - } - - var arr = match[0].replace(/#! ?/, '').split(' '); - var bin = arr[0].split('/').pop(); - var arg = arr[1]; - - return (bin === 'env' ? - arg : - bin + (arg ? ' ' + arg : '') - ); -}; - - -/***/ }), - -/***/ 2998: -/***/ ((module) => { +/***/ 5030: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -module.exports = /^#!.*/; - - -/***/ }), - -/***/ 3411: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = which -which.sync = whichSync - -var isWindows = process.platform === 'win32' || - process.env.OSTYPE === 'cygwin' || - process.env.OSTYPE === 'msys' - -var path = __nccwpck_require__(5622) -var COLON = isWindows ? 
';' : ':' -var isexe = __nccwpck_require__(7126) - -function getNotFoundError (cmd) { - var er = new Error('not found: ' + cmd) - er.code = 'ENOENT' - - return er -} - -function getPathInfo (cmd, opt) { - var colon = opt.colon || COLON - var pathEnv = opt.path || process.env.PATH || '' - var pathExt = [''] - - pathEnv = pathEnv.split(colon) - - var pathExtExe = '' - if (isWindows) { - pathEnv.unshift(process.cwd()) - pathExtExe = (opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM') - pathExt = pathExtExe.split(colon) - - - // Always test the cmd itself first. isexe will check to make sure - // it's found in the pathExt set. - if (cmd.indexOf('.') !== -1 && pathExt[0] !== '') - pathExt.unshift('') - } - // If it has a slash, then we don't bother searching the pathenv. - // just check the file itself, and that's it. - if (cmd.match(/\//) || isWindows && cmd.match(/\\/)) - pathEnv = [''] +Object.defineProperty(exports, "__esModule", ({ value: true })); - return { - env: pathEnv, - ext: pathExt, - extExe: pathExtExe +function getUserAgent() { + if (typeof navigator === "object" && "userAgent" in navigator) { + return navigator.userAgent; } -} -function which (cmd, opt, cb) { - if (typeof opt === 'function') { - cb = opt - opt = {} + if (typeof process === "object" && "version" in process) { + return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`; } - var info = getPathInfo(cmd, opt) - var pathEnv = info.env - var pathExt = info.ext - var pathExtExe = info.extExe - var found = [] + return ""; +} - ;(function F (i, l) { - if (i === l) { - if (opt.all && found.length) - return cb(null, found) - else - return cb(getNotFoundError(cmd)) - } +exports.getUserAgent = getUserAgent; +//# sourceMappingURL=index.js.map - var pathPart = pathEnv[i] - if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"') - pathPart = pathPart.slice(1, -1) - var p = path.join(pathPart, cmd) - if (!pathPart && (/^\.[\\\/]/).test(cmd)) { - p = cmd.slice(0, 2) + p - } - ;(function E (ii, ll) { - if (ii === ll) return F(i + 1, l) - var ext = pathExt[ii] - isexe(p + ext, { pathExt: pathExtExe }, function (er, is) { - if (!er && is) { - if (opt.all) - found.push(p + ext) - else - return cb(null, p + ext) - } - return E(ii + 1, ll) - }) - })(0, pathExt.length) - })(0, pathEnv.length) -} +/***/ }), -function whichSync (cmd, opt) { - opt = opt || {} +/***/ 1463: +/***/ ((__unused_webpack_module, exports) => { - var info = getPathInfo(cmd, opt) - var pathEnv = info.env - var pathExt = info.ext - var pathExtExe = info.extExe - var found = [] +"use strict"; - for (var i = 0, l = pathEnv.length; i < l; i ++) { - var pathPart = pathEnv[i] - if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"') - pathPart = pathPart.slice(1, -1) - var p = path.join(pathPart, cmd) - if (!pathPart && /^\.[\\\/]/.test(cmd)) { - p = cmd.slice(0, 2) + p - } - for (var j = 0, ll = pathExt.length; j < ll; j ++) { - var cur = p + pathExt[j] - var is - try { - is = isexe.sync(cur, { pathExt: pathExtExe }) - if (is) { - if (opt.all) - found.push(cur) - else - return cur - } - } catch (ex) {} +exports.fromCallback = function (fn) { + return Object.defineProperty(function (...args) { + if (typeof args[args.length - 1] === 'function') fn.apply(this, args) + else { + return new Promise((resolve, reject) => { + fn.call( + this, + ...args, + (err, res) => (err != null) ? 
reject(err) : resolve(res) + ) + }) } - } - - if (opt.all && found.length) - return found - - if (opt.nothrow) - return null + }, 'name', { value: fn.name }) +} - throw getNotFoundError(cmd) +exports.fromPromise = function (fn) { + return Object.defineProperty(function (...args) { + const cb = args[args.length - 1] + if (typeof cb !== 'function') return fn.apply(this, args) + else fn.apply(this, args.slice(0, -1)).then(r => cb(null, r), cb) + }, 'name', { value: fn.name }) } @@ -33032,6 +15499,8 @@ module.exports = { const { parse } = __nccwpck_require__(1150) const core = __nccwpck_require__(2186) +const { GitHub, getOctokitOptions } = __nccwpck_require__(3030) +const { throttling } = __nccwpck_require__(9968) const path = __nccwpck_require__(5622) const { @@ -33048,6 +15517,30 @@ const { const { dedent, execCmd } = __nccwpck_require__(8505) +const getOctokit = (token) => { + const Octokit = GitHub.plugin(throttling) + + const options = getOctokitOptions(token, { + throttle: { + onRateLimit: (retryAfter, options) => { + core.warning(`Request quota exhausted for request ${ options.method } ${ options.url }`) + + if (options.request.retryCount === 0) { + // only retries once + core.info(`Retrying after ${ retryAfter } seconds!`) + return true + } + }, + onAbuseLimit: (retryAfter, options) => { + // does not retry, only logs a warning + core.warning(`Abuse detected for request ${ options.method } ${ options.url }`) + } + } + }) + + return new Octokit(options) +} + const init = (repo) => { let github let baseBranch @@ -33243,7 +15736,8 @@ const init = (repo) => { } module.exports = { - init + init, + getOctokit } /***/ }), @@ -33516,7 +16010,6 @@ var __webpack_exports__ = {}; // This entry need to be wrapped in an IIFE because it need to be isolated against other modules in the chunk. 
(() => { const core = __nccwpck_require__(2186) -const github = __nccwpck_require__(5438) const fs = __nccwpck_require__(5747) const Git = __nccwpck_require__(109) @@ -33537,7 +16030,8 @@ const { } = __nccwpck_require__(4570) const run = async () => { - const client = new github.GitHub(GITHUB_TOKEN) + const client = Git.getOctokit(GITHUB_TOKEN) + // const client = github.getOctokit(GITHUB_TOKEN) const repos = await parseConfig() diff --git a/package-lock.json b/package-lock.json index 5f49845d..5aa11af3 100644 --- a/package-lock.json +++ b/package-lock.json @@ -277,6 +277,15 @@ } } }, + "@octokit/plugin-throttling": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/@octokit/plugin-throttling/-/plugin-throttling-3.4.1.tgz", + "integrity": "sha512-qCQ+Z4AnL9OrXvV59EH3GzPxsB+WyqufoCjiCJXJxTbnt3W+leXbXw5vHrMp4NG9ltw00McFWIxIxNQAzLNoTA==", + "requires": { + "@octokit/types": "^6.0.1", + "bottleneck": "^2.15.3" + } + }, "@octokit/request": { "version": "5.4.12", "resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.4.12.tgz", @@ -734,8 +743,7 @@ "bottleneck": { "version": "2.19.5", "resolved": "https://registry.npmjs.org/bottleneck/-/bottleneck-2.19.5.tgz", - "integrity": "sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw==", - "dev": true + "integrity": "sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw==" }, "brace-expansion": { "version": "1.1.11", diff --git a/package.json b/package.json index 8f7dc969..0faa6c66 100644 --- a/package.json +++ b/package.json @@ -29,6 +29,7 @@ "dependencies": { "@actions/core": "^1.3.0", "@actions/github": "^5.0.0", + "@octokit/plugin-throttling": "^3.4.1", "@putout/git-status-porcelain": "^1.1.0", "action-input-parser": "^1.2.1", "fs-extra": "^10.0.0", diff --git a/src/git.js b/src/git.js index a71c00b2..82d97701 100644 --- a/src/git.js +++ b/src/git.js @@ -1,5 +1,7 @@ const { parse } = require('@putout/git-status-porcelain') const core = require('@actions/core') +const { GitHub, getOctokitOptions } = require('@actions/github/lib/utils') +const { throttling } = require('@octokit/plugin-throttling') const path = require('path') const { @@ -16,6 +18,30 @@ const { const { dedent, execCmd } = require('./helpers') +const getOctokit = (token) => { + const Octokit = GitHub.plugin(throttling) + + const options = getOctokitOptions(token, { + throttle: { + onRateLimit: (retryAfter, options) => { + core.warning(`Request quota exhausted for request ${ options.method } ${ options.url }`) + + if (options.request.retryCount === 0) { + // only retries once + core.info(`Retrying after ${ retryAfter } seconds!`) + return true + } + }, + onAbuseLimit: (retryAfter, options) => { + // does not retry, only logs a warning + core.warning(`Abuse detected for request ${ options.method } ${ options.url }`) + } + } + }) + + return new Octokit(options) +} + const init = (repo) => { let github let baseBranch @@ -211,5 +237,6 @@ const init = (repo) => { } module.exports = { - init + init, + getOctokit } \ No newline at end of file diff --git a/src/index.js b/src/index.js index 25e18cff..95623dd3 100644 --- a/src/index.js +++ b/src/index.js @@ -1,5 +1,4 @@ const core = require('@actions/core') -const github = require('@actions/github') const fs = require('fs') const Git = require('./git') @@ -20,7 +19,8 @@ const { } = require('./config') const run = async () => { - const client = github.getOctokit(GITHUB_TOKEN) + const client = Git.getOctokit(GITHUB_TOKEN) + // const client 
= github.getOctokit(GITHUB_TOKEN) const repos = await parseConfig()
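
For reference, a minimal usage sketch (not part of the patch itself): the new Git.getOctokit() helper added in src/git.js wraps the @actions/github Octokit class with @octokit/plugin-throttling, so a request that hits the rate limit is retried once after the suggested retryAfter delay, while abuse-limit hits only produce a warning. The sketch below assumes a standalone script placed next to src/, a GITHUB_TOKEN environment variable, and placeholder owner/repo values.

const Git = require('./src/git')

const main = async () => {
	// getOctokit() returns an Octokit instance with the throttling plugin wired in:
	// onRateLimit retries the failed request once, onAbuseLimit only logs a warning.
	const client = Git.getOctokit(process.env.GITHUB_TOKEN)

	// Any request made through this client is throttled transparently.
	// The owner/repo values here are placeholders for illustration.
	const { data } = await client.request('GET /repos/{owner}/{repo}', {
		owner: 'some-owner',
		repo: 'some-repo'
	})

	console.log(`Fetched ${ data.full_name } with automatic rate-limit handling`)
}

main().catch((err) => {
	console.error(err)
	process.exit(1)
})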